diff -Nru osm2pgsql-0.82.0/binarysearcharray.c osm2pgsql-0.86.0/binarysearcharray.c --- osm2pgsql-0.82.0/binarysearcharray.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/binarysearcharray.c 2014-10-25 06:42:27.000000000 +0000 @@ -45,7 +45,6 @@ { return array->array[idx].value; } - exit(1); } void binary_search_remove(struct binary_search_array * array, int key) diff -Nru osm2pgsql-0.82.0/binarysearcharray.h osm2pgsql-0.86.0/binarysearcharray.h --- osm2pgsql-0.82.0/binarysearcharray.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/binarysearcharray.h 2014-10-25 06:42:27.000000000 +0000 @@ -1,4 +1,5 @@ - +#ifndef BINARYSEARCHARRAY_H +#define BINARYSEARCHARRAY_H struct key_val_tuple { @@ -18,3 +19,4 @@ struct binary_search_array * init_search_array(int capacity); void shutdown_search_array(struct binary_search_array ** array); +#endif diff -Nru osm2pgsql-0.82.0/build_geometry.cpp osm2pgsql-0.86.0/build_geometry.cpp --- osm2pgsql-0.82.0/build_geometry.cpp 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/build_geometry.cpp 2014-10-25 06:42:27.000000000 +0000 @@ -23,8 +23,13 @@ #include #include #include +#include #include +#if defined(__CYGWIN__) +#define GEOS_INLINE +#endif + /* Need to know which geos version we have to work out which headers to include */ #include @@ -118,12 +123,30 @@ } } +// helper method to add the WKT for a geometry to the +// global wkts list - used primarily for polygons. +void add_wkt(geom_ptr &geom, double area) { + WKTWriter wktw; + std::string wkt = wktw.write(geom.get()); + wkts.push_back(wkt); + areas.push_back(area); +} + +// helper method to add the WKT for a line built from a +// coordinate sequence to the global wkts list. 
+void add_wkt_line(GeometryFactory &gf, std::auto_ptr &segment) { + WKTWriter wktw; + geom_ptr geom = geom_ptr(gf.createLineString(segment.release())); + std::string wkt = wktw.write(geom.get()); + wkts.push_back(wkt); + areas.push_back(0); + segment.reset(gf.getCoordinateSequenceFactory()->create((size_t)0, (size_t)2)); +} size_t get_wkt_split(osmNode *nodes, int count, int polygon, double split_at) { GeometryFactory gf; std::auto_ptr coords(gf.getCoordinateSequenceFactory()->create((size_t)0, (size_t)2)); double area; - WKTWriter wktw; size_t wkt_size = 0; try @@ -148,10 +171,8 @@ } geom->normalize(); // Fix direction of ring area = geom->getArea(); - std::string wkt = wktw.write(geom.get()); - wkts.push_back(wkt); - areas.push_back(area); - wkt_size++; + add_wkt(geom, area); + } else { if (coords->getSize() < 2) return 0; @@ -161,21 +182,47 @@ segment = std::auto_ptr(gf.getCoordinateSequenceFactory()->create((size_t)0, (size_t)2)); segment->add(coords->getAt(0)); for(unsigned i=1; igetSize(); i++) { - segment->add(coords->getAt(i)); - distance += coords->getAt(i).distance(coords->getAt(i-1)); - if ((distance >= split_at) || (i == coords->getSize()-1)) { - geom = geom_ptr(gf.createLineString(segment.release())); - std::string wkt = wktw.write(geom.get()); - wkts.push_back(wkt); - areas.push_back(0); - wkt_size++; - distance=0; - segment = std::auto_ptr(gf.getCoordinateSequenceFactory()->create((size_t)0, (size_t)2)); - segment->add(coords->getAt(i)); + const Coordinate this_pt = coords->getAt(i); + const Coordinate prev_pt = coords->getAt(i-1); + const double delta = this_pt.distance(prev_pt); + // figure out if the addition of this point would take the total + // length of the line in `segment` over the `split_at` distance. 
+ const size_t splits = std::floor((distance + delta) / split_at); + + if (splits > 0) { + // use the splitting distance to split the current segment up + // into as many parts as necessary to keep each part below + // the `split_at` distance. + for (size_t i = 0; i < splits; ++i) { + double frac = (double(i + 1) * split_at - distance) / delta; + const Coordinate interpolated(frac * (this_pt.x - prev_pt.x) + prev_pt.x, + frac * (this_pt.y - prev_pt.y) + prev_pt.y); + segment->add(interpolated); + add_wkt_line(gf, segment); + segment->add(interpolated); + } + // reset the distance based on the final splitting point for + // the next iteration. + distance = segment->getAt(0).distance(this_pt); + + } else { + // if not split then just push this point onto the sequence + // being saved up. + distance += delta; + } + + // always add this point + segment->add(this_pt); + + // on the last iteration, close out the line. + if (i == coords->getSize()-1) { + add_wkt_line(gf, segment); } } } + // ensure the number of wkts in the global list is accurate. 
+ wkt_size = wkts.size(); } catch (std::bad_alloc) { @@ -195,12 +242,14 @@ char * get_wkt(size_t index) { // return wkts[index].c_str(); - char *result; - result = (char*) std::malloc( wkts[index].length() + 1); - // At least give some idea of why we about to seg fault - if (!result) std::cerr << std::endl << "Unable to allocate memory: " << (wkts[index].length() + 1) << std::endl; - std::strcpy(result, wkts[index].c_str()); - return result; + char *result; + result = (char*) std::malloc( wkts[index].length() + 1); + // At least give some idea of why we about to seg fault + if (!result) + std::cerr << std::endl << "Unable to allocate memory: " << (wkts[index].length() + 1) << std::endl; + else + std::strcpy(result, wkts[index].c_str()); + return result; } double get_area(size_t index) @@ -548,3 +597,44 @@ { excludepoly = 1; } + +char *get_multiline_geometry(osmid_t osm_id, struct osmNode **xnodes, int *xcount) { + std::auto_ptr > lines(new std::vector); + GeometryFactory gf; + geom_ptr geom; + + try + { + for (int c=0; xnodes[c]; c++) { + std::auto_ptr coords(gf.getCoordinateSequenceFactory()->create((size_t)0, (size_t)2)); + for (int i = 0; i < xcount[c]; i++) { + struct osmNode *nodes = xnodes[c]; + Coordinate c; + c.x = nodes[i].lon; + c.y = nodes[i].lat; + coords->add(c, 0); + } + if (coords->getSize() > 1) { + geom = geom_ptr(gf.createLineString(coords.release())); + lines->push_back(geom.release()); + } + } + + geom_ptr mline (gf.createMultiLineString(lines.release())); + WKTWriter writer; + + std::string wkt = writer.write(mline.get()); + return strdup(wkt.c_str()); + } + catch (std::exception& e) + { + std::cerr << std::endl << "Standard exception processing relation id "<< osm_id << ": " << e.what() << std::endl; + } + catch (...) 
+ { + std::cerr << std::endl << "Exception caught processing relation id " << osm_id << std::endl; + } + + return 0; +} + diff -Nru osm2pgsql-0.82.0/build_geometry.h osm2pgsql-0.86.0/build_geometry.h --- osm2pgsql-0.82.0/build_geometry.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/build_geometry.h 2014-10-25 06:42:27.000000000 +0000 @@ -39,6 +39,7 @@ size_t build_geometry(osmid_t osm_id, struct osmNode **xnodes, int *xcount, int make_polygon, int enable_multi, double split_at); void clear_wkts(); void exclude_broken_polygon (); +char *get_multiline_geometry(osmid_t osm_id, struct osmNode **xnodes, int *xcount); #ifdef __cplusplus } diff -Nru osm2pgsql-0.82.0/configure.ac osm2pgsql-0.86.0/configure.ac --- osm2pgsql-0.82.0/configure.ac 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/configure.ac 2014-10-25 06:42:27.000000000 +0000 @@ -1,11 +1,14 @@ dnl Process this file with autoconf to produce a configure script. -AC_INIT(osm2pgsql, 0.82.0) +AC_INIT(osm2pgsql, 0.86.0) dnl Required autoconf version AC_PREREQ(2.61) AX_CONFIG_NICE +dnl Allow maintainer mode to be disabled (debian build scripts do this) +AM_MAINTAINER_MODE([enable]) + dnl use automake to generate standard Makefiles AM_INIT_AUTOMAKE([1.9.6 dist-bzip2 std-options check-news]) @@ -118,9 +121,17 @@ dnl Check for pthread library AX_PTHREAD(,[AC_MSG_ERROR([no])]) +AX_PROG_LUA([5.0],[],[ + AX_LUA_HEADERS([ + AX_LUA_LIBS([ + AC_DEFINE([HAVE_LUA], [1], [Requirements for lua are met]) + HAVE_LUA=yes + ],[AC_MSG_WARN([cannot find Lua libs])]) + ],[AC_MSG_WARN([cannot find Lua includes])]) +],[AC_MSG_WARN([cannot find Lua interpreter])]) dnl Generate Makefile -AC_OUTPUT(Makefile legacy/Makefile) +AC_OUTPUT(Makefile) if test "$BUILD_READER_PBF" != "yes" then @@ -131,3 +142,14 @@ Look for packages named: libprotobuf-c0-dev protobuf-c-compiler ]) fi + +if test "$HAVE_LUA" != "yes" +then + AC_MSG_WARN([ +lua libraries not found. You will NOT be able to use lua scripts for tag transform. 
+ +To enable lua support, the lua interpreter and libraries are required. +Look for packages named: lua5.2 liblua5.2-dev +]) +fi + diff -Nru osm2pgsql-0.82.0/cygpkg.sh osm2pgsql-0.86.0/cygpkg.sh --- osm2pgsql-0.82.0/cygpkg.sh 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/cygpkg.sh 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,59 @@ +#!/bin/bash +# +# Author: Jason Huntley +# Email: onepremise@gmail.com +# Description: Cygwin Package script +# +# Change Log +# +# Date Description Initials +#------------------------------------------------------------- +# 04-11-13 Initial Coding JAH +#============================================================= + +if [ ! -e "cygwin-package" ]; then + mkdir cygwin-package +fi + +echo +echo Copying Executable... +echo + +cp -rfv default.style cygwin-package || { stat=$?; echo "Packaging failed, aborting" >&2; exit $stat; } +cp -rfv 900913.sql cygwin-package || { stat=$?; echo "Packaging failed, aborting" >&2; exit $stat; } +cp -rfv README cygwin-package || { stat=$?; echo "Packaging failed, aborting" >&2; exit $stat; } +cp -rfv .libs/osm2pgsql.exe cygwin-package || { stat=$?; echo "Packaging failed, aborting" >&2; exit $stat; } + +echo +echo Copying Dependent Libraries... 
+echo + +cp -rfv /bin/cygcrypt*.dll cygwin-package +cp -rfv /bin/cyggcc*.dll cygwin-package +cp -rfv /usr/local/bin/cyggeos*.dll cygwin-package +cp -rfv /bin/cygiconv*.dll cygwin-package +cp -rfv /bin/cygintl*.dll cygwin-package +cp -rfv /bin/cyglber*.dll cygwin-package +cp -rfv /bin/cygldap*.dll cygwin-package +cp -rfv /bin/cyglzma*.dll cygwin-package +cp -rfv /bin/cygpq*.dll cygwin-package +cp -rfv /usr/local/bin/cygproj*.dll cygwin-package +cp -rfv /usr/local/bin/cygproto*.dll cygwin-package +cp -rfv /bin/cygsasl*.dll cygwin-package +cp -rfv /bin/cygssl*.dll cygwin-package +cp -rfv /bin/cygstdc++**.dll cygwin-package +cp -rfv /bin/cygwin*.dll cygwin-package +cp -rfv /bin/cygxml2*.dll cygwin-package +cp -rfv /bin/cygz*.dll cygwin-package + +echo +echo Creating Archive... +echo + +zip -r9 cygwin-package.zip cygwin-package + +echo +echo Packaging Complete. +echo + +exit 0 diff -Nru osm2pgsql-0.82.0/debian/changelog osm2pgsql-0.86.0/debian/changelog --- osm2pgsql-0.82.0/debian/changelog 2013-12-18 07:47:00.000000000 +0000 +++ osm2pgsql-0.86.0/debian/changelog 2015-05-20 13:58:46.000000000 +0000 @@ -1,13 +1,58 @@ -osm2pgsql (0.82.0-1git1) trusty; urgency=low +osm2pgsql (0.86.0-1~gekkio1~trusty1) trusty; urgency=medium - Upload current Debian git head for the postgis transition. + * Backport to Trusty + * Remove libprotobuf-c-dev dependency, which breaks the build for some + reason - [ Bas Couwenberg ] - * Team upload. + -- Joonas Javanainen Wed, 20 May 2015 16:48:22 +0300 + +osm2pgsql (0.86.0-1) unstable; urgency=medium + + * New upstream release. + * Remove 00-fix_build.patch, applied upstream. + * Remove 01-hyphen-used-as-minus-sign.patch, applied upstream. + * Remove 02-spelling-error-in-manpage.patch, applied upstream. + * Remove 03-fix-build-for-protobuf-c-1.0.0.patch, applied upstream. + * Remove 04-api-changes-for-protobuf-c-1.0.0.patch, applied upstream. + * Add python build dependencies for updated regression test script. 
+ * Update docs to install README.md. + * Bump Standards-Version to 3.9.6, no changes. + + -- Bas Couwenberg Sat, 25 Oct 2014 13:15:52 +0200 + +osm2pgsql (0.84.0-3) unstable; urgency=medium + + * Only recommend postgis, now that postgis recommends the postgresql + specific shared library. + (closes: #757972) + + -- Bas Couwenberg Sun, 17 Aug 2014 15:11:05 +0200 + +osm2pgsql (0.84.0-2) unstable; urgency=medium + + * Add patches for protobuf-c 1.0.0 support. + Thanks to Robert Edmonds for the patches. + (closes: #755553) + + -- Bas Couwenberg Sat, 26 Jul 2014 13:33:01 +0200 + +osm2pgsql (0.84.0-1) unstable; urgency=low + + * New upstream release. + * Add myself to Uploaders. * Update postgis Recommends to postgresql-9.3-postgis-2.1. - (closes: #732413) + (closes: #732415) + * Add gbp.conf to use pristine-tar by default. + * Refresh patches. + * Drop get-orig-source target, upstream moved from OSM SVN to GitHub. + * Add build dependency on liblua5.2-dev and lua5.2. + * Append CPPFLAGS to CFLAGS & CXXFLAGS to use -D_FORTIFY_SOURCE=2. + * Add patch to fix hyphen-used-as-minus-sign warnings. + * Add patch to fix spelling-error-in-manpage warning. + * Bump Standards-Version to 3.9.5, no changes. + * Enable parallel builds. 
- -- Martin Pitt Wed, 18 Dec 2013 08:46:43 +0100 + -- Bas Couwenberg Wed, 30 Apr 2014 13:05:29 +0200 osm2pgsql (0.82.0-1) unstable; urgency=low diff -Nru osm2pgsql-0.82.0/debian/control osm2pgsql-0.86.0/debian/control --- osm2pgsql-0.82.0/debian/control 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/control 2015-05-20 13:56:17.000000000 +0000 @@ -2,7 +2,8 @@ Maintainer: Debian GIS Project Uploaders: Francesco Paolo Lovergine , David Paleino , - Andreas Tille + Andreas Tille , + Bas Couwenberg Section: utils Priority: optional Build-Depends: debhelper (>= 9), @@ -14,8 +15,12 @@ libxml2-dev, libproj-dev, libprotobuf-c0-dev (>= 0.14), - protobuf-c-compiler -Standards-Version: 3.9.4 + protobuf-c-compiler, + liblua5.2-dev, + lua5.2, + python, + python-psycopg2 +Standards-Version: 3.9.6 Vcs-Browser: http://anonscm.debian.org/gitweb/?p=pkg-grass/osm2pgsql.git Vcs-Git: git://anonscm.debian.org/pkg-grass/osm2pgsql.git Homepage: http://wiki.openstreetmap.org/wiki/Osm2pgsql @@ -24,10 +29,7 @@ Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} -Recommends: postgis, - postgresql-9.3-postgis-2.1 -Suggests: josm, - gosmore +Recommends: postgis Description: OpenStreetMap data to PostgreSQL converter Convert OSM planet snapshot data to SQL suitable for loading into a PostgreSQL database with PostGIS geospatial extensions. 
This diff -Nru osm2pgsql-0.82.0/debian/copyright osm2pgsql-0.86.0/debian/copyright --- osm2pgsql-0.82.0/debian/copyright 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/copyright 2014-10-25 16:24:01.000000000 +0000 @@ -25,6 +25,10 @@ © 1996-2006, PostgreSQL Global Development Group License: other +Files: wildcmp.c +Copyright: © 2002, Jim Kent +License: GPL-2+ + License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by diff -Nru osm2pgsql-0.82.0/debian/docs osm2pgsql-0.86.0/debian/docs --- osm2pgsql-0.82.0/debian/docs 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/docs 2014-10-25 16:24:01.000000000 +0000 @@ -1,4 +1,4 @@ AUTHORS NEWS -README +README.md TODO diff -Nru osm2pgsql-0.82.0/debian/gbp.conf osm2pgsql-0.86.0/debian/gbp.conf --- osm2pgsql-0.82.0/debian/gbp.conf 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/debian/gbp.conf 2014-10-25 16:24:01.000000000 +0000 @@ -0,0 +1,16 @@ +[DEFAULT] + +# The default name for the upstream branch is "upstream". +# Change it if the name is different (for instance, "master"). +upstream-branch = upstream + +# The default name for the Debian branch is "master". +# Change it if the name is different (for instance, "debian/unstable"). +debian-branch = master + +# git-import-orig uses the following names for the upstream tags. +# Change the value if you are not using git-import-orig +upstream-tag = upstream/%(version)s + +# Always use pristine-tar. 
+pristine-tar = True diff -Nru osm2pgsql-0.82.0/debian/patches/00-fix_build.patch osm2pgsql-0.86.0/debian/patches/00-fix_build.patch --- osm2pgsql-0.82.0/debian/patches/00-fix_build.patch 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/patches/00-fix_build.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -From: David Paleino -Subject: add missing ZLIB linkage -Last-Update: 2013-09-17 23:21:42 +0200 -Forwarded: no - ---- a/Makefile.am -+++ b/Makefile.am -@@ -38,7 +38,7 @@ endif - - osm2pgsqldir = $(datadir)/osm2pgsql - --AM_CFLAGS = @PTHREAD_CFLAGS@ @LFS_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ @PROTOBUF_C_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -DVERSION='"@PACKAGE_VERSION@"' -+AM_CFLAGS = @PTHREAD_CFLAGS@ @LFS_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ @PROTOBUF_C_CFLAGS@ @ZLIB_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -DVERSION='"@PACKAGE_VERSION@"' - AM_CPPFLAGS = @PTHREAD_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -Igeos-fallback - - AM_LDFLAGS = @PTHREAD_CFLAGS@ @ZLIB_LDFLAGS@ @ZLIB_LIBS@ @POSTGRESQL_LDFLAGS@ @POSTGRESQL_LIBS@ @XML2_LDFLAGS@ @BZIP2_LDFLAGS@ @BZIP2_LIBS@ @GEOS_LDFLAGS@ @GEOS_LIBS@ @PROJ_LDFLAGS@ @PROJ_LIBS@ @PROTOBUF_C_LDFLAGS@ @PROTOBUF_C_LIBS@ diff -Nru osm2pgsql-0.82.0/debian/patches/series osm2pgsql-0.86.0/debian/patches/series --- osm2pgsql-0.82.0/debian/patches/series 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -00-fix_build.patch diff -Nru osm2pgsql-0.82.0/debian/rules osm2pgsql-0.86.0/debian/rules --- osm2pgsql-0.82.0/debian/rules 2013-12-18 07:46:14.000000000 +0000 +++ osm2pgsql-0.86.0/debian/rules 2014-10-25 16:24:01.000000000 +0000 @@ -4,31 +4,18 @@ # Uncomment this to turn on verbose mode. 
#export DH_VERBOSE=1 -SVNREPO := http://svn.openstreetmap.org/applications/utils/export/osm2pgsql -VERSION := $(shell dpkg-parsechangelog | grep Version | cut -d' ' -f2) -UPVER := $(shell echo $(VERSION) | awk -F'+r' '{print $$1}') -SVNREV := $(shell echo $(VERSION) | awk -F'+r' '{print $$2}' | cut -d- -f1) -SUFFIX := +r$(SVNREV) +CFLAGS += $(CPPFLAGS) +CXXFLAGS += $(CPPFLAGS) %: dh $@ \ - --with autoreconf + --with autoreconf \ + --parallel + +override_dh_auto_configure: + dh_auto_configure -- CFLAGS="$(CFLAGS)" CPPFLAGS="$(CPPFLAGS)" CXXFLAGS="$(CXXFLAGS)" override_dh_compress: # clean up some naive file permissions dh_compress -X.php -X.sql -X.js -X.c -X.h -get-orig-source: - @echo Downloading osm2pgsql from ${SVNREPO} - svn export -r ${SVNREV} ${SVNREPO} osm2pgsql-${UPVER}${SUFFIX}/ - @echo Removing debian/ directory - rm -rf osm2pgsql-${UPVER}${SUFFIX}/debian/ - @echo Building snapshot tarball. - tar czvf osm2pgsql_${UPVER}${SUFFIX}.orig.tar.gz osm2pgsql-${UPVER}${SUFFIX} - @echo Cleaning up - rm -rf osm2pgsql-${UPVER}${SUFFIX} - @echo . - @echo To update debian/changelog type - @echo dch -v ${UPVER}${SUFFIX}-1 - @echo . - diff -Nru osm2pgsql-0.82.0/default.style osm2pgsql-0.86.0/default.style --- osm2pgsql-0.82.0/default.style 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/default.style 2014-10-25 06:42:27.000000000 +0000 @@ -1,124 +1,226 @@ -# This is the style file that matches the old version of osm2pgsql, which -# did not make distinctions between tags for nodes and for ways. There are a -# number of optimisations that can be applied here. Firstly, certain tags -# only apply to only nodes or only ways. By fixing this we reduce the amount -# of useless data loaded into the DB, which is a good thing. Possible -# optimisations for the future: - -# 1. Generate this file directly from the mapnik XML config, so it's always -# optimal - -# 2. 
Extend it so it can understand that highway=tertiary is for ways and -# highway=bus_stop is for nodes - -# Flags field isn't used much yet, expect if it contains the text "polygon" -# it indicates the shape is candidate for the polygon table. In the future I -# would like to be able to add directives like "nocache" which tells -# osm2pgsql that it is unlikely this node will be used by a way and so it -# doesn't need to be stored (eg coastline nodes). While in essence an -# optimisation hack, for --slim mode it doesn't matter if you're wrong, but -# in non-slim you might break something! - -# Also possibly an ignore flag, for things like "note" and "source" which -# can simply be deleted. (In slim mode this is, does not apply to non-slim -# obviously) - -# OsmType Tag DataType Flags -node,way note text delete # These tags can be long but are useless for rendering -node,way source text delete # This indicates that we shouldn't store them -node,way created_by text delete - -node,way access text linear -node,way addr:housename text linear -node,way addr:housenumber text linear -node,way addr:interpolation text linear -node,way admin_level text linear -node,way aerialway text linear -node,way aeroway text polygon -node,way amenity text nocache,polygon -node,way area text # hard coded support for area=1/yes => polygon is in osm2pgsql -node,way barrier text linear -node,way bicycle text nocache -node,way brand text linear -node,way bridge text linear -node,way boundary text linear -node,way building text polygon -node capital text linear -node,way construction text linear -node,way covered text linear -node,way culvert text linear -node,way cutting text linear -node,way denomination text linear -node,way disused text linear -node ele text linear -node,way embankment text linear -node,way foot text linear -node,way generator:source text linear -node,way harbour text polygon -node,way highway text linear -node,way historic text polygon -node,way horse text linear -node,way 
intermittent text linear -node,way junction text linear -node,way landuse text polygon -node,way layer text linear -node,way leisure text polygon -node,way lock text linear -node,way man_made text polygon -node,way military text polygon -node,way motorcar text linear -node,way name text linear -node,way natural text polygon # natural=coastline tags are discarded by a hard coded rule in osm2pgsql -node,way office text polygon -node,way oneway text linear -node,way operator text linear -node,way place text polygon -node poi text -node,way population text linear -node,way power text polygon -node,way power_source text linear -node,way public_transport text polygon -node,way railway text linear -node,way ref text linear -node,way religion text nocache -node,way route text linear -node,way service text linear -node,way shop text polygon -node,way sport text polygon -node,way surface text linear -node,way toll text linear -node,way tourism text polygon -node,way tower:type text linear -way tracktype text linear -node,way tunnel text linear -node,way water text polygon -node,way waterway text polygon -node,way wetland text polygon -node,way width text linear -node,way wood text linear -node,way z_order int4 linear # This is calculated during import -way way_area real # This is calculated during import - -# If you're interested in bicycle routes, you may want the following fields -# To make these work you need slim mode or the necessary data won't be remembered. 
-#way lcn_ref text linear -#way rcn_ref text linear -#way ncn_ref text linear -#way lcn text linear -#way rcn text linear -#way ncn text linear -#way lwn_ref text linear -#way rwn_ref text linear -#way nwn_ref text linear -#way lwn text linear -#way rwn text linear -#way nwn text linear -#way route_pref_color text linear -#way route_name text linear - -# The following entries can be used with the --extra-attributes option -# to include the username, userid, version & timstamp in the DB -#node,way osm_user text -#node,way osm_uid text -#node,way osm_version text -#node,way osm_timestamp text +# This is the default osm2pgsql .style file that comes with osm2pgsql. +# +# A .style file has 4 columns that define how OSM objects end up in tables in +# the database and what columns are created. It interacts with the command-line +# hstore options. +# +# Columns +# ======= +# +# OsmType: This is either "node", "way" or "node,way" and indicates if this tag +# applies to nodes, ways, or both. +# +# Tag: The tag +# +# DataType: The type of the column to be created. Normally "text" +# +# Flags: Flags that indicate what table the OSM object is moved into. +# +# There are 5 possible flags. These flags are used both to indicate if a column +# should be created, and if ways with the tag are assumed to be areas. The area +# assumptions can be overridden with an area=yes/no tag +# +# polygon - Create a column for this tag, and objects the tag with are areas +# +# linear - Create a column for this tag +# +# phstore - Don't create a column for this tag, but objects with the tag are areas +# +# delete - Drop this tag completely and don't create a column for it. This also +# prevents the tag from being added to hstore columns +# +# nocache - Deprecated and does nothing +# +# If an object has a tag that indicates it is an area or has area=yes/1, +# osm2pgsql will try to turn it into an area. If it succeeds, it places it in +# the polygon table. If it fails (e.g. 
not a closed way) it places it in the +# line table. +# +# Nodes are never placed into the polygon or line table and are always placed in +# the point table. +# +# Hstore +# ====== +# +# The options --hstore, --hstore-match-only, and --hstore-all interact with +# the .style file. +# +# With --hstore any tags without a column will be added to the hstore column. +# This will also cause all objects to be kept. +# +# With --hstore-match-only the behavior for tags is the same, but objects are +# only kept if they have a non-NULL value in one of the columns. +# +# With --hstore-all all tags are added to the hstore column unless they appear +# in the style file with a delete flag, causing duplication between the normal +# columns and the hstore column. +# +# Special database columns +# ======================== +# +# There are some special database columns that if present in the .style file +# will be populated by osm2pgsql. +# +# These are +# +# z_order - datatype int4 +# +# way_area - datatype real. The area of the way, in the units of the projection +# (e.g. square mercator meters). Only applies to areas +# +# osm_user, osm_uid, osm_version, osm_timestamp - datatype text. Used with the +# --extra-attributes option to include metadata in the database. If importing +# with both --hstore and --extra-attributes the meta-data will end up in the +# tags hstore column regardless of the style file. 
+ +# OsmType Tag DataType Flags +node,way access text linear +node,way addr:housename text linear +node,way addr:housenumber text linear +node,way addr:interpolation text linear +node,way admin_level text linear +node,way aerialway text linear +node,way aeroway text polygon +node,way amenity text polygon +node,way area text # hard coded support for area=1/yes => polygon is in osm2pgsql +node,way barrier text linear +node,way bicycle text +node,way brand text linear +node,way bridge text linear +node,way boundary text linear +node,way building text polygon +node capital text linear +node,way construction text linear +node,way covered text linear +node,way culvert text linear +node,way cutting text linear +node,way denomination text linear +node,way disused text linear +node ele text linear +node,way embankment text linear +node,way foot text linear +node,way generator:source text linear +node,way harbour text polygon +node,way highway text linear +node,way historic text polygon +node,way horse text linear +node,way intermittent text linear +node,way junction text linear +node,way landuse text polygon +node,way layer text linear +node,way leisure text polygon +node,way lock text linear +node,way man_made text polygon +node,way military text polygon +node,way motorcar text linear +node,way name text linear +node,way natural text polygon # natural=coastline tags are discarded by a hard coded rule in osm2pgsql +node,way office text polygon +node,way oneway text linear +node,way operator text linear +node,way place text polygon +node poi text +node,way population text linear +node,way power text polygon +node,way power_source text linear +node,way public_transport text polygon +node,way railway text linear +node,way ref text linear +node,way religion text nocache +node,way route text linear +node,way service text linear +node,way shop text polygon +node,way sport text polygon +node,way surface text linear +node,way toll text linear +node,way tourism text polygon 
+node,way tower:type text linear +way tracktype text linear +node,way tunnel text linear +node,way water text polygon +node,way waterway text polygon +node,way wetland text polygon +node,way width text linear +node,way wood text linear +node,way z_order int4 linear # This is calculated during import +way way_area real # This is calculated during import + +# Area tags +# We don't make columns for these tags, but objects with them are areas. +# Mainly for use with hstore +way abandoned:aeroway text phstore +way abandoned:amenity text phstore +way abandoned:building text phstore +way abandoned:landuse text phstore +way abandoned:power text phstore +way area:highway text phstore + +# Deleted tags +# These are tags that are generally regarded as useless for most rendering. +# Most of them are from imports or intended as internal information for mappers +# Some of them are automatically deleted by editors. +# If you want some of them, perhaps for a debugging layer, just delete the lines. + +# These tags are used by mappers to keep track of data. +# They aren't very useful for rendering. 
+node,way note text delete +node,way note:* text delete +node,way source text delete +node,way source_ref text delete +node,way source:* text delete +node,way attribution text delete +node,way comment text delete +node,way fixme text delete + +# Tags generally dropped by editors, not otherwise covered +node,way created_by text delete +node,way odbl text delete +node,way odbl:note text delete +node,way SK53_bulk:load text delete + +# Lots of import tags +# TIGER (US) +node,way tiger:* text delete + +# NHD (US) +# NHD has been converted every way imaginable +node,way NHD:* text delete +node,way nhd:* text delete + +# GNIS (US) +node,way gnis:* text delete + +# Geobase (CA) +node,way geobase:* text delete +# NHN (CA) +node,way accuracy:meters text delete +node,way sub_sea:type text delete +node,way waterway:type text delete + +# KSJ2 (JA) +# See also note:ja and source_ref above +node,way KSJ2:* text delete +# Yahoo/ALPS (JA) +node,way yh:* text delete + +# osak (DK) +node,way osak:* text delete + +# kms (DK) +node,way kms:* text delete + +# ngbe (ES) +# See also note:es and source:file above +node,way ngbe:* text delete + +# naptan (UK) +node,way naptan:* text delete + +# Corine (CLC) (Europe) +node,way CLC:* text delete + +# misc +node,way 3dshapes:ggmodelk text delete +node,way AND_nosr_r text delete +node,way import text delete +node,way it:fvg:* text delete diff -Nru osm2pgsql-0.82.0/docs/analysis.md osm2pgsql-0.86.0/docs/analysis.md --- osm2pgsql-0.82.0/docs/analysis.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/analysis.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,81 @@ +# Geospatial analysis with osm2pgsql # + +An osm2pgsql database and PostGIS is well-suited for geospatial analysis using +OpenStreetMap data where topology is not a consideration. 
+ +PostGIS provides an [extensive number of geometry functions](http://postgis.net/docs/manual-2.1/reference.html) +and a full description of how to perform analysis with them is beyond the +scope of a readme, but a simple example of finding the total road lengths by +classification for a municipality should help. + +To start with, we'll download the data for the region as an [extract from Geofabrik](http://download.geofabrik.de/) and import it with osm2pgsql. + + osm2pgsql --database gis --number-processes 4 --multi-geometry british-columbia-latest.osm.pbf + +``--multi-geometry`` (``-G``) is necessary for most analysis as it prevents +MULTIPOLYGONs from being split into multiple POLYGONs, a step that is +normally used to [increase rendering speed](http://paulnorman.ca/blog/2014/03/osm2pgsql-multipolygons) +but increases the complexity of analysis SQL. + +Loading should take about 10 minutes, depending on computer speed. Once this +is done we'll open a PostgreSQL terminal with ``psql -d gis``, although a GUI +like pgadmin or any standard tool could be used instead. + +To start, we'll create a partial index to speed up highway queries. + +```sql +CREATE INDEX planet_osm_line_highways_index ON planet_osm_line USING GiST (way) WHERE (highway IS NOT NULL); +``` + +We'll first find the ID of the polygon we want + +```sql +gis=# SELECT osm_id FROM planet_osm_polygon +WHERE boundary='administrative' AND admin_level='8' AND name='New Westminster'; + osm_id +---------- + -1377803 +``` + +The negative sign tells us that the geometry is from a relation, and checking +on [the OpenStreetMap site](https://www.openstreetmap.org/relation/1377803) +confirms which it is. + +We want to find all the roads in the city and get the length of the portion in +the city, sorted by road classification. Roads are in the ``planet_osm_line`` +table, not the ``planet_osm_roads`` table which is only has a subset of data +for low-zoom rendering. 
+ +```sql +gis=# SELECT + round(SUM( + ST_Length(ST_Transform( + ST_Intersection(way, (SELECT way FROM planet_osm_polygon WHERE osm_id=-1377803)) + ,4326)::geography) + )) AS "distance (meters)", highway AS "highway type" + FROM planet_osm_line + WHERE highway IS NOT NULL + AND ST_Intersects(way, (SELECT way FROM planet_osm_polygon WHERE osm_id=-1377803)) + GROUP BY highway + ORDER BY "distance (meters)" DESC + LIMIT 10; + distance (meters) | highway type +-------------------+--------------- + 138122 | residential + 79519 | service + 51890 | footway + 25610 | tertiary + 23434 | secondary + 14900 | cycleway + 6468 | primary + 5217 | motorway + 4389 | motorway_link + 3728 | track +``` + +The ``ST_Transform(...,4326)::geography`` is necessary because the data was +imported in Mercator. This step could have been avoided by importing in a local +projection like a suitable UTM projection. + +More complicated analyses can be completed, but this simple example shows how +to use the tables and put conditions on the columns. \ No newline at end of file diff -Nru osm2pgsql-0.82.0/docs/export.md osm2pgsql-0.86.0/docs/export.md --- osm2pgsql-0.82.0/docs/export.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/export.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,12 @@ +# Exporting with osm2pgsql # + +Osm2pgsql can be used in combination with [ogr2ogr](http://www.gdal.org/ogr2ogr.html) and a [PostgreSQL data source](http://www.gdal.org/drv_pg.html). + +An example command to export to GeoJSON would be + + ogr2ogr -f "GeoJSON" roads.geojson -t_srs EPSG:4326 \ + PG:"dbname=gis" -s_srs EPSG:900913 \ + -sql "SELECT name,highway,oneway,toll,way FROM planet_osm_line WHERE highway IS NOT NULL" + +Care should be taken if exporting to shapefiles, as characters may be present +which cannot be represented in ISO-8859-1, the standard encoding for shapefiles. 
diff -Nru osm2pgsql-0.82.0/docs/gazetteer.md osm2pgsql-0.86.0/docs/gazetteer.md --- osm2pgsql-0.82.0/docs/gazetteer.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/gazetteer.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,7 @@ +# Gazetteer Backend # + +The gazetteer backend is designed for use with +[Nominatim](http://wiki.openstreetmap.org/wiki/Nominatim) +and will not generally be used outside that context. + +The tables are designed for a hierarchy of places. diff -Nru osm2pgsql-0.82.0/docs/lua.md osm2pgsql-0.86.0/docs/lua.md --- osm2pgsql-0.82.0/docs/lua.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/lua.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,57 @@ +# Lua tag transformations + +osm2pgsql supports [Lua](http://lua.org/) scripts to rewrite tags before they enter the database. + +This allows you to unify disparate tagging (for example, `highway=path; foot=yes` and `highway=footway`) and perform complex queries, potentially more efficiently than writing them as rules in your Mapnik or other stylesheet. + +## How to + +Pass a Lua script to osm2pgsql using the command line switch `--tag-transform-script`: + + osm2pgsql -S your.style --tag-transform-script your.lua --hstore-all extract.osm.pbf + +This Lua script needs to implement the following functions: + + function filter_tags_node(tags, num_tags) + return filter, tags + + function filter_tags_way(tags, num_tags) + return filter, tags, polygon, roads + + function filter_basic_tags_rel(tags, num_tags) + return filter, tags + +These take a set of tags as a Lua key-value table, and an integer which is the number of tags supplied. + +The first return value is `filter`, a flag which you should set to `1` if the way/node/relation should be filtered out and not added to the database, `0` otherwise. (They will still end up in the slim mode tables, but not in the rendering tables) + +The second return value is `tags`, a transformed (or unchanged) set of tags. 
+ +`filter_tags_way` returns two additional flags. `poly` should be `1` if the way should be treated as a polygon, `0` as a line. `roads` should be `1` if the way should be added to the planet_osm_roads table, `0` otherwise. + + function filter_tags_relation_member(tags, member_tags, + roles, num_members) + return filter, tags, member_superseded, boundary, + polygon, roads + +The function filter_tags_relation_member is more complex and can handle more advanced relation tagging, such as multipolygons that take their tags from the member ways. + +This function is called with the tags from the relation; an set of tags for each of the member ways (member relations and nodes are ignored); the set of roles for each of the member ways; and the number of members. The tag and role sets are both arrays (indexed tables) of hashes (tables). + +As usual, it should return a filter flag, and a transformed set of tags to be applied to the relation in later processing. + +The third return value, `member_superseded`, is a flag set to `1` if the way has now been dealt with (e.g. outer ways in multipolygon relations, which are superseded by the multipolygon geometry), `0` if it needs to have its own entry in the database (e.g. tagged inner ways). + +The fourth and fifth return values, `boundary` and `polygon`, are flags that specify if the relation should be processed as a line, a polygon, or both (e.g. administrative boundaries). + +The final return value, `roads`, is `1` if the geometry should be added to the `planet_osm_roads` table. + +There is a sample tag transform lua script in the repository as an example, which (nearly) replicates current processing and can be used as a template for one's own scripts. + +## In practice + +There is inevitably a performance hit with any extra processing. The sample Lua tag transformation is a little slower than the C-based default. However, extensive Lua pre-processing may save you further processing in your Mapnik (or other) stylesheet. 
+ +Test your Lua script with small excerpts before applying it to a whole country or even the planet. + +Where possible, add new tags, don't replace existing ones; otherwise you will be faced with a reimport if you decide to change your transformation. diff -Nru osm2pgsql-0.82.0/docs/nodecachefilereader.1 osm2pgsql-0.86.0/docs/nodecachefilereader.1 --- osm2pgsql-0.82.0/docs/nodecachefilereader.1 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/docs/nodecachefilereader.1 2014-10-25 06:42:27.000000000 +0000 @@ -18,7 +18,7 @@ command. .PP .B nodecachefilereader -allows you to inspect and test osm2pgsql's custome node database. +allows you to inspect and test osm2pgsql's custom node database. .PP .SH OPTIONS If only the filename of the node cache is given, nodecachefilereader @@ -34,4 +34,4 @@ .SH AUTHOR nodecachefilereader was written by Kai Krueger and other OpenStreetMap project members. -.PP \ No newline at end of file +.PP diff -Nru osm2pgsql-0.82.0/docs/osm2pgsql.1 osm2pgsql-0.86.0/docs/osm2pgsql.1 --- osm2pgsql-0.82.0/docs/osm2pgsql.1 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/docs/osm2pgsql.1 2014-10-25 06:42:27.000000000 +0000 @@ -36,7 +36,7 @@ .PP .SH OPTIONS These programs follow the usual GNU command line syntax, with long -options starting with two dashes (`-'). +options starting with two dashes (`\-'). A summary of options is included below. .TP \fB\-a\fR|\-\-append @@ -44,9 +44,9 @@ existing data. .TP \fB\-b\fR|\-\-bbox -Apply a bounding box filter on the imported data +Apply a bounding box filter on the imported data. Must be specified as: minlon,minlat,maxlon,maxlat -e.g. \fB\-\-bbox\fR \fB\-0\fR.5,51.25,0.5,51.75 +e.g. \fB\-\-bbox\fR \fB\-0.5,51.25,0.5,51.75\fR .TP \fB\-c\fR|\-\-create Remove existing data from the database. This is the @@ -58,49 +58,49 @@ .TP \fB\-i\fR|\-\-tablespace\-index tablespacename Store all indices in a separate PostgreSQL tablespace named by this parameter. -This allows to e.g. 
store the indices on faster storage like SSDs +This allows one to e.g. store the indices on faster storage like SSDs. .TP \fB\ \fR\-\-tablespace\-main\-data tablespacename -Store the data tables (non slim) in the given tablespace +Store the data tables (non slim) in the given tablespace. .TP \fB\ \fR\-\-tablespace\-main\-index tablespacename -Store the indices of the main tables (non slim) in the given tablespace +Store the indices of the main tables (non slim) in the given tablespace. .TP \fB\ \fR\-\-tablespace\-slim\-data tablespacename -Store the slim mode tables in the given tablespace +Store the slim mode tables in the given tablespace. .TP \fB\ \fR\-\-tablespace\-slim\-index tablespacename -Store the indices of the slim mode tables in the given tablespace +Store the indices of the slim mode tables in the given tablespace. .TP \fB\-l\fR|\-\-latlong Store data in degrees of latitude & longitude. .TP \fB\-m\fR|\-\-merc -Store data in proper spherical Mercator (the default) +Store data in proper spherical Mercator (the default). .TP \fB\-M\fR|\-\-oldmerc -Store data in the legacy OSM Mercator format +Store data in the legacy OSM Mercator format. .TP \fB\-E\fR|\-\-proj num Use projection EPSG:num .TP \fB\-u\fR|\-\-utf8\-sanitize -Repair bad UTF8 input data (present in planet +Repair bad UTF\-8 input data (present in planet dumps prior to August 2007). Adds about 10% overhead. .TP \fB\-p\fR|\-\-prefix prefix_string -Prefix for table names (default planet_osm) +Prefix for table names (default: planet_osm). .TP \fB\-r\fR|\-\-input\-reader format Select input format reader. Available choices are \fBlibxml2\fR (default) and \fBprimitive\fR for OSM XML format files, \fBo5m\fR for o5m formatted file -and \fBpbf\fR for OSM PBF binary format (may not be available on all platforms) +and \fBpbf\fR for OSM PBF binary format (may not be available on all platforms). .TP \fB\-s\fR|\-\-slim Store temporary data in the database. 
Without this mode, all temporary data is stored in -RAM and if you do not have enough the import will not succeed successfully. With slim mode, +RAM and if you do not have enough the import will not work successfully. With slim mode, you should be able to import the data even on a system with limited RAM, although if you -do no have enough RAM to cache at least all of the nodes, the time to import the data +do not have enough RAM to cache at least all of the nodes, the time to import the data will likely be greatly increased. .TP \fB\ \fR\-\-drop @@ -109,7 +109,7 @@ size, if not slightly bigger than the main tables. It does not, however, reduce the maximum spike of disk usage during import. It can furthermore increase the import speed, as no indices need to be created for the slim mode tables, which (depending on hardware) -can nearly half import time. Slim mode tables however have to be persistent if you want +can nearly halve import time. Slim mode tables however have to be persistent if you want to be able to update your database, as these tables are needed for diff processing. .TP \fB\-S\fR|\-\-style /path/to/style @@ -119,11 +119,11 @@ \fB\-C\fR|\-\-cache num Only for slim mode: Use up to num many MB of RAM for caching nodes. Giving osm2pgsql sufficient cache to store all imported nodes typically greatly increases the speed of the import. Each cached node -requires 8 bytes of cache, plus about 10% - 30% overhead. For a current OSM full planet import with +requires 8 bytes of cache, plus about 10% \- 30% overhead. For a current OSM full planet import with its ~ 1.9 billion nodes, a good value would be 17000 if you have enough RAM. If you don't have enough RAM, it is likely beneficial to give osm2pgsql close to the full available amount of RAM. Defaults to 800. .TP -\fB\ \fR\-\-cache-strategy strategy +\fB\ \fR\-\-cache\-strategy strategy There are a number of different modes in which osm2pgsql can organize its node cache in RAM. 
These are optimized for different assumptions of the data and the hardware resources available. Currently available strategies are @@ -150,16 +150,16 @@ \fB\-P\fR|\-\-port num Database server port. .TP -\fB\-e\fR|\-\-expire-tiles [min_zoom-]max-zoom +\fB\-e\fR|\-\-expire\-tiles [min_zoom\-]max\-zoom Create a tile expiry list. .TP -\fB\-o\fR|\-\-expire-output /path/to/expire.list +\fB\-o\fR|\-\-expire\-output /path/to/expire.list Output file name for expired tiles list. .TP \fB\-o\fR|\-\-output -Specifies the output back-end or database schema to use. Currently +Specifies the output back\-end or database schema to use. Currently osm2pgsql supports \fBpgsql\fR, \fBgazetteer\fR and \fBnull\fR. \fBpgsql\fR is -the default output back-end / schema and is optimized for rendering with Mapnik. +the default output back\-end / schema and is optimized for rendering with Mapnik. \fBgazetteer\fR is a db schema optimized for geocoding and is used by Nominatim. \fBnull\fR does not write any output and is only useful for testing. .TP @@ -169,34 +169,34 @@ Note: this option also requires additional entries in your style file. .TP \fB\-k\fR|\-\-hstore -Add tags without column to an additional hstore (key/value) column to PostgreSQL tables +Add tags without column to an additional hstore (key/value) column to PostgreSQL tables. .TP \fB\-j\fR|\-\-hstore\-all -Add all tags to an additional history (key/value) column in PostgreSQL tables +Add all tags to an additional hstore (key/value) column in PostgreSQL tables. 
.TP \fB\-z\fR|\-\-hstore\-column key_name -Add an additional history (key/value) column containing all tags -that start with the specified string, egg --hstore-column "name:" will +Add an additional hstore (key/value) column containing all tags +that start with the specified string, eg \-\-hstore\-column "name:" will produce an extra hstore column that contains all name:xx tags .TP \fB\ \fR\-\-hstore\-match\-only Only keep objects that have a value in one of the columns -(normal action with --hstore is to keep all objects) +(normal action with \-\-hstore is to keep all objects). .TP -\fB\ \fR\-\-hstore-add-index +\fB\ \fR\-\-hstore\-add\-index Create indices for the hstore columns during import. .TP \fB\-G\fR|\-\-melts\-geometry -Normally osm2pgsql splits multi-part geometries into separate database rows per part. +Normally osm2pgsql splits multi\-part geometries into separate database rows per part. A single OSM id can therefore have several rows. With this option, PostgreSQL instead -generates multi-geometry features in the PostgreSQL tables. +generates multi\-geometry features in the PostgreSQL tables. .TP -\fB\-K\fR|\-\-keep-coastlines +\fB\-K\fR|\-\-keep\-coastlines Keep coastline data rather than filtering it out. By default natural=coastline tagged data will be discarded based on the assumption that post-processed Coastline Checker shape files will be used. .TP -\fB\ \fR\-\-exclude-invalid-polygon +\fB\ \fR\-\-exclude\-invalid\-polygon OpenStreetMap data is defined in terms of nodes, ways and relations and not in terms of actual geometric features. Osm2pgsql therefore tries to build postgis geometries out of this data representation. However not all ways and relations @@ -207,33 +207,33 @@ .TP \fB\ \fR\-\-unlogged Use postgresql's unlogged tables for storing data. This requires PostgreSQL 9.1 -or above. Data written to unlogged tables is not written to PostgreSQL's write-ahead log, +or above. 
Data written to unlogged tables is not written to PostgreSQL's write\-ahead log, which makes them considerably faster than ordinary tables. However, they are not -crash-safe: an unlogged table is automatically truncated after a crash or unclean shutdown. +crash\-safe: an unlogged table is automatically truncated after a crash or unclean shutdown. .TP \fB\ \fR\-\-number\-processes num Specifies the number of parallel processes used for certain operations. If disks are fast enough e.g. if you have an SSD, then this can greatly increase speed of -the "going over pending ways" and "going over pending relations" stages on a multi-core -server. +the "going over pending ways" and "going over pending relations" stages on a multi\-core +server. .TP -\fB\-I\fR|\-\-disable-parallel-indexing +\fB\-I\fR|\-\-disable\-parallel\-indexing By default osm2pgsql initiates the index building on all tables in parallel to increase performance. This can be disadvantages on slow disks, or if you don't have -enough ram for PostgreSQL to perform up to 7 parallel index building processes -(e.g. because maintenance_work_mem is set high) +enough RAM for PostgreSQL to perform up to 7 parallel index building processes +(e.g. because maintenance_work_mem is set high). .TP -\fB\ \fR\-\-flat-nodes /path/to/nodes.cache -The flat-nodes mode is a separate method to store slim mode node information on disk. +\fB\ \fR\-\-flat\-nodes /path/to/nodes.cache +The flat\-nodes mode is a separate method to store slim mode node information on disk. Instead of storing this information in the main PostgreSQL database, this mode creates its own separate custom database to store the information. As this custom database has application level knowledge about the data to store and is not general purpose, -it can store the data much more efficient. Storing the node information for the full +it can store the data much more efficiently. 
Storing the node information for the full planet requires about 100GB in PostgreSQL, the same data is stored in only ~16GB using -the flat-nodes mode. This can also increase the speed of applying diff files. This option -activates the flat-nodes mode and specifies the location of the database file. It is a +the flat\-nodes mode. This can also increase the speed of applying diff files. This option +activates the flat\-nodes mode and specifies the location of the database file. It is a single large > 16GB file. This mode is only recommended for full planet imports -as it doesn't work well with small extracts. The default is disabled +as it doesn't work well with small extracts. The default is disabled. .TP \fB\-h\fR|\-\-help Help information. @@ -244,11 +244,11 @@ Verbose output. .PP .SH SUPPORTED PROJECTIONS -Latlong (-l) SRS: 4326 (none) -.br +Latlong (\-l) SRS: 4326 (none) +.br WGS84 Mercator ( ) SRS: 3395 +proj=merc +datum=WGS84 +k=1.0 +units=m +over +no_defs .br -Spherical Mercator (-m) SRS:900913 +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs +over +Spherical Mercator (\-m) SRS:900913 +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs +over .PP .SH SEE ALSO .BR proj (1), diff -Nru osm2pgsql-0.82.0/docs/pgsql.md osm2pgsql-0.86.0/docs/pgsql.md --- osm2pgsql-0.82.0/docs/pgsql.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/pgsql.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,54 @@ +# Pgsql Backend # + +The pgsql backend is designed for rendering OpenStreetMap data, principally +with Mapnik, but is also useful for [analysis](docs/analysis.md) and +[exporting](docs/exporting.md) to other formats. 
+ +## Database Layout ## +It connects to a PostgreSQL database and stores the data in four tables + +* ``planet_osm_point`` +* ``planet_osm_line`` +* ``planet_osm_roads`` +* ``planet_osm_polygon`` + +planet_osm_roads contains the data from other tables, but has tags selected +for low-zoom rendering. It does not only contain roads. + +The default prefix ``planet_osm`` can be changed with the ``--prefix`` option. + +If you are using ``--slim`` mode, it will create the following additional 3 +tables which are used by the pgsql middle layer, not the backend: + +* ``planet_osm_nodes`` +* ``planet_osm_ways`` +* ``planet_osm_rels`` + +With the ``--flat-nodes`` option, the ``planet_osm_nodes`` information is +instead stored in a binary file. + +## Importing ## + +1. Runs a parser on the input file and processes the nodes, ways and relations. + +2. If a node has a tag declared in the style file then it is added to + ``planet_osm_point``. Regardless of tags, its position is stored by the + middle layer. + +3. If there are tags on a way in the style file as linear but without polygon + tags, they are written into the lines and, depending on tags, roads tables. + + They are also stored by the middle layer. + +4. Ways without tags or with polygon tags are stored as "pending" in the + middle layer. + +5. Relations are parsed. In this stage, "new-style" multipolygon and boundary + relations are turned into polygons. Route relations are turned into + linestrings. + +6. "Pending" ways are processed, and they are either added as just the way, or + if a member of a multipolygon relation, they processed as multipolygons. + +7. Indexes are built. This may take substantial time, particularly for the + middle layer indexes created in non-slim mode. 
diff -Nru osm2pgsql-0.82.0/docs/usage.md osm2pgsql-0.86.0/docs/usage.md --- osm2pgsql-0.82.0/docs/usage.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/docs/usage.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,146 @@ +# Command-line usage # + +Osm2pgsql has one program, the executable itself, which has **43** command line +options. A full list of options can be obtained with ``osm2pgsql -h -v``. This +document provides an overview of options, and more importantly, why you might +use them. + +## Overall options + +* ``--append`` or ``--create`` specify if osm2pgsql is conducting a new import + or adding to an existing one. ``--slim`` is required with ``--append``. + +* ``--input-reader`` specifies the parser if the filetype can't be + automatically detected for some reason. The ``primitive`` parser is **not** + suggested. + +* ``--output`` specifies if the output backend is the default + [pgsql](pgsql.md), the [gazetteer](gazetteer.md) output used by Nominatim, or + null, which emits no output. + +## Performance + +Performance is heavily influenced by other options, but there are some options +that only impact performance. + +* ``--cache`` specifies how much memory to allocate for caching information. In + ``--slim`` mode, this is just node positions while in non-slim it has to + store information about ways and relations too. The maximum RAM it is useful + to set this to in slim mode is 8 bytes * number of nodes / efficiency, where + efficiency ranges from 50% on small extracts to 80% for a planet. + +* ``--number-processes`` sets the number of processes to use. This should + typically be set to the number of CPU threads, but gains in speed are minimal + past 8 threads. + +* ``--disable-parallel-indexing`` disables the clustering and indexing of all + tables in parallel. This reduces disk and ram requirements during the import, + but causes the last stages to take significantly longer. + +* ``--cache-strategy`` sets the cache strategy to use. 
The defaults are fine + here, and optimized uses less RAM than the other options. + +## Database options ## + +osm2pgsql supports standard options for how to connect to PostgreSQL. If left +unset, it will attempt to connect to the ``gis`` database using a unix socket. +Most usage only requires setting ``--database``. + +``--tablespace`` options allow the location of main and slim tables and indexes +to be set to different tablespaces independently, typically on machines with +multiple drive arrays where one is not large enough for all of the database. + +``--flat-nodes`` specifies that instead of a table in PostgreSQL, a binary +file is used as a database of node locations. This should only be used on full +planet imports or very large extracts (e.g. Europe) but in those situations +offers significant space savings and speed increases, particularly on +mechanical drives. The file takes approximately 8 bytes * maximum node ID, or +about 23 GiB, regardless of the size of the extract. + +``--unlogged`` specifies to use unlogged tables which are dropped from the +database if the database server ever crashes, but are faster to import. + +``--prefix`` specifies the prefix for tables + +## Middle-layer options ## + +* ``--slim`` causes the middle layer to store node and way information in + database rather than in memory. It is required for updates and for large + extracts or the entire planet which will not fit in RAM. + +* ``--drop`` discards the slim tables when they are no longer needed in the + import, significantly reducing disk requirements and saving the time of + building slim table indexes. A ``--slim --drop`` import is the fastest way + to import if updates are not needed. + +## Output columns options ## + +### Column options + +* ``--extra-attributes`` creates pseudo-tags with OSM meta-data like user, + last edited, and changeset. These also need to be added to the style file. + +* ``--style`` specifies the location of the style file. 
This defines what + columns are created, what tags denote areas, and what tags can be ignored. + The [default.style](../default.style) contains more documentation on this + file. + +* ``--tag-transform-script`` sets a [Lua tag transform](lua.md) to use in + place of the built-in C tag transform. + +### Hstore + +Hstore is a [PostgreSQL data type](http://www.postgresql.org/docs/9.3/static/hstore.html) +that allows storing arbitrary key-value pairs. It needs to be installed on +the database with ``CREATE EXTENSION hstore;`` + +osm2pgsql has five hstore options + +* ``--hstore`` or ``-k`` adds any tags not already in a conventional column to + a hstore column. With the standard stylesheet this would result in tags like + highway appearing in a conventional column while tags not in the style like + ``name:en`` or ``lanes:forward`` would appear only in the hstore column. + +* ``--hstore-all`` or ``-j`` adds all tags to a hstore column, even if they're + already stored in a conventional column. With the standard stylesheet this + would result in tags like highway appearing in conventional column and the + hstore column while tags not in the style like ``name:en`` or + ``lanes:forward`` would appear only in the hstore column. + +* ``--hstore-column`` or ``-z``, which adds an additional column for tags + starting with a specified string, e.g. ``--hstore-column 'name:'`` produces + a hstore column that contains all ``name:xx`` tags + +* ``--hstore-match-only`` modifies the above options and prevents objects from + being added if they only have tags in the hstore column and no conventional + tags. + +* ``--hstore-add-index`` adds a GIN index to the hstore columns. This can + speed up arbitrary queries, but for most purposes partial indexes will be + faster. + +Either ``--hstore`` or ``--hstore-all`` when combined with ``--hstore-match-only`` +should give the same rows as no hstore, just with the additional hstore column. 
+ +Hstore is used to give more flexibility to use additional tags without +reimporting the database, at the cost of +[less speed and more space.](http://paulnorman.ca/blog/2014/03/osm2pgsql-and-hstore/) + +## Projection options + +* ``--latlong``, ``--merc``, or ``--proj`` are used to specify the projection + used for importing. The default, ``--merc`` is typically used for rendering, + while ``--latlong`` can offer advantages for analysis. Most stylesheets + assume ``--merc`` has been used. + +## Output data options + +* ``--multi-geometry`` skips an optimization for rendering where PostGIS + MULTIPOLYGONs are split into multiple POLYGONs. ``--multi-geometry`` can be + used to [avoid some labeling issues at the cost of speed](http://paulnorman.ca/blog/2014/03/osm2pgsql-multipolygons/). + It is also typically required for [analysis](analysis.md). + +* ``--keep-coastlines`` disables a hard-coded rule that would otherwise + discard ``natural=coastline`` ways. + +* ``--exclude-invalid-polygon`` prevents osm2pgsql from attempting to form + valid polygons from invalid ones and just rejects the invalid ones. diff -Nru osm2pgsql-0.82.0/empty.style osm2pgsql-0.86.0/empty.style --- osm2pgsql-0.82.0/empty.style 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/empty.style 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,107 @@ +# This osm2pgsql style file is one that will generate no columns from tags +# It is designed as a starting point for you to develop your own, or for +# use where all OSM tags are in hstore. 
+ +# See default.style for documentation on all the flags + +# OsmType Tag Type Flags +# Insert your own columns here, or change phstore to polygon below +way abandoned:aeroway text phstore +way abandoned:amenity text phstore +way abandoned:building text phstore +way abandoned:landuse text phstore +way abandoned:power text phstore +way area:highway text phstore +node,way aeroway text phstore +node,way amenity text phstore +node,way building text phstore +way building:part text phstore +node,way harbour text phstore +node,way historic text phstore +node,way landuse text phstore +node,way leisure text phstore +node,way man_made text phstore +node,way military text phstore +node,way natural text phstore +node,way office text phstore +node,way place text phstore +node,way power text phstore +node,way public_transport text phstore +node,way shop text phstore +node,way sport text phstore +node,way tourism text phstore +node,way water text phstore +node,way waterway text phstore +node,way wetland text phstore +node,way z_order int4 linear # This is calculated during import +way way_area real # This is calculated during import + +# Deleted tags +# These are tags that are generally regarded as useless for most rendering. +# Most of them are from imports or intended as internal information for mappers +# Some of them are automatically deleted by editors. +# If you want some of them, perhaps for a debugging layer, just delete the lines. + +# These tags are used by mappers to keep track of data. +# They aren't very useful for rendering. 
+node,way note text delete +node,way note:* text delete +node,way source text delete +node,way source_ref text delete +node,way source:* text delete +node,way attribution text delete +node,way comment text delete +node,way fixme text delete + +# Tags generally dropped by editors, not otherwise covered +node,way created_by text delete +node,way odbl text delete +node,way odbl:note text delete +node,way SK53_bulk:load text delete + +# Lots of import tags +# TIGER (US) +node,way tiger:* text delete + +# NHD (US) +# NHD has been converted every way imaginable +node,way NHD:* text delete +node,way nhd:* text delete + +# GNIS (US) +node,way gnis:* text delete + +# Geobase (CA) +node,way geobase:* text delete +# NHN (CA) +node,way accuracy:meters text delete +node,way sub_sea:type text delete +node,way waterway:type text delete + +# KSJ2 (JA) +# See also note:ja and source_ref above +node,way KSJ2:* text delete +# Yahoo/ALPS (JA) +node,way yh:* text delete + +# osak (DK) +node,way osak:* text delete + +# kms (DK) +node,way kms:* text delete + +# ngbe (ES) +# See also note:es and source:file above +node,way ngbe:* text delete + +# naptan (UK) +node,way naptan:* text delete + +# Corine (CLC) (Europe) +node,way CLC:* text delete + +# misc +node,way 3dshapes:ggmodelk text delete +node,way AND_nosr_r text delete +node,way import text delete +node,way it:fvg:* text delete diff -Nru osm2pgsql-0.82.0/.gitignore osm2pgsql-0.86.0/.gitignore --- osm2pgsql-0.82.0/.gitignore 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/.gitignore 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,38 @@ +*.o +Makefile.in +aclocal.m4 +autom4te.cache/ +config.guess +config.h.in +config.h.in~ +config.sub +configure +compile +depcomp +fileformat.pb-c.c +fileformat.pb-c.h +install-sh +legacy/Makefile.in +ltmain.sh +m4/libtool.m4 +m4/ltoptions.m4 +m4/ltsugar.m4 +m4/ltversion.m4 +m4/lt~obsolete.m4 +missing +nodecachefilereader +osmformat.pb-c.c +osmformat.pb-c.h +osm2pgsql + +Makefile +config.h 
+config.log +config.nice +config.status +legacy/.deps/ +legacy/Makefile +libtool +.deps/ +stamp-h1 +INSTALL diff -Nru osm2pgsql-0.82.0/INSTALL osm2pgsql-0.86.0/INSTALL --- osm2pgsql-0.82.0/INSTALL 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/INSTALL 1970-01-01 00:00:00.000000000 +0000 @@ -1,365 +0,0 @@ -Installation Instructions -************************* - -Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, -2006, 2007, 2008, 2009 Free Software Foundation, Inc. - - Copying and distribution of this file, with or without modification, -are permitted in any medium without royalty provided the copyright -notice and this notice are preserved. This file is offered as-is, -without warranty of any kind. - -Basic Installation -================== - - Briefly, the shell commands `./configure; make; make install' should -configure, build, and install this package. The following -more-detailed instructions are generic; see the `README' file for -instructions specific to this package. Some packages provide this -`INSTALL' file but do not implement all of the features documented -below. The lack of an optional feature in a given package is not -necessarily a bug. More recommendations for GNU packages can be found -in *note Makefile Conventions: (standards)Makefile Conventions. - - The `configure' shell script attempts to guess correct values for -various system-dependent variables used during compilation. It uses -those values to create a `Makefile' in each directory of the package. -It may also create one or more `.h' files containing system-dependent -definitions. Finally, it creates a shell script `config.status' that -you can run in the future to recreate the current configuration, and a -file `config.log' containing compiler output (useful mainly for -debugging `configure'). 
- - It can also use an optional file (typically called `config.cache' -and enabled with `--cache-file=config.cache' or simply `-C') that saves -the results of its tests to speed up reconfiguring. Caching is -disabled by default to prevent problems with accidental use of stale -cache files. - - If you need to do unusual things to compile the package, please try -to figure out how `configure' could check whether to do them, and mail -diffs or instructions to the address given in the `README' so they can -be considered for the next release. If you are using the cache, and at -some point `config.cache' contains results you don't want to keep, you -may remove or edit it. - - The file `configure.ac' (or `configure.in') is used to create -`configure' by a program called `autoconf'. You need `configure.ac' if -you want to change it or regenerate `configure' using a newer version -of `autoconf'. - - The simplest way to compile this package is: - - 1. `cd' to the directory containing the package's source code and type - `./configure' to configure the package for your system. - - Running `configure' might take a while. While running, it prints - some messages telling which features it is checking for. - - 2. Type `make' to compile the package. - - 3. Optionally, type `make check' to run any self-tests that come with - the package, generally using the just-built uninstalled binaries. - - 4. Type `make install' to install the programs and any data files and - documentation. When installing into a prefix owned by root, it is - recommended that the package be configured and built as a regular - user, and only the `make install' phase executed with root - privileges. - - 5. Optionally, type `make installcheck' to repeat any self-tests, but - this time using the binaries in their final installed location. - This target does not install anything. 
Running this target as a - regular user, particularly if the prior `make install' required - root privileges, verifies that the installation completed - correctly. - - 6. You can remove the program binaries and object files from the - source code directory by typing `make clean'. To also remove the - files that `configure' created (so you can compile the package for - a different kind of computer), type `make distclean'. There is - also a `make maintainer-clean' target, but that is intended mainly - for the package's developers. If you use it, you may have to get - all sorts of other programs in order to regenerate files that came - with the distribution. - - 7. Often, you can also type `make uninstall' to remove the installed - files again. In practice, not all packages have tested that - uninstallation works correctly, even though it is required by the - GNU Coding Standards. - - 8. Some packages, particularly those that use Automake, provide `make - distcheck', which can by used by developers to test that all other - targets like `make install' and `make uninstall' work correctly. - This target is generally not run by end users. - -Compilers and Options -===================== - - Some systems require unusual options for compilation or linking that -the `configure' script does not know about. Run `./configure --help' -for details on some of the pertinent environment variables. - - You can give `configure' initial values for configuration parameters -by setting variables in the command line or in the environment. Here -is an example: - - ./configure CC=c99 CFLAGS=-g LIBS=-lposix - - *Note Defining Variables::, for more details. - -Compiling For Multiple Architectures -==================================== - - You can compile the package for more than one kind of computer at the -same time, by placing the object files for each architecture in their -own directory. To do this, you can use GNU `make'. 
`cd' to the -directory where you want the object files and executables to go and run -the `configure' script. `configure' automatically checks for the -source code in the directory that `configure' is in and in `..'. This -is known as a "VPATH" build. - - With a non-GNU `make', it is safer to compile the package for one -architecture at a time in the source code directory. After you have -installed the package for one architecture, use `make distclean' before -reconfiguring for another architecture. - - On MacOS X 10.5 and later systems, you can create libraries and -executables that work on multiple system types--known as "fat" or -"universal" binaries--by specifying multiple `-arch' options to the -compiler but only a single `-arch' option to the preprocessor. Like -this: - - ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ - CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ - CPP="gcc -E" CXXCPP="g++ -E" - - This is not guaranteed to produce working output in all cases, you -may have to build one architecture at a time and combine the results -using the `lipo' tool if you have problems. - -Installation Names -================== - - By default, `make install' installs the package's commands under -`/usr/local/bin', include files under `/usr/local/include', etc. You -can specify an installation prefix other than `/usr/local' by giving -`configure' the option `--prefix=PREFIX', where PREFIX must be an -absolute file name. - - You can specify separate installation prefixes for -architecture-specific files and architecture-independent files. If you -pass the option `--exec-prefix=PREFIX' to `configure', the package uses -PREFIX as the prefix for installing programs and libraries. -Documentation and other data files still use the regular prefix. - - In addition, if you use an unusual directory layout you can give -options like `--bindir=DIR' to specify different values for particular -kinds of files. 
Run `configure --help' for a list of the directories -you can set and what kinds of files go in them. In general, the -default for these options is expressed in terms of `${prefix}', so that -specifying just `--prefix' will affect all of the other directory -specifications that were not explicitly provided. - - The most portable way to affect installation locations is to pass the -correct locations to `configure'; however, many packages provide one or -both of the following shortcuts of passing variable assignments to the -`make install' command line to change installation locations without -having to reconfigure or recompile. - - The first method involves providing an override variable for each -affected directory. For example, `make install -prefix=/alternate/directory' will choose an alternate location for all -directory configuration variables that were expressed in terms of -`${prefix}'. Any directories that were specified during `configure', -but not in terms of `${prefix}', must each be overridden at install -time for the entire installation to be relocated. The approach of -makefile variable overrides for each directory variable is required by -the GNU Coding Standards, and ideally causes no recompilation. -However, some platforms have known limitations with the semantics of -shared libraries that end up requiring recompilation when using this -method, particularly noticeable in packages that use GNU Libtool. - - The second method involves providing the `DESTDIR' variable. For -example, `make install DESTDIR=/alternate/directory' will prepend -`/alternate/directory' before all installation names. The approach of -`DESTDIR' overrides is not required by the GNU Coding Standards, and -does not work on platforms that have drive letters. On the other hand, -it does better at avoiding recompilation issues, and works well even -when some directory options were not specified in terms of `${prefix}' -at `configure' time. 
- -Optional Features -================= - - If the package supports it, you can cause programs to be installed -with an extra prefix or suffix on their names by giving `configure' the -option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. - - Some packages pay attention to `--enable-FEATURE' options to -`configure', where FEATURE indicates an optional part of the package. -They may also pay attention to `--with-PACKAGE' options, where PACKAGE -is something like `gnu-as' or `x' (for the X Window System). The -`README' should mention any `--enable-' and `--with-' options that the -package recognizes. - - For packages that use the X Window System, `configure' can usually -find the X include and library files automatically, but if it doesn't, -you can use the `configure' options `--x-includes=DIR' and -`--x-libraries=DIR' to specify their locations. - - Some packages offer the ability to configure how verbose the -execution of `make' will be. For these packages, running `./configure ---enable-silent-rules' sets the default to minimal output, which can be -overridden with `make V=1'; while running `./configure ---disable-silent-rules' sets the default to verbose, which can be -overridden with `make V=0'. - -Particular systems -================== - - On HP-UX, the default C compiler is not ANSI C compatible. If GNU -CC is not installed, it is recommended to use the following options in -order to use an ANSI C compiler: - - ./configure CC="cc -Ae -D_XOPEN_SOURCE=500" - -and if that doesn't work, install pre-built binaries of GCC for HP-UX. - - On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot -parse its `' header file. The option `-nodtk' can be used as -a workaround. If GNU CC is not installed, it is therefore recommended -to try - - ./configure CC="cc" - -and if that doesn't work, try - - ./configure CC="cc -nodtk" - - On Solaris, don't put `/usr/ucb' early in your `PATH'. 
This -directory contains several dysfunctional programs; working variants of -these programs are available in `/usr/bin'. So, if you need `/usr/ucb' -in your `PATH', put it _after_ `/usr/bin'. - - On Haiku, software installed for all users goes in `/boot/common', -not `/usr/local'. It is recommended to use the following options: - - ./configure --prefix=/boot/common - -Specifying the System Type -========================== - - There may be some features `configure' cannot figure out -automatically, but needs to determine by the type of machine the package -will run on. Usually, assuming the package is built to be run on the -_same_ architectures, `configure' can figure that out, but if it prints -a message saying it cannot guess the machine type, give it the -`--build=TYPE' option. TYPE can either be a short name for the system -type, such as `sun4', or a canonical name which has the form: - - CPU-COMPANY-SYSTEM - -where SYSTEM can have one of these forms: - - OS - KERNEL-OS - - See the file `config.sub' for the possible values of each field. If -`config.sub' isn't included in this package, then this package doesn't -need to know the machine type. - - If you are _building_ compiler tools for cross-compiling, you should -use the option `--target=TYPE' to select the type of system they will -produce code for. - - If you want to _use_ a cross compiler, that generates code for a -platform different from the build platform, you should specify the -"host" platform (i.e., that on which the generated programs will -eventually be run) with `--host=TYPE'. - -Sharing Defaults -================ - - If you want to set default values for `configure' scripts to share, -you can create a site shell script called `config.site' that gives -default values for variables like `CC', `cache_file', and `prefix'. -`configure' looks for `PREFIX/share/config.site' if it exists, then -`PREFIX/etc/config.site' if it exists. 
Or, you can set the -`CONFIG_SITE' environment variable to the location of the site script. -A warning: not all `configure' scripts look for a site script. - -Defining Variables -================== - - Variables not defined in a site shell script can be set in the -environment passed to `configure'. However, some packages may run -configure again during the build, and the customized values of these -variables may be lost. In order to avoid this problem, you should set -them in the `configure' command line, using `VAR=value'. For example: - - ./configure CC=/usr/local2/bin/gcc - -causes the specified `gcc' to be used as the C compiler (unless it is -overridden in the site shell script). - -Unfortunately, this technique does not work for `CONFIG_SHELL' due to -an Autoconf bug. Until the bug is fixed you can use this workaround: - - CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash - -`configure' Invocation -====================== - - `configure' recognizes the following options to control how it -operates. - -`--help' -`-h' - Print a summary of all of the options to `configure', and exit. - -`--help=short' -`--help=recursive' - Print a summary of the options unique to this package's - `configure', and exit. The `short' variant lists options used - only in the top level, while the `recursive' variant lists options - also present in any nested packages. - -`--version' -`-V' - Print the version of Autoconf used to generate the `configure' - script, and exit. - -`--cache-file=FILE' - Enable the cache: use and save the results of the tests in FILE, - traditionally `config.cache'. FILE defaults to `/dev/null' to - disable caching. - -`--config-cache' -`-C' - Alias for `--cache-file=config.cache'. - -`--quiet' -`--silent' -`-q' - Do not print messages saying which checks are being made. To - suppress all normal output, redirect it to `/dev/null' (any error - messages will still be shown). 
- -`--srcdir=DIR' - Look for the package's source code in directory DIR. Usually - `configure' can determine that directory automatically. - -`--prefix=DIR' - Use DIR as the installation prefix. *note Installation Names:: - for more details, including other options available for fine-tuning - the installation locations. - -`--no-create' -`-n' - Run the configure checks, but stop before creating any output - files. - -`configure' also accepts some other, not widely useful, options. Run -`configure --help' for more details. - diff -Nru osm2pgsql-0.82.0/install-postgis-osm-db.sh osm2pgsql-0.86.0/install-postgis-osm-db.sh --- osm2pgsql-0.82.0/install-postgis-osm-db.sh 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/install-postgis-osm-db.sh 2014-10-25 06:42:27.000000000 +0000 @@ -21,7 +21,12 @@ sudo -u postgres createlang plpgsql $DBNAME || true - if [ -e /usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql ] ; then + if [ -e /usr/share/postgresql/9.3/extension/postgis.control ]; then + echo "Initializing Spatial Extentions for postgresql 9.3" + echo "CREATE EXTENSION postgis;" | sudo -u postgres psql $DBNAME + echo "Initializing hstore" + echo "CREATE EXTENSION hstore;" | sudo -u postgres psql $DBNAME + else if [ -e /usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql ] ; then echo "Initializing Spatial Extentions for postgresql 9.1" file_postgis=/usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql file_spatial_ref=/usr/share/postgresql/9.1/contrib/postgis-1.5/spatial_ref_sys.sql @@ -44,7 +49,7 @@ echo "Initializing hstore" file_hstore=/usr/share/postgresql/8.4/contrib/hstore.sql sudo -u postgres psql $DBNAME <$file_hstore >/dev/null 2>&1 - fi + fi fi echo "Setting ownership to user $DBOWNER" diff -Nru osm2pgsql-0.82.0/legacy/build_geometry.cpp osm2pgsql-0.86.0/legacy/build_geometry.cpp --- osm2pgsql-0.82.0/legacy/build_geometry.cpp 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/legacy/build_geometry.cpp 1970-01-01 00:00:00.000000000 
+0000 @@ -1,163 +0,0 @@ -/* -#----------------------------------------------------------------------------- -# Part of osm2pgsql utility -#----------------------------------------------------------------------------- -# By Artem Pavlenko, Copyright 2007 -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -#----------------------------------------------------------------------------- -*/ - -#include -#include - -#if (GEOS_VERSION_MAJOR==3) -/* geos trunk (3.0.0rc) */ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -using namespace geos::geom; -using namespace geos::io; -using namespace geos::operation::linemerge; -#else -/* geos-2.2 */ -#include -#include -#include -using namespace geos; -#endif - -#include "build_geometry.h" - - -struct Segment -{ - Segment(double x0_,double y0_,double x1_,double y1_) - :x0(x0_),y0(y0_),x1(x1_),y1(y1_) {} - - double x0; - double y0; - double x1; - double y1; -}; - -static std::vector segs; -static std::vector wkts; - -typedef std::auto_ptr geom_ptr; - -int is_simple(const char* wkt) -{ - GeometryFactory factory; - WKTReader reader(&factory); - geom_ptr geom(reader.read(wkt)); - if (geom->isSimple()) return 1; - return 0; -} - -void add_segment(double x0,double y0,double x1,double y1) -{ - segs.push_back(Segment(x0,y0,x1,y1)); 
-} - -const char * get_wkt(size_t index) -{ - return wkts[index].c_str(); -} - -void clear_wkts() -{ - wkts.clear(); -} - -size_t build_geometry(int polygon) -{ - size_t wkt_size = 0; - GeometryFactory factory; - geom_ptr segment(0); - std::auto_ptr > lines(new std::vector); - std::vector::const_iterator pos=segs.begin(); - std::vector::const_iterator end=segs.end(); - bool first=true; - try { - while (pos != end) - { - if (pos->x0 != pos->x1 || pos->y0 != pos->y1) - { - std::auto_ptr coords(factory.getCoordinateSequenceFactory()->create(0,2)); - coords->add(Coordinate(pos->x0,pos->y0)); - coords->add(Coordinate(pos->x1,pos->y1)); - geom_ptr linestring(factory.createLineString(coords.release())); - if (first) - { - segment = linestring; - first=false; - } - else - { - lines->push_back(linestring.release()); - } - } - ++pos; - } - - segs.clear(); - - if (segment.get()) - { - geom_ptr mline (factory.createMultiLineString(lines.release())); - geom_ptr noded (segment->Union(mline.get())); - LineMerger merger; - merger.add(noded.get()); - std::auto_ptr > merged(merger.getMergedLineStrings()); - WKTWriter writer; - - for (unsigned i=0 ;i < merged->size(); ++i) - { - std::auto_ptr pline ((*merged ) [i]); - - if (polygon == 1 && pline->getNumPoints() > 3 && pline->isClosed()) - { - std::auto_ptr ring(factory.createLinearRing(pline->getCoordinates())); - geom_ptr poly(factory.createPolygon(ring.release(),0)); - std::string text = writer.write(poly.get()); - - wkts.push_back(text); - ++wkt_size; - } - else - { - std::string text = writer.write(pline.get()); - wkts.push_back(text); - ++wkt_size; - } - } - } - } - catch (...) 
- { - std::cerr << "excepton caught \n"; - wkt_size = 0; - } - return wkt_size; -} - diff -Nru osm2pgsql-0.82.0/legacy/build_geometry.h osm2pgsql-0.86.0/legacy/build_geometry.h --- osm2pgsql-0.82.0/legacy/build_geometry.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/legacy/build_geometry.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ -/* -#----------------------------------------------------------------------------- -# Part of osm2pgsql utility -#----------------------------------------------------------------------------- -# By Artem Pavlenko, Copyright 2007 -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-#----------------------------------------------------------------------------- -*/ - -#ifndef BUILD_GEOMETRY_H -#define BUILD_GEOMETRY_H - -#ifdef __cplusplus -extern "C" { -#endif - -int is_simple(const char* wkt); -void add_segment(double x0,double y0,double x1, double y1); -const char* get_wkt(size_t index); -size_t build_geometry(int polygon); -void clear_wkts(); - -#ifdef __cplusplus -} -#endif - -#endif diff -Nru osm2pgsql-0.82.0/legacy/Makefile.am osm2pgsql-0.86.0/legacy/Makefile.am --- osm2pgsql-0.82.0/legacy/Makefile.am 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/legacy/Makefile.am 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -bin_PROGRAMS = osm2pgsql-legacy - -osm2pgsql_legacy_SOURCES = build_geometry.cpp osm2pgsql.c - -AM_CFLAGS=@XML2_CFLAGS@ @GEOS_CFLAGS@ -AM_CPPFLAGS=@XML2_CFLAGS@ @GEOS_CFLAGS@ - -AM_LDFLAGS=@XML2_LDFLAGS@ @GEOS_LDFLAGS@ @GEOS_LIBS@ - -noinst_HEADERS = build_geometry.h - diff -Nru osm2pgsql-0.82.0/legacy/osm2pgsql.c osm2pgsql-0.86.0/legacy/osm2pgsql.c --- osm2pgsql-0.82.0/legacy/osm2pgsql.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/legacy/osm2pgsql.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,642 +0,0 @@ -/* - #----------------------------------------------------------------------------- - # osm2pgsql - converts planet.osm file into PostgreSQL - # compatible output suitable to be rendered by mapnik - # Use: osm2pgsql planet.osm > planet.sql - #----------------------------------------------------------------------------- - # Original Python implementation by Artem Pavlenko - # Re-implementation by Jon Burgess, Copyright 2006 - # - # This program is free software; you can redistribute it and/or - # modify it under the terms of the GNU General Public License - # as published by the Free Software Foundation; either version 2 - # of the License, or (at your option) any later version. 
- # - # This program is distributed in the hope that it will be useful, - # but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License - # along with this program; if not, write to the Free Software - # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - #----------------------------------------------------------------------------- -*/ - -#define _GNU_SOURCE - -#include -#include -#include -#include -#include - -#include -#include - -#include "build_geometry.h" - -#if 0 -#define DEBUG printf -#else -#define DEBUG(x, ...) -#endif - -struct tagDesc { - const char *name; - const char *type; - const int polygon; -}; - -static struct tagDesc exportTags[] = { - {"name", "text", 0}, - {"place", "text", 0}, - {"landuse", "text", 1}, - {"leisure", "text", 1}, - {"natural", "text", 1}, - {"man_made","text", 0}, - {"waterway","text", 0}, - {"highway", "text", 0}, - {"foot", "text", 0}, - {"horse", "text", 0}, - {"bicycle", "text", 0}, - {"motorcar","text", 0}, - {"residence","text", 0}, - {"railway", "text", 0}, - {"amenity", "text", 1}, - {"tourism", "text", 1}, - {"learning","text", 0}, - {"building","text", 1}, - {"bridge", "text", 0}, - {"layer", "text", 0}, - {"junction","text", 0}, - {"sport", "text", 1}, - {"route", "text", 0}, - {"aeroway", "text", 0} -}; - -static const char *table_name_point = "planet_osm_point"; -static const char *table_name_line = "planet_osm_line"; -static const char *table_name_polygon = "planet_osm_polygon"; - -#define MAX_ID_NODE (35000000) -#define MAX_ID_SEGMENT (35000000) - -struct osmNode { - double lon; - double lat; -}; - -struct osmSegment { - unsigned int from; - unsigned int to; -}; - -struct osmWay { - char *values; - char *wkt; -}; - -static struct osmNode nodes[MAX_ID_NODE+1]; -static struct osmSegment 
segments[MAX_ID_SEGMENT+1]; - -static int count_node, count_all_node, max_node; -static int count_segment, count_all_segment, max_segment; -static int count_way, count_all_way, max_way; -static int count_way_seg; - -struct keyval { - char *key; - char *value; - struct keyval *next; - struct keyval *prev; -}; - - -static struct keyval keys, tags, segs; - - -void usage(const char *arg0) -{ - fprintf(stderr, "Usage error:\n\t%s planet.osm > planet.sql\n", arg0); - fprintf(stderr, "or\n\tgzip -dc planet.osm.gz | %s - | gzip -c > planet.sql.gz\n", arg0); -} - -void initList(struct keyval *head) -{ - head->next = head; - head->prev = head; - head->key = NULL; - head->value = NULL; -} - -void freeItem(struct keyval *p) -{ - free(p->key); - free(p->value); - free(p); -} - - -unsigned int countList(struct keyval *head) -{ - struct keyval *p = head->next; - unsigned int count = 0; - - while(p != head) { - count++; - p = p->next; - } - return count; -} - -int listHasData(struct keyval *head) -{ - return (head->next != head); -} - - -char *getItem(struct keyval *head, const char *name) -{ - struct keyval *p = head->next; - while(p != head) { - if (!strcmp(p->key, name)) - return p->value; - p = p->next; - } - return NULL; -} - - -struct keyval *popItem(struct keyval *head) -{ - struct keyval *p = head->next; - if (p == head) - return NULL; - - head->next = p->next; - p->next->prev = head; - - p->next = NULL; - p->prev = NULL; - - return p; -} - - -void pushItem(struct keyval *head, struct keyval *item) -{ - item->next = head; - item->prev = head->prev; - head->prev->next = item; - head->prev = item; -} - -int addItem(struct keyval *head, const char *name, const char *value, int noDupe) -{ - struct keyval *item; - - if (noDupe) { - item = head->next; - while (item != head) { - if (!strcmp(item->value, value) && !strcmp(item->key, name)) { - //fprintf(stderr, "Discarded %s=%s\n", name, value); - return 1; - } - item = item->next; - } - } - - item = malloc(sizeof(struct keyval)); 
- - if (!item) { - fprintf(stderr, "Error allocating keyval\n"); - return 2; - } - - item->key = strdup(name); - item->value = strdup(value); - - item->next = head->next; - item->prev = head; - head->next->prev = item; - head->next = item; - - return 0; -} - -void resetList(struct keyval *head) -{ - struct keyval *item; - - while((item = popItem(head))) - freeItem(item); -} - -size_t WKT(int polygon) -{ - while (listHasData(&segs)) - { - struct keyval *p; - unsigned int id, to, from; - double x0, y0, x1, y1; - p = popItem(&segs); - id = strtoul(p->value, NULL, 10); - freeItem(p); - - from = segments[id].from; - to = segments[id].to; - - x0 = nodes[from].lon; - y0 = nodes[from].lat; - x1 = nodes[to].lon; - y1 = nodes[to].lat; - add_segment(x0,y0,x1,y1); - } - return build_geometry(polygon); -} - - -void StartElement(xmlTextReaderPtr reader, const xmlChar *name) -{ - xmlChar *xid, *xlat, *xlon, *xfrom, *xto, *xk, *xv; - unsigned int id, to, from; - double lon, lat; - char *k; - - if (xmlStrEqual(name, BAD_CAST "node")) { - struct osmNode *node; - xid = xmlTextReaderGetAttribute(reader, BAD_CAST "id"); - xlon = xmlTextReaderGetAttribute(reader, BAD_CAST "lon"); - xlat = xmlTextReaderGetAttribute(reader, BAD_CAST "lat"); - assert(xid); assert(xlon); assert(xlat); - id = strtoul((char *)xid, NULL, 10); - lon = strtod((char *)xlon, NULL); - lat = strtod((char *)xlat, NULL); - - assert(id > 0); - assert(id < MAX_ID_NODE); - - if (id > max_node) - max_node = id; - - count_all_node++; - if (count_all_node%10000 == 0) - fprintf(stderr, "\rProcessing: Node(%dk)", count_all_node/1000); - - node = &nodes[id]; - node->lon = lon; - node->lat = lat; - - DEBUG("NODE(%d) %f %f\n", id, lon, lat); - addItem(&keys, "id", (char *)xid, 0); - - xmlFree(xid); - xmlFree(xlon); - xmlFree(xlat); - } else if (xmlStrEqual(name, BAD_CAST "segment")) { - xid = xmlTextReaderGetAttribute(reader, BAD_CAST "id"); - xfrom = xmlTextReaderGetAttribute(reader, BAD_CAST "from"); - xto = 
xmlTextReaderGetAttribute(reader, BAD_CAST "to"); - assert(xid); assert(xfrom); assert(xto); - id = strtoul((char *)xid, NULL, 10); - from = strtoul((char *)xfrom, NULL, 10); - to = strtoul((char *)xto, NULL, 10); - - assert(id > 0); - assert(id < MAX_ID_SEGMENT); - - if (id > max_segment) - max_segment = id; - - if (count_all_segment == 0) - fprintf(stderr, "\n"); - - count_all_segment++; - if (count_all_segment%10000 == 0) - fprintf(stderr, "\rProcessing: Segment(%dk)", count_all_segment/1000); - - if (!nodes[to].lat && !nodes[to].lon) { - DEBUG("SEGMENT(%d), NODE(%d) is missing\n", id, to); - } else if (!nodes[from].lat && !nodes[from].lon) { - DEBUG("SEGMENT(%d), NODE(%d) is missing\n", id, from); - } else { - if (from != to) { - struct osmSegment *segment; - segment = &segments[id]; - segment->to = to; - segment->from = from; - - count_segment++; - DEBUG("SEGMENT(%d) %d, %d\n", id, from, to); - } - } - - xmlFree(xid); - xmlFree(xfrom); - xmlFree(xto); - } else if (xmlStrEqual(name, BAD_CAST "tag")) { - char *p; - xk = xmlTextReaderGetAttribute(reader, BAD_CAST "k"); - xv = xmlTextReaderGetAttribute(reader, BAD_CAST "v"); - assert(xk); assert(xv); - k = (char *)xmlStrdup(xk); - - while ((p = strchr(k, ':'))) - *p = '_'; - while ((p = strchr(k, ' '))) - *p = '_'; - - addItem(&tags, k, (char *)xv, 0); - DEBUG("\t%s = %s\n", xk, xv); - xmlFree(k); - xmlFree(xk); - xmlFree(xv); - } else if (xmlStrEqual(name, BAD_CAST "way")) { - xid = xmlTextReaderGetAttribute(reader, BAD_CAST "id"); - assert(xid); - id = strtoul((char *)xid, NULL, 10); - addItem(&keys, "id", (char *)xid, 0); - DEBUG("WAY(%s)\n", xid); - - if (id > max_way) - max_way = id; - - if (count_all_way == 0) - fprintf(stderr, "\n"); - - count_all_way++; - if (count_all_way%1000 == 0) - fprintf(stderr, "\rProcessing: Way(%dk)", count_all_way/1000); - - xmlFree(xid); - } else if (xmlStrEqual(name, BAD_CAST "seg")) { - xid = xmlTextReaderGetAttribute(reader, BAD_CAST "id"); - assert(xid); - id = strtoul((char 
*)xid, NULL, 10); - if (!id || (id > MAX_ID_SEGMENT)) - DEBUG("\tSEG(%s) - invalid segment ID\n", xid); - else if (!segments[id].from || !segments[id].to) - DEBUG("\tSEG(%s) - missing segment\n", xid); - else { - if (addItem(&segs, "id", (char *)xid, 1)) { - const char *way_id = getItem(&keys, "id"); - if (!way_id) way_id = "???"; - //fprintf(stderr, "Way %s with duplicate segment id %d\n", way_id, id); - count_way_seg++; - } - DEBUG("\tSEG(%s)\n", xid); - } - xmlFree(xid); - } else if (xmlStrEqual(name, BAD_CAST "osm")) { - /* ignore */ - } else { - fprintf(stderr, "%s: Unknown element name: %s\n", __FUNCTION__, name); - } -} - -void EndElement(xmlTextReaderPtr reader, const xmlChar *name) -{ - unsigned int id; - - DEBUG("%s: %s\n", __FUNCTION__, name); - - if (xmlStrEqual(name, BAD_CAST "node")) { - int i; - char *values = NULL, *names = NULL; - char *osm_id = getItem(&keys, "id"); - if (!osm_id) { - fprintf(stderr, "%s: Node ID not in keys\n", __FUNCTION__); - resetList(&keys); - resetList(&tags); - return; - } - id = strtoul(osm_id, NULL, 10); - //assert(nodes[id].lat && nodes[id].lon); - for (i=0; i < sizeof(exportTags) / sizeof(exportTags[0]); i++) { - char *v; - if ((v = getItem(&tags, exportTags[i].name))) { - if (values) { - char *oldval = values, *oldnam = names; - asprintf(&names, "%s,\"%s\"", oldnam, exportTags[i].name); - asprintf(&values, "%s,$$%s$$", oldval, v); - free(oldnam); - free(oldval); - } else { - asprintf(&names, "\"%s\"", exportTags[i].name); - asprintf(&values, "$$%s$$", v); - } - } - } - if (values) { - count_node++; - printf("insert into %s (osm_id,%s,way) values " - "(%s,%s,GeomFromText('POINT(%.15g %.15g)',4326));\n", - table_name_point,names,osm_id,values,nodes[id].lon, nodes[id].lat); - } - resetList(&keys); - resetList(&tags); - free(values); - free(names); - } else if (xmlStrEqual(name, BAD_CAST "segment")) { - resetList(&tags); - } else if (xmlStrEqual(name, BAD_CAST "tag")) { - /* Separate tag list so tag stack unused */ - } 
else if (xmlStrEqual(name, BAD_CAST "way")) { - int i, polygon = 0; - char *values = NULL, *names = NULL; - char *osm_id = getItem(&keys, "id"); - - if (!osm_id) { - fprintf(stderr, "%s: WAY ID not in keys\n", __FUNCTION__); - resetList(&keys); - resetList(&tags); - resetList(&segs); - return; - } - - if (!listHasData(&segs)) { - DEBUG("%s: WAY(%s) has no segments\n", __FUNCTION__, osm_id); - resetList(&keys); - resetList(&tags); - resetList(&segs); - return; - } - id = strtoul(osm_id, NULL, 10); - - for (i=0; i < sizeof(exportTags) / sizeof(exportTags[0]); i++) { - char *v; - if ((v = getItem(&tags, exportTags[i].name))) { - if (values) { - char *oldval = values, *oldnam = names; - asprintf(&names, "%s,\"%s\"", oldnam, exportTags[i].name); - asprintf(&values, "%s,$$%s$$", oldval, v); - free(oldnam); - free(oldval); - } else { - asprintf(&names, "\"%s\"", exportTags[i].name); - asprintf(&values, "$$%s$$", v); - } - polygon |= exportTags[i].polygon; - } - } - if (values) { - size_t wkt_size = WKT(polygon); - - if (wkt_size) - { - unsigned i; - for (i=0;i]])], [[#include ] ]) ]) diff -Nru osm2pgsql-0.82.0/m4/ax_lua.m4 osm2pgsql-0.86.0/m4/ax_lua.m4 --- osm2pgsql-0.82.0/m4/ax_lua.m4 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/m4/ax_lua.m4 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,607 @@ +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_lua.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_PROG_LUA[([MINIMUM-VERSION], [TOO-BIG-VERSION], [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])] +# AX_LUA_HEADERS[([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])] +# AX_LUA_LIBS[([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])] +# AX_LUA_READLINE[([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])] +# +# DESCRIPTION +# +# Detect a Lua interpreter, optionally specifying a minimum and maximum +# version number. 
Set up important Lua paths, such as the directories in +# which to install scripts and modules (shared libraries). +# +# Also detect Lua headers and libraries. The Lua version contained in the +# header is checked to match the Lua interpreter version exactly. When +# searching for Lua libraries, the version number is used as a suffix. +# This is done with the goal of supporting multiple Lua installs (5.1 and +# 5.2 side-by-side). +# +# A note on compatibility with previous versions: This file has been +# mostly rewritten for serial 18. Most developers should be able to use +# these macros without needing to modify configure.ac. Care has been taken +# to preserve each macro's behavior, but there are some differences: +# +# 1) AX_WITH_LUA is deprecated; it now expands to the exact same thing as +# AX_PROG_LUA with no arguments. +# +# 2) AX_LUA_HEADERS now checks that the version number defined in lua.h +# matches the interpreter version. AX_LUA_HEADERS_VERSION is therefore +# unnecessary, so it is deprecated and does not expand to anything. +# +# 3) The configure flag --with-lua-suffix no longer exists; the user +# should instead specify the LUA precious variable on the command line. +# See the AX_PROG_LUA description for details. +# +# Please read the macro descriptions below for more information. +# +# This file was inspired by Andrew Dalke's and James Henstridge's +# python.m4 and Tom Payne's, Matthieu Moy's, and Reuben Thomas's ax_lua.m4 +# (serial 17). Basically, this file is a mash-up of those two files. I +# like to think it combines the best of the two! +# +# AX_PROG_LUA: Search for the Lua interpreter, and set up important Lua +# paths. Adds precious variable LUA, which may contain the path of the Lua +# interpreter. If LUA is blank, the user's path is searched for an +# suitable interpreter. +# +# If MINIMUM-VERSION is supplied, then only Lua interpreters with a +# version number greater or equal to MINIMUM-VERSION will be accepted. 
If +# TOO-BIG- VERSION is also supplied, then only Lua interpreters with a +# version number greater or equal to MINIMUM-VERSION and less than +# TOO-BIG-VERSION will be accepted. +# +# Version comparisons require the AX_COMPARE_VERSION macro, which is +# provided by ax_compare_version.m4 from the Autoconf Archive. +# +# The Lua version number, LUA_VERSION, is found from the interpreter, and +# substituted. LUA_PLATFORM is also found, but not currently supported (no +# standard representation). +# +# Finally, the macro finds four paths: +# +# luadir Directory to install Lua scripts. +# pkgluadir $luadir/$PACKAGE +# luaexecdir Directory to install Lua modules. +# pkgluaexecdir $luaexecdir/$PACKAGE +# +# These paths a found based on $prefix, $exec_prefix, Lua's package.path, +# and package.cpath. The first path of package.path beginning with $prefix +# is selected as luadir. The first path of package.cpath beginning with +# $exec_prefix is used as luaexecdir. This should work on all reasonable +# Lua installations. If a path cannot be determined, a default path is +# used. Of course, the user can override these later when invoking make. +# +# luadir Default: $prefix/share/lua/$LUA_VERSION +# luaexecdir Default: $exec_prefix/lib/lua/$LUA_VERSION +# +# These directories can be used by Automake as install destinations. The +# variable name minus 'dir' needs to be used as a prefix to the +# appropriate Automake primary, e.g. lua_SCRIPS or luaexec_LIBRARIES. +# +# If an acceptable Lua interpreter is found, then ACTION-IF-FOUND is +# performed, otherwise ACTION-IF-NOT-FOUND is preformed. If ACTION-IF-NOT- +# FOUND is blank, then it will default to printing an error. To prevent +# the default behavior, give ':' as an action. +# +# AX_LUA_HEADERS: Search for Lua headers. Requires that AX_PROG_LUA be +# expanded before this macro. Adds precious variable LUA_INCLUDE, which +# may contain Lua specific include flags, e.g. -I/usr/include/lua5.1. 
If +# LUA_INCLUDE is blank, then this macro will attempt to find suitable +# flags. +# +# LUA_INCLUDE can be used by Automake to compile Lua modules or +# executables with embedded interpreters. The *_CPPFLAGS variables should +# be used for this purpose, e.g. myprog_CPPFLAGS = $(LUA_INCLUDE). +# +# This macro searches for the header lua.h (and others). The search is +# performed with a combination of CPPFLAGS, CPATH, etc, and LUA_INCLUDE. +# If the search is unsuccessful, then some common directories are tried. +# If the headers are then found, then LUA_INCLUDE is set accordingly. +# +# The paths automatically searched are: +# +# * /usr/include/luaX.Y +# * /usr/include/lua/X.Y +# * /usr/include/luaXY +# * /usr/local/include/luaX.Y +# * /usr/local/include/lua/X.Y +# * /usr/local/include/luaXY +# +# (Where X.Y is the Lua version number, e.g. 5.1.) +# +# The Lua version number found in the headers is always checked to match +# the Lua interpreter's version number. Lua headers with mismatched +# version numbers are not accepted. +# +# If headers are found, then ACTION-IF-FOUND is performed, otherwise +# ACTION-IF-NOT-FOUND is performed. If ACTION-IF-NOT-FOUND is blank, then +# it will default to printing an error. To prevent the default behavior, +# set the action to ':'. +# +# AX_LUA_LIBS: Search for Lua libraries. Requires that AX_PROG_LUA be +# expanded before this macro. Adds precious variable LUA_LIB, which may +# contain Lua specific linker flags, e.g. -llua5.1. If LUA_LIB is blank, +# then this macro will attempt to find suitable flags. +# +# LUA_LIB can be used by Automake to link Lua modules or executables with +# embedded interpreters. The *_LIBADD and *_LDADD variables should be used +# for this purpose, e.g. mymod_LIBADD = $(LUA_LIB). +# +# This macro searches for the Lua library. More technically, it searches +# for a library containing the function lua_load. The search is performed +# with a combination of LIBS, LIBRARY_PATH, and LUA_LIB. 
+# +# If the search determines that some linker flags are missing, then those +# flags will be added to LUA_LIB. +# +# If libraries are found, then ACTION-IF-FOUND is performed, otherwise +# ACTION-IF-NOT-FOUND is performed. If ACTION-IF-NOT-FOUND is blank, then +# it will default to printing an error. To prevent the default behavior, +# set the action to ':'. +# +# AX_LUA_READLINE: Search for readline headers and libraries. Requires the +# AX_LIB_READLINE macro, which is provided by ax_lib_readline.m4 from the +# Autoconf Archive. +# +# If a readline compatible library is found, then ACTION-IF-FOUND is +# performed, otherwise ACTION-IF-NOT-FOUND is performed. +# +# LICENSE +# +# Copyright (c) 2013 Tim Perkins +# Copyright (c) 2013 Reuben Thomas +# +# This program is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program. If not, see . +# +# As a special exception, the respective Autoconf Macro's copyright owner +# gives unlimited permission to copy, distribute and modify the configure +# scripts that are the output of Autoconf when processing the Macro. You +# need not follow the terms of the GNU General Public License when using +# or distributing such scripts, even though portions of the text of the +# Macro appear in them. The GNU General Public License (GPL) does govern +# all other use of the material that constitutes the Autoconf Macro. 
+# +# This special exception to the GPL applies to versions of the Autoconf +# Macro released by the Autoconf Archive. When you make and distribute a +# modified version of the Autoconf Macro, you may extend this special +# exception to the GPL to apply to your modified version as well. + +#serial 20 + +dnl ========================================================================= +dnl AX_PROG_LUA([MINIMUM-VERSION], [TOO-BIG-VERSION], +dnl [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +dnl ========================================================================= +AC_DEFUN([AX_PROG_LUA], +[ + dnl Make LUA a precious variable. + AC_ARG_VAR([LUA], [The Lua interpreter, e.g. /usr/bin/lua5.1]) + + dnl Find a Lua interpreter. + m4_define_default([_AX_LUA_INTERPRETER_LIST], + [lua lua5.2 lua5.1 lua50]) + + m4_if([$1], [], + [ dnl No version check is needed. Find any Lua interpreter. + AS_IF([test "x$LUA" = 'x'], + [AC_PATH_PROGS([LUA], [_AX_LUA_INTERPRETER_LIST], [:])]) + ax_display_LUA='lua' + + dnl At least check if this is a Lua interpreter. + AC_MSG_CHECKING([if $LUA is a Lua interpreter]) + _AX_LUA_CHK_IS_INTRP([$LUA], + [AC_MSG_RESULT([yes])], + [ AC_MSG_RESULT([no]) + AC_MSG_ERROR([not a Lua interpreter]) + ]) + ], + [ dnl A version check is needed. + AS_IF([test "x$LUA" != 'x'], + [ dnl Check if this is a Lua interpreter. + AC_MSG_CHECKING([if $LUA is a Lua interpreter]) + _AX_LUA_CHK_IS_INTRP([$LUA], + [AC_MSG_RESULT([yes])], + [ AC_MSG_RESULT([no]) + AC_MSG_ERROR([not a Lua interpreter]) + ]) + dnl Check the version. + m4_if([$2], [], + [_ax_check_text="whether $LUA version >= $1"], + [_ax_check_text="whether $LUA version >= $1, < $2"]) + AC_MSG_CHECKING([$_ax_check_text]) + _AX_LUA_CHK_VER([$LUA], [$1], [$2], + [AC_MSG_RESULT([yes])], + [ AC_MSG_RESULT([no]) + AC_MSG_ERROR([version is out of range for specified LUA])]) + ax_display_LUA=$LUA + ], + [ dnl Try each interpreter until we find one that satisfies VERSION. 
+ m4_if([$2], [], + [_ax_check_text="for a Lua interpreter with version >= $1"], + [_ax_check_text="for a Lua interpreter with version >= $1, < $2"]) + AC_CACHE_CHECK([$_ax_check_text], + [ax_cv_pathless_LUA], + [ for ax_cv_pathless_LUA in _AX_LUA_INTERPRETER_LIST none; do + test "x$ax_cv_pathless_LUA" = 'xnone' && break + _AX_LUA_CHK_IS_INTRP([$ax_cv_pathless_LUA], [], [continue]) + _AX_LUA_CHK_VER([$ax_cv_pathless_LUA], [$1], [$2], [break]) + done + ]) + dnl Set $LUA to the absolute path of $ax_cv_pathless_LUA. + AS_IF([test "x$ax_cv_pathless_LUA" = 'xnone'], + [LUA=':'], + [AC_PATH_PROG([LUA], [$ax_cv_pathless_LUA])]) + ax_display_LUA=$ax_cv_pathless_LUA + ]) + ]) + + AS_IF([test "x$LUA" = 'x:'], + [ dnl Run any user-specified action, or abort. + m4_default([$4], [AC_MSG_ERROR([cannot find suitable Lua interpreter])]) + ], + [ dnl Query Lua for its version number. + AC_CACHE_CHECK([for $ax_display_LUA version], [ax_cv_lua_version], + [ ax_cv_lua_version=`$LUA -e "print(_VERSION)" | \ + sed "s|^Lua \(.*\)|\1|" | \ + grep -o "^@<:@0-9@:>@\+\\.@<:@0-9@:>@\+"` + ]) + AS_IF([test "x$ax_cv_lua_version" = 'x'], + [AC_MSG_ERROR([invalid Lua version number])]) + AC_SUBST([LUA_VERSION], [$ax_cv_lua_version]) + AC_SUBST([LUA_SHORT_VERSION], [`echo "$LUA_VERSION" | sed 's|\.||'`]) + + dnl The following check is not supported: + dnl At times (like when building shared libraries) you may want to know + dnl which OS platform Lua thinks this is. + AC_CACHE_CHECK([for $ax_display_LUA platform], [ax_cv_lua_platform], + [ax_cv_lua_platform=`$LUA -e "print('unknown')"`]) + AC_SUBST([LUA_PLATFORM], [$ax_cv_lua_platform]) + + dnl Use the values of $prefix and $exec_prefix for the corresponding + dnl values of LUA_PREFIX and LUA_EXEC_PREFIX. These are made distinct + dnl variables so they can be overridden if need be. However, the general + dnl consensus is that you shouldn't need this ability. 
+ AC_SUBST([LUA_PREFIX], ['${prefix}']) + AC_SUBST([LUA_EXEC_PREFIX], ['${exec_prefix}']) + + dnl Lua provides no way to query the script directory, and instead + dnl provides LUA_PATH. However, we should be able to make a safe educated + dnl guess. If the built-in search path contains a directory which is + dnl prefixed by $prefix, then we can store scripts there. The first + dnl matching path will be used. + AC_CACHE_CHECK([for $ax_display_LUA script directory], + [ax_cv_lua_luadir], + [ AS_IF([test "x$prefix" = 'xNONE'], + [ax_lua_prefix=$ac_default_prefix], + [ax_lua_prefix=$prefix]) + + dnl Initialize to the default path. + ax_cv_lua_luadir="$LUA_PREFIX/share/lua/$LUA_VERSION" + + dnl Try to find a path with the prefix. + _AX_LUA_FND_PRFX_PTH([$LUA], [$ax_lua_prefix], [package.path]) + AS_IF([test "x$ax_lua_prefixed_path" != 'x'], + [ dnl Fix the prefix. + _ax_strip_prefix=`echo "$ax_lua_prefix" | sed 's|.|.|g'` + ax_cv_lua_luadir=`echo "$ax_lua_prefixed_path" | \ + sed "s,^$_ax_strip_prefix,$LUA_PREFIX,"` + ]) + ]) + AC_SUBST([luadir], [$ax_cv_lua_luadir]) + AC_SUBST([pkgluadir], [\${luadir}/$PACKAGE]) + + dnl Lua provides no way to query the module directory, and instead + dnl provides LUA_PATH. However, we should be able to make a safe educated + dnl guess. If the built-in search path contains a directory which is + dnl prefixed by $exec_prefix, then we can store modules there. The first + dnl matching path will be used. + AC_CACHE_CHECK([for $ax_display_LUA module directory], + [ax_cv_lua_luaexecdir], + [ AS_IF([test "x$exec_prefix" = 'xNONE'], + [ax_lua_exec_prefix=$ax_lua_prefix], + [ax_lua_exec_prefix=$exec_prefix]) + + dnl Initialize to the default path. + ax_cv_lua_luaexecdir="$LUA_EXEC_PREFIX/lib/lua/$LUA_VERSION" + + dnl Try to find a path with the prefix. + _AX_LUA_FND_PRFX_PTH([$LUA], + [$ax_lua_exec_prefix], [package.cpathd]) + AS_IF([test "x$ax_lua_prefixed_path" != 'x'], + [ dnl Fix the prefix. 
+ _ax_strip_prefix=`echo "$ax_lua_exec_prefix" | sed 's|.|.|g'` + ax_cv_lua_luaexecdir=`echo "$ax_lua_prefixed_path" | \ + sed "s,^$_ax_strip_prefix,$LUA_EXEC_PREFIX,"` + ]) + ]) + AC_SUBST([luaexecdir], [$ax_cv_lua_luaexecdir]) + AC_SUBST([pkgluaexecdir], [\${luaexecdir}/$PACKAGE]) + + dnl Run any user specified action. + $3 + ]) +]) + +dnl AX_WITH_LUA is now the same thing as AX_PROG_LUA. +AC_DEFUN([AX_WITH_LUA], +[ + AC_MSG_WARN([[$0 is deprecated, please use AX_PROG_LUA]]) + AX_PROG_LUA +]) + + +dnl ========================================================================= +dnl _AX_LUA_CHK_IS_INTRP(PROG, [ACTION-IF-TRUE], [ACTION-IF-FALSE]) +dnl ========================================================================= +AC_DEFUN([_AX_LUA_CHK_IS_INTRP], +[ + dnl Just print _VERSION because all Lua interpreters have this global. + AS_IF([$1 -e "print('Hello ' .. _VERSION .. '!')" &>/dev/null], + [$2], [$3]) +]) + + +dnl ========================================================================= +dnl _AX_LUA_CHK_VER(PROG, MINIMUM-VERSION, [TOO-BIG-VERSION], +dnl [ACTION-IF-TRUE], [ACTION-IF-FALSE]) +dnl ========================================================================= +AC_DEFUN([_AX_LUA_CHK_VER], +[ + _ax_test_ver=`$1 -e "print(_VERSION)" 2>/dev/null | \ + sed "s|^Lua \(.*\)|\1|" | grep -o "^@<:@0-9@:>@\+\\.@<:@0-9@:>@\+"` + AS_IF([test "x$_ax_test_ver" = 'x'], + [_ax_test_ver='0']) + AX_COMPARE_VERSION([$_ax_test_ver], [ge], [$2]) + m4_if([$3], [], [], + [ AS_IF([$ax_compare_version], + [AX_COMPARE_VERSION([$_ax_test_ver], [lt], [$3])]) + ]) + AS_IF([$ax_compare_version], [$4], [$5]) +]) + + +dnl ========================================================================= +dnl _AX_LUA_FND_PRFX_PTH(PROG, PREFIX, LUA-PATH-VARIABLE) +dnl ========================================================================= +AC_DEFUN([_AX_LUA_FND_PRFX_PTH], +[ + dnl Invokes the Lua interpreter PROG to print the path variable + dnl LUA-PATH-VARIABLE, usually package.path or 
package.cpath. Paths are + dnl then matched against PREFIX. The first path to begin with PREFIX is set + dnl to ax_lua_prefixed_path. + + ax_lua_prefixed_path='' + _ax_package_paths=`$1 -e 'print($3)' 2>/dev/null | sed 's|;|\n|g'` + dnl Try the paths in order, looking for the prefix. + for _ax_package_path in $_ax_package_paths; do + dnl Copy the path, up to the use of a Lua wildcard. + _ax_path_parts=`echo "$_ax_package_path" | sed 's|/|\n|g'` + _ax_reassembled='' + for _ax_path_part in $_ax_path_parts; do + echo "$_ax_path_part" | grep '\?' >/dev/null && break + _ax_reassembled="$_ax_reassembled/$_ax_path_part" + done + dnl Check the path against the prefix. + _ax_package_path=$_ax_reassembled + if echo "$_ax_package_path" | grep "^$2" >/dev/null; then + dnl Found it. + ax_lua_prefixed_path=$_ax_package_path + break + fi + done +]) + + +dnl ========================================================================= +dnl AX_LUA_HEADERS([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +dnl ========================================================================= +AC_DEFUN([AX_LUA_HEADERS], +[ + dnl Check for LUA_VERSION. + AC_MSG_CHECKING([if LUA_VERSION is defined]) + AS_IF([test "x$LUA_VERSION" != 'x'], + [AC_MSG_RESULT([yes])], + [ AC_MSG_RESULT([no]) + AC_MSG_ERROR([cannot check Lua headers without knowing LUA_VERSION]) + ]) + + dnl Make LUA_INCLUDE a precious variable. + AC_ARG_VAR([LUA_INCLUDE], [The Lua includes, e.g. -I/usr/include/lua5.1]) + + dnl Some default directories to search. + LUA_SHORT_VERSION=`echo "$LUA_VERSION" | sed 's|\.||'` + m4_define_default([_AX_LUA_INCLUDE_LIST], + [ /usr/include/lua$LUA_VERSION \ + /usr/include/lua/$LUA_VERSION \ + /usr/include/lua$LUA_SHORT_VERSION \ + /usr/local/include/lua$LUA_VERSION \ + /usr/local/include/lua/$LUA_VERSION \ + /usr/local/include/lua$LUA_SHORT_VERSION \ + ]) + + dnl Try to find the headers. 
+ _ax_lua_saved_cppflags=$CPPFLAGS + CPPFLAGS="$CPPFLAGS $LUA_INCLUDE" + AC_CHECK_HEADERS([lua.h lualib.h lauxlib.h luaconf.h]) + CPPFLAGS=$_ax_lua_saved_cppflags + + dnl Try some other directories if LUA_INCLUDE was not set. + AS_IF([test "x$LUA_INCLUDE" = 'x' && + test "x$ac_cv_header_lua_h" != 'xyes'], + [ dnl Try some common include paths. + for _ax_include_path in _AX_LUA_INCLUDE_LIST; do + test ! -d "$_ax_include_path" && continue + + AC_MSG_CHECKING([for Lua headers in]) + AC_MSG_RESULT([$_ax_include_path]) + + AS_UNSET([ac_cv_header_lua_h]) + AS_UNSET([ac_cv_header_lualib_h]) + AS_UNSET([ac_cv_header_lauxlib_h]) + AS_UNSET([ac_cv_header_luaconf_h]) + + _ax_lua_saved_cppflags=$CPPFLAGS + CPPFLAGS="$CPPFLAGS -I$_ax_include_path" + AC_CHECK_HEADERS([lua.h lualib.h lauxlib.h luaconf.h]) + CPPFLAGS=$_ax_lua_saved_cppflags + + AS_IF([test "x$ac_cv_header_lua_h" = 'xyes'], + [ LUA_INCLUDE="-I$_ax_include_path" + break + ]) + done + ]) + + AS_IF([test "x$ac_cv_header_lua_h" = 'xyes'], + [ dnl Make a program to print LUA_VERSION defined in the header. + dnl TODO This probably shouldn't be a runtime test. + + AC_CACHE_CHECK([for Lua header version], + [ax_cv_lua_header_version], + [ _ax_lua_saved_cppflags=$CPPFLAGS + CPPFLAGS="$CPPFLAGS $LUA_INCLUDE" + AC_RUN_IFELSE( + [ AC_LANG_SOURCE([[ +#include +#include +#include +int main(int argc, char ** argv) +{ + if(argc > 1) printf("%s", LUA_VERSION); + exit(EXIT_SUCCESS); +} +]]) + ], + [ ax_cv_lua_header_version=`./conftest$EXEEXT p | \ + sed "s|^Lua \(.*\)|\1|" | \ + grep -o "^@<:@0-9@:>@\+\\.@<:@0-9@:>@\+"` + ], + [ax_cv_lua_header_version='unknown']) + CPPFLAGS=$_ax_lua_saved_cppflags + ]) + + dnl Compare this to the previously found LUA_VERSION. 
+ AC_MSG_CHECKING([if Lua header version matches $LUA_VERSION]) + AS_IF([test "x$ax_cv_lua_header_version" = "x$LUA_VERSION"], + [ AC_MSG_RESULT([yes]) + ax_header_version_match='yes' + ], + [ AC_MSG_RESULT([no]) + ax_header_version_match='no' + ]) + ]) + + dnl Was LUA_INCLUDE specified? + AS_IF([test "x$ax_header_version_match" != 'xyes' && + test "x$LUA_INCLUDE" != 'x'], + [AC_MSG_ERROR([cannot find headers for specified LUA_INCLUDE])]) + + dnl Test the final result and run user code. + AS_IF([test "x$ax_header_version_match" = 'xyes'], [$1], + [m4_default([$2], [AC_MSG_ERROR([cannot find Lua includes])])]) +]) + +dnl AX_LUA_HEADERS_VERSION no longer exists, use AX_LUA_HEADERS. +AC_DEFUN([AX_LUA_HEADERS_VERSION], +[ + AC_MSG_WARN([[$0 is deprecated, please use AX_LUA_HEADERS]]) +]) + + +dnl ========================================================================= +dnl AX_LUA_LIBS([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +dnl ========================================================================= +AC_DEFUN([AX_LUA_LIBS], +[ + dnl TODO Should this macro also check various -L flags? + + dnl Check for LUA_VERSION. + AC_MSG_CHECKING([if LUA_VERSION is defined]) + AS_IF([test "x$LUA_VERSION" != 'x'], + [AC_MSG_RESULT([yes])], + [ AC_MSG_RESULT([no]) + AC_MSG_ERROR([cannot check Lua libs without knowing LUA_VERSION]) + ]) + + dnl Make LUA_LIB a precious variable. + AC_ARG_VAR([LUA_LIB], [The Lua library, e.g. -llua5.1]) + + AS_IF([test "x$LUA_LIB" != 'x'], + [ dnl Check that LUA_LIBS works. + _ax_lua_saved_libs=$LIBS + LIBS="$LIBS $LUA_LIB" + AC_SEARCH_LIBS([lua_load], [], + [_ax_found_lua_libs='yes'], + [_ax_found_lua_libs='no']) + LIBS=$_ax_lua_saved_libs + + dnl Check the result. + AS_IF([test "x$_ax_found_lua_libs" != 'xyes'], + [AC_MSG_ERROR([cannot find libs for specified LUA_LIB])]) + ], + [ dnl First search for extra libs. 
+ _ax_lua_extra_libs='' + + _ax_lua_saved_libs=$LIBS + LIBS="$LIBS $LUA_LIB" + AC_SEARCH_LIBS([exp], [m]) + AC_SEARCH_LIBS([dlopen], [dl]) + LIBS=$_ax_lua_saved_libs + + AS_IF([test "x$ac_cv_search_exp" != 'xno' && + test "x$ac_cv_search_exp" != 'xnone required'], + [_ax_lua_extra_libs="$_ax_lua_extra_libs $ac_cv_search_exp"]) + + AS_IF([test "x$ac_cv_search_dlopen" != 'xno' && + test "x$ac_cv_search_dlopen" != 'xnone required'], + [_ax_lua_extra_libs="$_ax_lua_extra_libs $ac_cv_search_dlopen"]) + + dnl Try to find the Lua libs. + _ax_lua_saved_libs=$LIBS + LIBS="$LIBS $LUA_LIB" + AC_SEARCH_LIBS([lua_load], [lua$LUA_VERSION lua$LUA_SHORT_VERSION lua], + [_ax_found_lua_libs='yes'], + [_ax_found_lua_libs='no'], + [$_ax_lua_extra_libs]) + LIBS=$_ax_lua_saved_libs + + AS_IF([test "x$ac_cv_search_lua_load" != 'xno' && + test "x$ac_cv_search_lua_load" != 'xnone required'], + [LUA_LIB="$ac_cv_search_lua_load $_ax_lua_extra_libs"]) + ]) + + dnl Test the result and run user code. + AS_IF([test "x$_ax_found_lua_libs" = 'xyes'], [$1], + [m4_default([$2], [AC_MSG_ERROR([cannot find Lua libs])])]) +]) + + +dnl ========================================================================= +dnl AX_LUA_READLINE([ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +dnl ========================================================================= +AC_DEFUN([AX_LUA_READLINE], +[ + AX_LIB_READLINE + AS_IF([test "x$ac_cv_header_readline_readline_h" != 'x' && + test "x$ac_cv_header_readline_history_h" != 'x'], + [ LUA_LIBS_CFLAGS="-DLUA_USE_READLINE $LUA_LIBS_CFLAGS" + $1 + ], + [$2]) +]) diff -Nru osm2pgsql-0.82.0/Makefile.am osm2pgsql-0.86.0/Makefile.am --- osm2pgsql-0.82.0/Makefile.am 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/Makefile.am 2014-10-25 06:42:27.000000000 +0000 @@ -1,10 +1,8 @@ ACLOCAL_AMFLAGS = -I m4 -DIST_SUBDIRS = legacy - bin_PROGRAMS = osm2pgsql nodecachefilereader -osm2pgsql_SOURCES = build_geometry.cpp input.c middle.h middle-ram.h output-gazetteer.h output-pgsql.c rb.c 
sanitizer.h text-tree.h build_geometry.h input.h middle-pgsql.c osm2pgsql.c output.h output-pgsql.h rb.h sprompt.c UTF8sanitizer.c expire-tiles.c keyvals.c middle-pgsql.h osmtypes.h output-null.c parse-o5m.c parse-o5m.h parse-primitive.c parse-primitive.h parse-xml2.c parse-xml2.h pgsql.c reprojection.c sprompt.h expire-tiles.h keyvals.h middle-ram.c output-gazetteer.c output-null.h pgsql.h reprojection.h text-tree.c node-ram-cache.c wildcmp.c node-ram-cache.h node-persistent-cache.c node-persistent-cache.h binarysearcharray.c binarysearcharray.h +osm2pgsql_SOURCES = build_geometry.cpp input.c middle.h middle-ram.h output-gazetteer.h output-pgsql.c rb.c sanitizer.h text-tree.h build_geometry.h input.h middle-pgsql.c osm2pgsql.c output.h output-pgsql.h rb.h sprompt.c UTF8sanitizer.c expire-tiles.c keyvals.c middle-pgsql.h osmtypes.h output-null.c parse-o5m.c parse-o5m.h parse-primitive.c parse-primitive.h parse-xml2.c parse-xml2.h pgsql.c reprojection.c sprompt.h expire-tiles.h keyvals.h middle-ram.c output-gazetteer.c output-null.h pgsql.h reprojection.h text-tree.c node-ram-cache.c wildcmp.c node-ram-cache.h node-persistent-cache.c node-persistent-cache.h binarysearcharray.c binarysearcharray.h tagtransform.c nodecachefilereader_SOURCES = node-persistent-cache-reader.c node-persistent-cache.c node-ram-cache.c binarysearcharray.c @@ -34,14 +32,18 @@ fileformat.pb-c.c fileformat.pb-c.h \ osmformat.pb-c.c osmformat.pb-c.h +CLEANFILES = \ + fileformat.pb-c.c fileformat.pb-c.h \ + osmformat.pb-c.c osmformat.pb-c.h + endif osm2pgsqldir = $(datadir)/osm2pgsql -AM_CFLAGS = @PTHREAD_CFLAGS@ @LFS_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ @PROTOBUF_C_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -DVERSION='"@PACKAGE_VERSION@"' -AM_CPPFLAGS = @PTHREAD_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -Igeos-fallback +AM_CFLAGS = @PTHREAD_CFLAGS@ @LFS_CFLAGS@ 
@POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ @PROTOBUF_C_CFLAGS@ @ZLIB_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -DVERSION='"@PACKAGE_VERSION@"' @LUA_INCLUDE@ +AM_CPPFLAGS = @PTHREAD_CFLAGS@ @POSTGRESQL_CFLAGS@ @XML2_CFLAGS@ @BZIP2_CFLAGS@ @GEOS_CFLAGS@ @PROJ_CFLAGS@ -DOSM2PGSQL_DATADIR='"$(osm2pgsqldir)"' -Igeos-fallback @LUA_INCLUDE@ -AM_LDFLAGS = @PTHREAD_CFLAGS@ @ZLIB_LDFLAGS@ @ZLIB_LIBS@ @POSTGRESQL_LDFLAGS@ @POSTGRESQL_LIBS@ @XML2_LDFLAGS@ @BZIP2_LDFLAGS@ @BZIP2_LIBS@ @GEOS_LDFLAGS@ @GEOS_LIBS@ @PROJ_LDFLAGS@ @PROJ_LIBS@ @PROTOBUF_C_LDFLAGS@ @PROTOBUF_C_LIBS@ +AM_LDFLAGS = @PTHREAD_CFLAGS@ @ZLIB_LDFLAGS@ @ZLIB_LIBS@ @POSTGRESQL_LDFLAGS@ @POSTGRESQL_LIBS@ @XML2_LDFLAGS@ @BZIP2_LDFLAGS@ @BZIP2_LIBS@ @GEOS_LDFLAGS@ @GEOS_LIBS@ @PROJ_LDFLAGS@ @PROJ_LIBS@ @PROTOBUF_C_LDFLAGS@ @PROTOBUF_C_LIBS@ -L/usr/lib/x86_64-linux-gnu @LUA_LIB@ osm2pgsql_DATA = default.style 900913.sql @@ -63,3 +65,7 @@ distclean-local: @rm -f $(PACKAGE).spec @rm -f config.nice + +test: + tests/regression-test.py -f tests/liechtenstein-2013-08-03.osm.pbf + tests/regression-test.py -f tests/liechtenstein-2013-08-03.osm.bz2 diff -Nru osm2pgsql-0.82.0/middle-pgsql.c osm2pgsql-0.86.0/middle-pgsql.c --- osm2pgsql-0.82.0/middle-pgsql.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/middle-pgsql.c 2014-10-25 06:42:27.000000000 +0000 @@ -15,6 +15,7 @@ #include #include #include +#include #ifdef HAVE_PTHREAD #include @@ -84,8 +85,8 @@ .prepare = "PREPARE insert_node (" POSTGRES_OSMID_TYPE ", double precision, double precision, text[]) AS INSERT INTO %p_nodes VALUES ($1,$2,$3,$4);\n" #endif "PREPARE get_node (" POSTGRES_OSMID_TYPE ") AS SELECT lat,lon,tags FROM %p_nodes WHERE id = $1 LIMIT 1;\n" + "PREPARE get_node_list(" POSTGRES_OSMID_TYPE "[]) AS SELECT id, lat, lon FROM %p_nodes WHERE id = ANY($1::" POSTGRES_OSMID_TYPE "[]);\n" "PREPARE delete_node (" POSTGRES_OSMID_TYPE ") AS DELETE FROM %p_nodes WHERE id = $1;\n", -.prepare_intarray = "PREPARE 
get_node_list(" POSTGRES_OSMID_TYPE "[]) AS SELECT id, lat, lon FROM %p_nodes WHERE id = ANY($1::" POSTGRES_OSMID_TYPE "[])", .copy = "COPY %p_nodes FROM STDIN;\n", .analyze = "ANALYZE %p_nodes;\n", .stop = "COMMIT;\n" @@ -103,7 +104,8 @@ "PREPARE way_done(" POSTGRES_OSMID_TYPE ") AS UPDATE %p_ways SET pending = false WHERE id = $1;\n" "PREPARE pending_ways AS SELECT id FROM %p_ways WHERE pending;\n" "PREPARE delete_way(" POSTGRES_OSMID_TYPE ") AS DELETE FROM %p_ways WHERE id = $1;\n", -.prepare_intarray = "PREPARE node_changed_mark(" POSTGRES_OSMID_TYPE ") AS UPDATE %p_ways SET pending = true WHERE nodes && ARRAY[$1] AND NOT pending;\n", +.prepare_intarray = "PREPARE node_changed_mark(" POSTGRES_OSMID_TYPE ") AS UPDATE %p_ways SET pending = true WHERE nodes && ARRAY[$1] AND NOT pending;\n" + "PREPARE rel_delete_mark(" POSTGRES_OSMID_TYPE ") AS UPDATE %p_ways SET pending = true WHERE id IN (SELECT unnest(parts[way_off+1:rel_off]) FROM %p_rels WHERE id = $1) AND NOT pending;\n", .copy = "COPY %p_ways FROM STDIN;\n", .analyze = "ANALYZE %p_ways;\n", .stop = "COMMIT;\n" @@ -467,6 +469,7 @@ } +#if 0 static int pgsql_nodes_get(struct osmNode *out, osmid_t id) { PGresult *res; @@ -497,6 +500,7 @@ PQclear(res); return 0; } +#endif /* Currently not used static int middle_nodes_get(struct osmNode *out, osmid_t id) @@ -872,9 +876,9 @@ if (pid==-1) { #if HAVE_MMAP info[p].finished = HELPER_STATE_FAILED; - fprintf(stderr,"WARNING: Failed to fork helper processes %i. Trying to recover.\n", p); + fprintf(stderr,"WARNING: Failed to fork helper process %i: %s. Trying to recover.\n", p, strerror(errno)); #else - fprintf(stderr,"ERROR: Failed to fork helper processes. Can't recover! \n"); + fprintf(stderr,"ERROR: Failed to fork helper process %i: %s. 
Can't recover!\n", p, strerror(errno)); exit_nicely(); #endif } @@ -1212,10 +1216,12 @@ char const *paramValues[1]; char buffer[64]; /* Make sure we're out of copy mode */ + pgsql_endCopy( way_table ); pgsql_endCopy( rel_table ); sprintf( buffer, "%" PRIdOSMID, osm_id ); paramValues[0] = buffer; + pgsql_execPrepared(way_table->sql_conn, "rel_delete_mark", 1, paramValues, PGRES_COMMAND_OK ); pgsql_execPrepared(rel_table->sql_conn, "delete_rel", 1, paramValues, PGRES_COMMAND_OK ); return 0; } @@ -1682,7 +1688,7 @@ pgsql_exec(sql_conn, PGRES_COMMAND_OK, "%s", tables[i].prepare); } - if (tables[i].prepare_intarray) { + if (Append && tables[i].prepare_intarray) { pgsql_exec(sql_conn, PGRES_COMMAND_OK, "%s", tables[i].prepare_intarray); } diff -Nru osm2pgsql-0.82.0/middle-ram.c osm2pgsql-0.86.0/middle-ram.c --- osm2pgsql-0.82.0/middle-ram.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/middle-ram.c 2014-10-25 06:42:27.000000000 +0000 @@ -58,7 +58,6 @@ static struct ramWay *ways[NUM_BLOCKS]; static struct ramRel *rels[NUM_BLOCKS]; -static int node_blocks; static int way_blocks; static int way_out_count; diff -Nru osm2pgsql-0.82.0/node-persistent-cache.c osm2pgsql-0.86.0/node-persistent-cache.c --- osm2pgsql-0.82.0/node-persistent-cache.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/node-persistent-cache.c 2014-10-25 06:42:27.000000000 +0000 @@ -563,7 +563,7 @@ void init_node_persistent_cache(const struct output_options *options, int append) { - int i; + int i, err; scale = options->scale; append_mode = append; node_cache_fname = options->flat_node_file; @@ -610,12 +610,17 @@ { #ifdef HAVE_POSIX_FALLOCATE - if (posix_fallocate(node_cache_fd, 0, - sizeof(struct ramNode) * MAXIMUM_INITIAL_ID) != 0) + if ((err = posix_fallocate(node_cache_fd, 0, + sizeof(struct ramNode) * MAXIMUM_INITIAL_ID)) != 0) { - fprintf(stderr, - "Failed to allocate space for node cache file: %s\n", - strerror(errno)); + if (err == ENOSPC) { + fprintf(stderr, "Failed to allocate 
space for node cache file: No space on disk\n"); + } else if (err == EFBIG) { + fprintf(stderr, "Failed to allocate space for node cache file: File is too big\n"); + } else { + fprintf(stderr, "Failed to allocate space for node cache file: Internal error %i\n", err); + } + close(node_cache_fd); exit_nicely(); } diff -Nru osm2pgsql-0.82.0/node-persistent-cache.h osm2pgsql-0.86.0/node-persistent-cache.h --- osm2pgsql-0.82.0/node-persistent-cache.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/node-persistent-cache.h 2014-10-25 06:42:27.000000000 +0000 @@ -1,4 +1,7 @@ -#define MAXIMUM_INITIAL_ID (1L << 31) +#ifndef NODE_PERSISTENT_CACHE_H +#define NODE_PERSISTENT_CACHE_H + +#define MAXIMUM_INITIAL_ID 2600000000 #define READ_NODE_CACHE_SIZE 10000 #define READ_NODE_BLOCK_SHIFT 10l @@ -22,3 +25,5 @@ int persistent_cache_nodes_get_list(struct osmNode *nodes, osmid_t *ndids, int nd_count); void init_node_persistent_cache(const struct output_options *options, const int append); void shutdown_node_persistent_cache(); + +#endif diff -Nru osm2pgsql-0.82.0/node-ram-cache.c osm2pgsql-0.86.0/node-ram-cache.c --- osm2pgsql-0.82.0/node-ram-cache.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/node-ram-cache.c 2014-10-25 06:42:27.000000000 +0000 @@ -164,6 +164,8 @@ int offset = id2offset(id); int i = 0; + if (maxBlocks == 0) return 1; + if (!blocks[block].nodes) { if (((allocStrategy & ALLOC_SPARSE) > 0) && ( usedBlocks < maxBlocks) && ( cacheUsed > cacheSize)) { /* TODO: It is more memory efficient to drop nodes from the sparse node cache than from the dense node cache */ @@ -353,8 +355,8 @@ cacheUsed = 0; cacheSize = (int64_t)cacheSizeMB*(1024*1024); /* How much we can fit, and make sure it's odd */ - maxBlocks = (cacheSize/(PER_BLOCK*sizeof(struct ramNode))) | 1; - maxSparseTuples = (cacheSize/sizeof(struct ramNodeID)) | 1; + maxBlocks = (cacheSize/(PER_BLOCK*sizeof(struct ramNode))); + maxSparseTuples = (cacheSize/sizeof(struct ramNodeID))+1; allocStrategy 
= strategy; scale = fixpointscale; @@ -432,6 +434,7 @@ free(blockCache); blockCache = 0; } + free(blocks); free(queue); } if ( ((allocStrategy & ALLOC_SPARSE) > 0) && ((allocStrategy & ALLOC_DENSE) == 0)) { diff -Nru osm2pgsql-0.82.0/node-ram-cache.h osm2pgsql-0.86.0/node-ram-cache.h --- osm2pgsql-0.82.0/node-ram-cache.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/node-ram-cache.h 2014-10-25 06:42:27.000000000 +0000 @@ -17,7 +17,7 @@ /* Scale is chosen such that 40,000 * SCALE < 2^32 */ #define FIXED_POINT static int scale = 100; -#define DOUBLE_TO_FIX(x) ((int)((x) * scale)) +#define DOUBLE_TO_FIX(x) ((int)((x) * scale + 0.4)) #define FIX_TO_DOUBLE(x) (((double)x) / scale) #define UNUSED __attribute__ ((unused)) diff -Nru osm2pgsql-0.82.0/osm2pgsql.c osm2pgsql-0.86.0/osm2pgsql.c --- osm2pgsql-0.82.0/osm2pgsql.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/osm2pgsql.c 2014-10-25 06:42:27.000000000 +0000 @@ -116,129 +116,160 @@ static void long_usage(char *arg0) { - int i; const char *name = basename(arg0); printf("Usage:\n"); printf("\t%s [options] planet.osm\n", name); - printf("\t%s [options] planet.osm.{gz,bz2}\n", name); + printf("\t%s [options] planet.osm.{pbf,gz,bz2}\n", name); printf("\t%s [options] file1.osm file2.osm file3.osm\n", name); printf("\nThis will import the data from the OSM file(s) into a PostgreSQL database\n"); - printf("suitable for use by the Mapnik renderer\n"); - printf("\nOptions:\n"); - printf(" -a|--append\t\tAdd the OSM file into the database without removing\n"); - printf(" \t\texisting data.\n"); - printf(" -b|--bbox\t\tApply a bounding box filter on the imported data\n"); - printf(" \t\tMust be specified as: minlon,minlat,maxlon,maxlat\n"); - printf(" \t\te.g. --bbox -0.5,51.25,0.5,51.75\n"); - printf(" -c|--create\t\tRemove existing data from the database. 
This is the \n"); - printf(" \t\tdefault if --append is not specified.\n"); - printf(" -d|--database\tThe name of the PostgreSQL database to connect\n"); - printf(" \t\tto (default: gis).\n"); - printf(" -i|--tablespace-index\tThe name of the PostgreSQL tablespace where\n"); - printf(" \t\tall indexes will be created.\n"); - printf(" \t\tThe following options allow more fine-grained control:\n"); - printf(" --tablespace-main-data \ttablespace for main tables\n"); - printf(" --tablespace-main-index\ttablespace for main table indexes\n"); - printf(" --tablespace-slim-data \ttablespace for slim mode tables\n"); - printf(" --tablespace-slim-index\ttablespace for slim mode indexes\n"); - printf(" \t\t(if unset, use db's default; -i is equivalent to setting\n"); - printf(" \t\t--tablespace-main-index and --tablespace-slim-index)\n"); - printf(" -l|--latlong\t\tStore data in degrees of latitude & longitude.\n"); - printf(" -m|--merc\t\tStore data in proper spherical mercator (default)\n"); - printf(" -M|--oldmerc\t\tStore data in the legacy OSM mercator format\n"); - printf(" -E|--proj num\tUse projection EPSG:num\n"); - printf(" -u|--utf8-sanitize\tRepair bad UTF8 input data (present in planet\n"); - printf(" \tdumps prior to August 2007). Adds about 10%% overhead.\n"); - printf(" -p|--prefix\t\tPrefix for table names (default planet_osm)\n"); - printf(" -s|--slim\t\tStore temporary data in the database. This greatly\n"); - printf(" \t\treduces the RAM usage but is much slower. This switch is\n"); - printf(" \t\trequired if you want to update with --append later.\n"); - - if (sizeof(int*) == 4) { - printf(" \t\tThis program was compiled on a 32bit system, so at most\n"); - printf(" \t\t3GB of RAM will be used. If you encounter problems\n"); - printf(" \t\tduring import, you should try this switch.\n"); - } - printf(" --drop\t\tonly with --slim: drop temporary tables after import (no updates).\n"); - - printf(" -S|--style\t\tLocation of the style file. 
Defaults to " OSM2PGSQL_DATADIR "/default.style\n"); - printf(" -C|--cache\t\tNow required for slim and non-slim modes: \n"); - printf(" \t\tUse up to this many MB for caching nodes (default: 800)\n"); - printf(" -U|--username\tPostgresql user name\n"); - printf(" \t\tpassword can be given by prompt or PGPASS environment variable.\n"); - printf(" -W|--password\tForce password prompt.\n"); - printf(" -H|--host\t\tDatabase server hostname or socket location.\n"); - printf(" -P|--port\t\tDatabase server port.\n"); - printf(" -e|--expire-tiles [min_zoom-]max_zoom\tCreate a tile expiry list.\n"); - printf(" -o|--expire-output filename\tOutput filename for expired tiles list.\n"); - printf(" -r|--input-reader\tInput frontend.\n"); - printf(" \t\tlibxml2 - Parse XML using libxml2. (default)\n"); - printf(" \t\tprimitive - Primitive XML parsing.\n"); -#ifdef BUILD_READER_PBF - printf(" \t\tpbf - OSM binary format.\n"); -#endif - printf(" -O|--output\t\tOutput backend.\n"); - printf(" \t\tpgsql - Output to a PostGIS database. (default)\n"); - printf(" \t\tgazetteer - Output to a PostGIS database suitable for gazetteer\n"); - printf(" \t\tnull - No output. 
Useful for testing.\n"); - printf(" -x|--extra-attributes\n"); - printf(" \t\tInclude attributes for each object in the database.\n"); - printf(" \t\tThis includes the username, userid, timestamp and version.\n"); - printf(" \t\tNote: this option also requires additional entries in your style file.\n"); - printf(" -k|--hstore\t\tAdd tags without column to an additional hstore (key/value) column to postgresql tables\n"); - printf(" --hstore-match-only\tOnly keep objects that have a value in one of the columns\n"); - printf(" - \t(normal action with --hstore is to keep all objects)\n"); - printf(" -j|--hstore-all\tAdd all tags to an additional hstore (key/value) column in postgresql tables\n"); - printf(" -z|--hstore-column\tAdd an additional hstore (key/value) column containing all tags\n"); - printf(" \tthat start with the specified string, eg --hstore-column \"name:\" will\n"); - printf(" \tproduce an extra hstore column that contains all name:xx tags\n"); - printf(" --hstore-add-index\tAdd index to hstore column.\n"); - printf(" -G|--multi-geometry\tGenerate multi-geometry features in postgresql tables.\n"); - printf(" -K|--keep-coastlines\tKeep coastline data rather than filtering it out.\n"); - printf(" \t\tBy default natural=coastline tagged data will be discarded based on the\n"); - printf(" \t\tassumption that post-processed Coastline Checker shapefiles will be used.\n"); - printf(" --exclude-invalid-polygon\n"); -#ifdef HAVE_FORK - printf(" --number-processes\t\tSpecifies the number of parallel processes used for certain operations\n"); - printf(" \t\tDefault is 1\n"); -#endif - printf(" -I|--disable-parallel-indexing\tDisable indexing all tables concurrently.\n"); - printf(" --unlogged\tUse unlogged tables (lost on crash but faster). 
Requires PostgreSQL 9.1.\n"); - printf(" --cache-strategy\tSpecifies the method used to cache nodes in ram.\n"); - printf(" \t\tAvailable options are:\n"); - printf(" \t\tdense: caching strategy optimised for full planet import\n"); - printf(" \t\tchunked: caching strategy optimised for non-contigouse memory allocation\n"); - printf(" \t\tsparse: caching strategy optimised for small extracts\n"); - printf(" \t\toptimized: automatically combines dense and sparse strategies for optimal storage efficiency.\n"); - printf(" \t\t optimized may use twice as much virtual memory, but no more physical memory\n"); + printf("suitable for use by the Mapnik renderer.\n\n"); + + printf("%s", "\ +Common options:\n\ + -a|--append Add the OSM file into the database without removing\n\ + existing data.\n\ + -c|--create Remove existing data from the database. This is the\n\ + default if --append is not specified.\n\ + -l|--latlong Store data in degrees of latitude & longitude.\n\ + -m|--merc Store data in proper spherical mercator (default).\n\ + -E|--proj num Use projection EPSG:num.\n\ + -s|--slim Store temporary data in the database. This greatly\n\ + reduces the RAM usage but is much slower. This switch is\n\ + required if you want to update with --append later.\n\ + -S|--style Location of the style file. 
Defaults to\n"); + printf(" %s/default.style.\n", OSM2PGSQL_DATADIR); + printf("%s", "\ + -C|--cache Use up to this many MB for caching nodes (default: 800)\n\ +\n\ +Database options:\n\ + -d|--database The name of the PostgreSQL database to connect\n\ + to (default: gis).\n\ + -U|--username PostgreSQL user name (specify password in PGPASS\n\ + environment variable or use -W).\n\ + -W|--password Force password prompt.\n\ + -H|--host Database server host name or socket location.\n\ + -P|--port Database server port.\n"); + if (verbose) + { + printf("%s", "\ +Hstore options:\n\ + -k|--hstore Add tags without column to an additional hstore\n\ + (key/value) column\n\ + --hstore-match-only Only keep objects that have a value in one of\n\ + the columns (default with --hstore is to keep all objects)\n\ + -j|--hstore-all Add all tags to an additional hstore (key/value) column\n\ + -z|--hstore-column Add an additional hstore (key/value) column containing\n\ + all tags that start with the specified string, eg\n\ + --hstore-column \"name:\" will produce an extra hstore\n\ + column that contains all name:xx tags\n\ + --hstore-add-index Add index to hstore column.\n\ +\n\ +Obsolete options:\n\ + -u|--utf8-sanitize Repair bad UTF8 input data (present in planet\n\ + dumps prior to August 2007). 
Adds about 10% overhead.\n\ + -M|--oldmerc Store data in the legacy OSM mercator format\n\ +\n\ +Performance options:\n\ + -i|--tablespace-index The name of the PostgreSQL tablespace where\n\ + all indexes will be created.\n\ + The following options allow more fine-grained control:\n\ + --tablespace-main-data tablespace for main tables\n\ + --tablespace-main-index tablespace for main table indexes\n\ + --tablespace-slim-data tablespace for slim mode tables\n\ + --tablespace-slim-index tablespace for slim mode indexes\n\ + (if unset, use db's default; -i is equivalent to setting\n\ + --tablespace-main-index and --tablespace-slim-index)\n\ + --drop only with --slim: drop temporary tables after import \n\ + (no updates are possible).\n\ + --number-processes Specifies the number of parallel processes \n\ + used for certain operations (default is 1).\n\ + -I|--disable-parallel-indexing Disable indexing all tables concurrently.\n\ + --unlogged Use unlogged tables (lost on crash but faster). \n\ + Requires PostgreSQL 9.1.\n\ + --cache-strategy Specifies the method used to cache nodes in ram.\n\ + Available options are:\n\ + dense: caching strategy optimised for full planet import\n\ + chunk: caching strategy optimised for non-contiguous \n\ + memory allocation\n\ + sparse: caching strategy optimised for small extracts\n\ + optimized: automatically combines dense and sparse \n\ + strategies for optimal storage efficiency. 
This may\n\ + us twice as much virtual memory, but no more physical \n\ + memory.\n"); #ifdef __amd64__ - printf(" \t\t The default is \"optimized\"\n"); + printf(" The default is \"optimized\"\n"); #else /* use "chunked" as a default in 32 bit compilations, as it is less wasteful of virtual memory than "optimized"*/ - printf(" \t\t The default is \"sparse\"\n"); + printf(" The default is \"sparse\"\n"); #endif - printf(" --flat-nodes\tSpecifies the flat file to use to persistently store node information in slim mode instead of in pgsql\n"); - printf(" \t\tThis file is a single > 16Gb large file. This method is only recomended for full planet imports\n"); - printf(" \t\tas it doesn't work well with small extracts. The default is disabled\n"); - printf(" -h|--help\t\tHelp information.\n"); - printf(" -v|--verbose\t\tVerbose output.\n"); - printf("\n"); - if(!verbose) - { - printf("Add -v to display supported projections.\n"); - printf("Use -E to access any espg projections (usually in /usr/share/proj/epsg)\n" ); + printf("%s", "\ + --flat-nodes Specifies the flat file to use to persistently store node \n\ + information in slim mode instead of in PostgreSQL.\n\ + This file is a single > 16Gb large file. Only recommended\n\ + for full planet imports. Default is disabled.\n\ +\n\ +Expiry options:\n\ + -e|--expire-tiles [min_zoom-]max_zoom Create a tile expiry list.\n\ + -o|--expire-output filename Output filename for expired tiles list.\n\ +\n\ +Other options:\n\ + -b|--bbox Apply a bounding box filter on the imported data\n\ + Must be specified as: minlon,minlat,maxlon,maxlat\n\ + e.g. --bbox -0.5,51.25,0.5,51.75\n\ + -p|--prefix Prefix for table names (default planet_osm)\n\ + -r|--input-reader Input frontend.\n\ + libxml2 - Parse XML using libxml2. (default)\n\ + primitive - Primitive XML parsing.\n"); +#ifdef BUILD_READER_PBF + printf(" pbf - OSM binary format.\n"); +#endif + printf("\ + -O|--output Output backend.\n\ + pgsql - Output to a PostGIS database. 
(default)\n\ + gazetteer - Output to a PostGIS database for Nominatim\n\ + null - No output. Useful for testing.\n"); +#ifdef HAVE_LUA + printf("\ + --tag-transform-script Specify a lua script to handle tag filtering and normalisation\n\ + The script contains callback functions for nodes, ways and relations, which each\n\ + take a set of tags and returns a transformed, filtered set of tags which are then\n\ + written to the database.\n"); +#endif + printf("\ + -x|--extra-attributes\n\ + Include attributes for each object in the database.\n\ + This includes the username, userid, timestamp and version.\n\ + Requires additional entries in your style file.\n\ + -G|--multi-geometry Generate multi-geometry features in postgresql tables.\n\ + -K|--keep-coastlines Keep coastline data rather than filtering it out.\n\ + By default natural=coastline tagged data will be discarded\n\ + because renderers usually have shape files for them.\n\ + --exclude-invalid-polygon do not import polygons with invalid geometries.\n\ + -h|--help Help information.\n\ + -v|--verbose Verbose output.\n"); } else { - printf("Supported projections:\n" ); - for(i=0; i -k \\\n", name); + printf(" --flat-nodes planet-latest.osm.pbf\n"); + printf("where\n"); + printf(" is 20000 on machines with 24GB or more RAM \n"); + printf(" or about 75%% of memory in MB on machines with less\n"); + printf(" is a location where a 19GB file can be saved.\n"); + printf("\n"); + printf("A typical command to update a database imported with the above command is\n"); + printf(" osmosis --rri workingDirectory= --simc --wx - \\\n"); + printf(" | %s -a -d gis --slim -k --flat-nodes \n", name); + printf("where\n"); + printf(" is the same location as above.\n"); + printf(" is the location osmosis replication was initialized to.\n"); + printf("\nRun %s --help --verbose (-h -v) for a full list of options.\n", name); } + } const char *build_conninfo(const char *db, const char *username, const char *password, const char *host, 
const char *port) @@ -394,6 +425,7 @@ const char *input_reader = "auto"; const char **hstore_columns = NULL; const char *flat_nodes_file = NULL; + const char *tag_transform_script = NULL; int n_hstore_columns = 0; int keep_coastlines=0; int cache = 800; @@ -451,6 +483,7 @@ {"unlogged", 0, 0, 207}, {"flat-nodes",1,0,209}, {"exclude-invalid-polygon",0,0,210}, + {"tag-transform-script",1,0,212}, {0, 0, 0, 0} }; @@ -492,9 +525,11 @@ case 'o': expire_tiles_filename=optarg; break; case 'O': output_backend = optarg; break; case 'x': osmdata.extra_attributes=1; break; - case 'k': enable_hstore=HSTORE_NORM; break; + case 'k': if (enable_hstore != HSTORE_NONE) { fprintf(stderr, "ERROR: You can not specify both --hstore (-k) and --hstore-all (-j)\n"); exit (EXIT_FAILURE); } + enable_hstore=HSTORE_NORM; break; case 208: hstore_match_only = 1; break; - case 'j': enable_hstore=HSTORE_ALL; break; + case 'j': if (enable_hstore != HSTORE_NONE) { fprintf(stderr, "ERROR: You can not specify both --hstore (-k) and --hstore-all (-j)\n"); exit (EXIT_FAILURE); } + enable_hstore=HSTORE_ALL; break; case 'z': n_hstore_columns++; hstore_columns = (const char**)realloc(hstore_columns, sizeof(char *) * n_hstore_columns); @@ -510,9 +545,10 @@ break; case 204: if (strcmp(optarg,"dense") == 0) alloc_chunkwise = ALLOC_DENSE; - if (strcmp(optarg,"chunk") == 0) alloc_chunkwise = ALLOC_DENSE | ALLOC_DENSE_CHUNK; - if (strcmp(optarg,"sparse") == 0) alloc_chunkwise = ALLOC_SPARSE; - if (strcmp(optarg,"optimized") == 0) alloc_chunkwise = ALLOC_DENSE | ALLOC_SPARSE; + else if (strcmp(optarg,"chunk") == 0) alloc_chunkwise = ALLOC_DENSE | ALLOC_DENSE_CHUNK; + else if (strcmp(optarg,"sparse") == 0) alloc_chunkwise = ALLOC_SPARSE; + else if (strcmp(optarg,"optimized") == 0) alloc_chunkwise = ALLOC_DENSE | ALLOC_SPARSE; + else {fprintf(stderr, "ERROR: Unrecognized cache strategy %s.\n", optarg); exit(EXIT_FAILURE); } break; case 205: #ifdef HAVE_FORK @@ -529,6 +565,7 @@ break; case 210: excludepoly = 1; 
exclude_broken_polygon(); break; case 211: enable_hstore_index = 1; break; + case 212: tag_transform_script = optarg; break; case 'V': exit(EXIT_SUCCESS); case '?': default: @@ -575,6 +612,10 @@ if (cache < 0) cache = 0; + if (cache == 0) { + fprintf(stderr, "WARNING: ram cache is disabled. This will likely slow down processing a lot.\n\n"); + } + if (num_procs < 1) num_procs = 1; if (pass_prompt) @@ -646,6 +687,7 @@ options.flat_node_cache_enabled = flat_node_cache_enabled; options.flat_node_file = flat_nodes_file; options.excludepoly = excludepoly; + options.tag_transform_script = tag_transform_script; if (strcmp("pgsql", output_backend) == 0) { osmdata.out = &out_pgsql; @@ -705,7 +747,7 @@ fprintf(stderr, "ERROR: PBF support has not been compiled into this version of osm2pgsql, please either compile it with pbf support or use one of the other input formats\n"); exit(EXIT_FAILURE); #endif - } else if (strcasecmp(".o5m",argv[optind]+strlen(argv[optind])-4) == 0) { + } else if (strcasecmp(".o5m",argv[optind]+strlen(argv[optind])-4) == 0 || strcasecmp(".o5c",argv[optind]+strlen(argv[optind])-4) == 0) { streamFile = &streamFileO5m; } else { streamFile = &streamFileXML2; diff -Nru osm2pgsql-0.82.0/output-gazetteer.c osm2pgsql-0.86.0/output-gazetteer.c --- osm2pgsql-0.82.0/output-gazetteer.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/output-gazetteer.c 2014-10-25 06:42:27.000000000 +0000 @@ -38,6 +38,7 @@ " admin_level INTEGER," \ " housenumber TEXT," \ " street TEXT," \ + " addr_place TEXT," \ " isin TEXT," \ " postcode TEXT," \ " country_code VARCHAR(2)," \ @@ -63,9 +64,6 @@ static PGconn *ConnectionDelete = NULL; static PGconn *ConnectionError = NULL; -static int CopyErrorActive = 0; -static char BufferError[BUFFER_SIZE]; -static unsigned int BufferErrorLen = 0; static FILE * hLog = NULL; @@ -87,7 +85,7 @@ /* Make sure we have an active copy */ if (!CopyActive) { - pgsql_exec(Connection, PGRES_COPY_IN, "COPY place FROM STDIN"); + pgsql_exec(Connection, 
PGRES_COPY_IN, "COPY place (osm_type, osm_id, class, type, name, admin_level, housenumber, street, addr_place, isin, postcode, country_code, extratags, geometry) FROM STDIN"); CopyActive = 1; } @@ -156,6 +154,7 @@ return; } +#if 0 static void copy_error_data(const char *sql) { unsigned int sqlLen = strlen(sql); @@ -233,10 +232,12 @@ return; } +#endif static int split_tags(struct keyval *tags, unsigned int flags, struct keyval *names, struct keyval *places, struct keyval *extratags, - int* admin_level, struct keyval ** housenumber, struct keyval ** street, char ** isin, struct keyval ** postcode, struct keyval ** countrycode) + int* admin_level, struct keyval ** housenumber, struct keyval ** street, struct keyval ** addr_place, char ** isin, struct keyval ** postcode, struct keyval ** countrycode) { + size_t subval; int placehouse = 0; int placebuilding = 0; int placeadmin = 0; @@ -249,6 +250,7 @@ *admin_level = ADMINLEVEL_NONE; *housenumber = 0; *street = 0; + *addr_place = 0; *isin = 0; int isinsize = 0; *postcode = 0; @@ -298,6 +300,7 @@ strcmp(item->key, "old_name") == 0 || (strncmp(item->key, "old_name:", 9) == 0) || strcmp(item->key, "alt_name") == 0 || + (strncmp(item->key, "alt_name_", 9) == 0) || (strncmp(item->key, "alt_name:", 9) == 0) || strcmp(item->key, "official_name") == 0 || (strncmp(item->key, "official_name:", 14) == 0) || @@ -320,24 +323,48 @@ pushItem(names, item); } } + else if (strcmp(item->key, "emergency") == 0 || + strcmp(item->key, "tourism") == 0 || + strcmp(item->key, "historic") == 0 || + strcmp(item->key, "military") == 0 || + strcmp(item->key, "natural") == 0) + { + if (strcmp(item->value, "no") && strcmp(item->value, "yes")) + { + pushItem(places, item); + } + else + { + freeItem(item); + } + } + else if (strcmp(item->key, "highway") == 0) + { + if (strcmp(item->value, "no") && + strcmp(item->value, "turning_circle") && + strcmp(item->value, "traffic_signals") && + strcmp(item->value, "mini_roundabout") && + strcmp(item->value, 
"noexit") && + strcmp(item->value, "crossing")) + { + pushItem(places, item); + } + else + { + freeItem(item); + } + } else if (strcmp(item->key, "aerialway") == 0 || strcmp(item->key, "aeroway") == 0 || strcmp(item->key, "amenity") == 0 || strcmp(item->key, "boundary") == 0 || strcmp(item->key, "bridge") == 0 || strcmp(item->key, "craft") == 0 || - strcmp(item->key, "emergency") == 0 || - strcmp(item->key, "highway") == 0 || - strcmp(item->key, "historic") == 0 || strcmp(item->key, "leisure") == 0 || - strcmp(item->key, "military") == 0 || - strcmp(item->key, "natural") == 0 || strcmp(item->key, "office") == 0 || strcmp(item->key, "railway") == 0 || strcmp(item->key, "shop") == 0 || - strcmp(item->key, "tourism") == 0 || - strcmp(item->key, "tunnel") == 0 || - strcmp(item->key, "waterway") == 0 ) + strcmp(item->key, "tunnel") == 0 ) { if (strcmp(item->value, "no")) { @@ -352,6 +379,11 @@ freeItem(item); } } + else if (strcmp(item->key, "waterway") == 0 && + strcmp(item->value, "riverbank") != 0) + { + pushItem(places, item); + } else if (strcmp(item->key, "place") == 0) { place = item; @@ -384,6 +416,10 @@ { *street = item; } + else if (strcmp(item->key, "addr:place") == 0) + { + *addr_place = item; + } else if ((strcmp(item->key, "country_code_iso3166_1_alpha_2") == 0 || strcmp(item->key, "country_code_iso3166_1") == 0 || strcmp(item->key, "country_code_iso3166") == 0 || @@ -437,11 +473,24 @@ addItem(places, "place", "houses", 1); } } + else if (strcmp(item->key, "tiger:county") == 0) + { + /* strip the state and replace it with a county suffix to ensure that + the tag only matches against counties and not against some town + with the same name. 
+ */ + subval = strcspn(item->value, ","); + *isin = realloc(*isin, isinsize + 9 + subval); + *(*isin+isinsize) = ','; + strncpy(*isin+1+isinsize, item->value, subval); + strcpy(*isin+1+isinsize+subval, " county"); + isinsize += 8 + subval; + freeItem(item); + } else if (strcmp(item->key, "is_in") == 0 || (strncmp(item->key, "is_in:", 5) == 0) || - strcmp(item->key, "addr:country")== 0 || + strcmp(item->key, "addr:suburb")== 0 || strcmp(item->key, "addr:county")== 0 || - strcmp(item->key, "tiger:county")== 0 || strcmp(item->key, "addr:city") == 0 || strcmp(item->key, "addr:state_code") == 0 || strcmp(item->key, "addr:state") == 0) @@ -612,13 +661,13 @@ if (landuse) { - if (!listHasData(places)) + if (!listHasData(places) && listHasData(names)) { pushItem(places, landuse); } else { - freeItem(item); + freeItem(landuse); } } @@ -708,7 +757,7 @@ } static void add_place(char osm_type, osmid_t osm_id, const char *class, const char *type, struct keyval *names, struct keyval *extratags, - int adminlevel, struct keyval *housenumber, struct keyval *street, const char *isin, struct keyval *postcode, struct keyval *countrycode, const char *wkt) + int adminlevel, struct keyval *housenumber, struct keyval *street, struct keyval *addr_place, const char *isin, struct keyval *postcode, struct keyval *countrycode, const char *wkt) { int first; struct keyval *name; @@ -779,6 +828,17 @@ copy_data("\\N\t"); } + if (addr_place) + { + escape(sql, sizeof(sql), addr_place->value); + copy_data(sql); + copy_data("\t"); + } + else + { + copy_data("\\N\t"); + } + if (isin) { /* Skip the leading ',' from the contactination */ @@ -851,6 +911,7 @@ return; } +#if 0 static void add_polygon_error(char osm_type, osmid_t osm_id, const char *class, const char *type, struct keyval *names, const char *countrycode, const char *wkt) { @@ -920,6 +981,7 @@ return; } +#endif static void delete_place(char osm_type, osmid_t osm_id) @@ -1045,6 +1107,7 @@ int adminlevel; struct keyval * housenumber; struct 
keyval * street; + struct keyval * addr_place; char * isin; struct keyval * postcode; struct keyval * countrycode; @@ -1052,7 +1115,7 @@ /* Split the tags */ - split_tags(tags, TAGINFO_NODE, &names, &places, &extratags, &adminlevel, &housenumber, &street, &isin, &postcode, &countrycode); + split_tags(tags, TAGINFO_NODE, &names, &places, &extratags, &adminlevel, &housenumber, &street, &addr_place, &isin, &postcode, &countrycode); /* Feed this node to the middle layer */ Options->mid->nodes_set(id, lat, lon, tags); @@ -1066,12 +1129,13 @@ sprintf(wkt, "POINT(%.15g %.15g)", lon, lat); for (place = firstItem(&places); place; place = nextItem(&places, place)) { - add_place('N', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, isin, postcode, countrycode, wkt); + add_place('N', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, addr_place, isin, postcode, countrycode, wkt); } } if (housenumber) freeItem(housenumber); if (street) freeItem(street); + if (addr_place) freeItem(addr_place); if (isin) free(isin); if (postcode) freeItem(postcode); if (countrycode) freeItem(countrycode); @@ -1098,6 +1162,7 @@ int adminlevel; struct keyval * housenumber; struct keyval * street; + struct keyval * addr_place; char * isin; struct keyval * postcode; struct keyval * countrycode; @@ -1105,7 +1170,7 @@ /* Split the tags */ - area = split_tags(tags, TAGINFO_WAY, &names, &places, &extratags, &adminlevel, &housenumber, &street, &isin, &postcode, &countrycode); + area = split_tags(tags, TAGINFO_WAY, &names, &places, &extratags, &adminlevel, &housenumber, &street, &addr_place, &isin, &postcode, &countrycode); /* Feed this way to the middle layer */ Options->mid->ways_set(id, ndv, ndc, tags, 0); @@ -1129,7 +1194,7 @@ { for (place = firstItem(&places); place; place = nextItem(&places, place)) { - add_place('W', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, isin, postcode, countrycode, wkt); + 
add_place('W', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, addr_place, isin, postcode, countrycode, wkt); } } @@ -1142,6 +1207,7 @@ if (housenumber) freeItem(housenumber); if (street) freeItem(street); + if (addr_place) freeItem(addr_place); if (isin) free(isin); if (postcode) freeItem(postcode); if (countrycode) freeItem(countrycode); @@ -1168,11 +1234,13 @@ int adminlevel; struct keyval * housenumber; struct keyval * street; + struct keyval * addr_place; char * isin; struct keyval * postcode; struct keyval * countrycode; int wkt_size; const char *type; + int cmp_waterway; type = getItem(tags, "type"); if (!type) { @@ -1180,14 +1248,16 @@ return 0; } - if (!strcmp(type, "associatedStreet") || !strcmp(type, "relatedStreet")) + cmp_waterway = strcmp(type, "waterway"); + + if (!strcmp(type, "associatedStreet")) { Options->mid->relations_set(id, members, member_count, tags); if (delete_old) delete_unused_classes('R', id, 0); return 0; } - if (strcmp(type, "boundary") && strcmp(type, "multipolygon")) { + if (strcmp(type, "boundary") && strcmp(type, "multipolygon") && cmp_waterway) { if (delete_old) delete_unused_classes('R', id, 0); return 0; } @@ -1195,7 +1265,7 @@ Options->mid->relations_set(id, members, member_count, tags); /* Split the tags */ - split_tags(tags, TAGINFO_AREA, &names, &places, &extratags, &adminlevel, &housenumber, &street, &isin, &postcode, &countrycode); + split_tags(tags, TAGINFO_AREA, &names, &places, &extratags, &adminlevel, &housenumber, &street, &addr_place, &isin, &postcode, &countrycode); if (delete_old) delete_unused_classes('R', id, &places); @@ -1220,29 +1290,53 @@ count++; } + if (count == 0) + { + if (delete_old) delete_unused_classes('R', id, 0); + free(xcount); + free(xtags); + free(xnodes); + free(xid2); + return 0; + } + count = Options->mid->ways_get_list(xid2, count, &xid, xtags, xnodes, xcount); xnodes[count] = NULL; xcount[count] = 0; - wkt_size = build_geometry(id, xnodes, xcount, 1, 1, 
1000000); - for (i=0;ikey, place->value, &names, &extratags, adminlevel, housenumber, street, isin, postcode, countrycode, wkt); + for (place = firstItem(&places); place; place = nextItem(&places, place)) + { + add_place('R', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, addr_place, isin, postcode, countrycode, wkt); + } } - } - else - { - /* add_polygon_error('R', id, "boundary", "adminitrative", &names, countrycode, wkt); */ - } - free(wkt); + else + { + /* add_polygon_error('R', id, "boundary", "adminitrative", &names, countrycode, wkt); */ + } + free(wkt); + } + clear_wkts(); + } else { + /* waterways result in multilinestrings */ + char *wkt = get_multiline_geometry(id, xnodes, xcount); + if (wkt && strlen(wkt)) + { + for (place = firstItem(&places); place; place = nextItem(&places, place)) + { + add_place('R', id, place->key, place->value, &names, &extratags, adminlevel, housenumber, street, addr_place, isin, postcode, countrycode, wkt); + } + } + free(wkt); } - clear_wkts(); for( i=0; isrs) @@ -66,11 +67,7 @@ }; #define NUM_TABLES ((signed)(sizeof(tables) / sizeof(tables[0]))) -#define FLAG_POLYGON 1 /* For polygon table */ -#define FLAG_LINEAR 2 /* For lines table */ -#define FLAG_NOCACHE 4 /* Optimisation: don't bother remembering this one */ -#define FLAG_DELETE 8 /* These tags should be simply deleted on sight */ -#define FLAG_PHSTORE 17 /* polygons without own column but listed in hstore this implies FLAG_POLYGON */ + static struct flagsname { char *name; int flag; @@ -83,43 +80,10 @@ }; #define NUM_FLAGS ((signed)(sizeof(tagflags) / sizeof(tagflags[0]))) -/* Table columns, representing key= tags */ -struct taginfo { - char *name; - char *type; - int flags; - int count; -}; -static struct taginfo *exportList[4]; /* Indexed by enum table_id */ -static int exportListCount[4]; -/* Data to generate z-order column and road table - * The name of the roads table is misleading, this table - * is used for any feature to be 
shown at low zoom. - * This includes railways and administrative boundaries too - */ -static struct { - int offset; - const char *highway; - int roads; -} layers[] = { - { 3, "minor", 0 }, - { 3, "road", 0 }, - { 3, "unclassified", 0 }, - { 3, "residential", 0 }, - { 4, "tertiary_link", 0 }, - { 4, "tertiary", 0 }, - { 6, "secondary_link",1 }, - { 6, "secondary", 1 }, - { 7, "primary_link", 1 }, - { 7, "primary", 1 }, - { 8, "trunk_link", 1 }, - { 8, "trunk", 1 }, - { 9, "motorway_link", 1 }, - { 9, "motorway", 1 } -}; -static const unsigned int nLayers = (sizeof(layers)/sizeof(*layers)); +struct taginfo *exportList[4]; /* Indexed by enum table_id */ +int exportListCount[4]; static int pgsql_delete_way_from_output(osmid_t osm_id); static int pgsql_delete_relation_from_output(osmid_t osm_id); @@ -184,13 +148,6 @@ if( i == NUM_FLAGS ) fprintf( stderr, "Unknown flag '%s' line %d, ignored\n", str, lineno ); } - if (temp.flags==FLAG_PHSTORE) { - if (HSTORE_NONE==(Options->enable_hstore)) { - fprintf( stderr, "Error reading style file line %d (fields=%d)\n", lineno, fields ); - fprintf( stderr, "flag 'phstore' is invalid in non-hstore mode\n"); - exit_nicely(); - } - } if ((temp.flags!=FLAG_DELETE) && ((strchr(temp.name,'?') != NULL) || (strchr(temp.name,'*') != NULL))) { fprintf( stderr, "wildcard '%s' in non-delete style entry\n",temp.name); exit_nicely(); @@ -314,109 +271,6 @@ tables[table].buflen = buflen; } -static int add_z_order(struct keyval *tags, int *roads) -{ - const char *layer = getItem(tags, "layer"); - const char *highway = getItem(tags, "highway"); - const char *bridge = getItem(tags, "bridge"); - const char *tunnel = getItem(tags, "tunnel"); - const char *railway = getItem(tags, "railway"); - const char *boundary= getItem(tags, "boundary"); - - int z_order = 0; - int l; - unsigned int i; - char z[13]; - - l = layer ? 
strtol(layer, NULL, 10) : 0; - z_order = 10 * l; - *roads = 0; - - if (highway) { - for (i=0; i - * - * becomes: - * - */ -void compress_tag_name(struct keyval *tags) -{ - const char *name = getItem(tags, "name"); - struct keyval *name_ext = getMatches(tags, "name:"); - struct keyval *p; - char out[2048]; - - if (!name_ext) - return; - - out[0] = '\0'; - if (name) { - strncat(out, name, sizeof(out)-1); - strncat(out, " ", sizeof(out)-1); - } - while((p = popItem(name_ext)) != NULL) { - /* Exclude name:source = "dicataphone" and duplicates */ - if (strcmp(p->key, "name:source") && !strstr(out, p->value)) { - strncat(out, p->value, sizeof(out)-1); - strncat(out, " ", sizeof(out)-1); - } - freeItem(p); - } - free(name_ext); - - /* Remove trailing space */ - out[strlen(out)-1] = '\0'; - /* fprintf(stderr, "*** New name: %s\n", out); */ - updateItem(tags, "name", out); -} @@ -652,11 +506,14 @@ static int pgsql_out_node(osmid_t id, struct keyval *tags, double node_lat, double node_lon) { + int filter = tagtransform_filter_node_tags(tags); static char *sql; static size_t sqllen=0; int i; struct keyval *tag; + if (filter) return 1; + if (sqllen==0) { sqllen=2048; sql=malloc(sqllen); @@ -756,7 +613,7 @@ copy_to_table(table, "\n"); } -static int tag_indicates_polygon(enum OsmType type, const char *key) +/*static int tag_indicates_polygon(enum OsmType type, const char *key) { int i; @@ -769,127 +626,9 @@ } return 0; -} +}*/ -/* Go through the given tags and determine the union of flags. 
Also remove - * any tags from the list that we don't know about */ -unsigned int pgsql_filter_tags(enum OsmType type, struct keyval *tags, int *polygon) -{ - int i, filter = 1; - int flags = 0; - int add_area_tag = 0; - - const char *area; - struct keyval *item; - struct keyval temp; - initList(&temp); - - /* We used to only go far enough to determine if it's a polygon or not, but now we go through and filter stuff we don't need */ - while( (item = popItem(tags)) != NULL ) - { - /* Allow named islands to appear as polygons */ - if (!strcmp("natural",item->key) && !strcmp("coastline",item->value)) - { - add_area_tag = 1; - } - - /* Discard natural=coastline tags (we render these from a shapefile instead) */ - if (!Options->keep_coastlines && !strcmp("natural",item->key) && !strcmp("coastline",item->value)) - { - freeItem( item ); - item = NULL; - continue; - } - - for (i=0; i < exportListCount[type]; i++) - { - if (wildMatch( exportList[type][i].name, item->key )) - { - if( exportList[type][i].flags & FLAG_DELETE ) - { - freeItem( item ); - item = NULL; - break; - } - - filter = 0; - flags |= exportList[type][i].flags; - pushItem( &temp, item ); - item = NULL; - break; - } - } - - /** if tag not found in list of exports: */ - if (i == exportListCount[type]) - { - if (Options->enable_hstore) { - /* with hstore, copy all tags... */ - pushItem(&temp, item); - /* ... but if hstore_match_only is set then don't take this - as a reason for keeping the object */ - if ( - !Options->hstore_match_only - && strcmp("osm_uid",item->key) - && strcmp("osm_user",item->key) - && strcmp("osm_timestamp",item->key) - && strcmp("osm_version",item->key) - && strcmp("osm_changeset",item->key) - ) filter = 0; - } else if (Options->n_hstore_columns) { - /* does this column match any of the hstore column prefixes? 
*/ - int j; - for (j = 0; j < Options->n_hstore_columns; j++) { - char *pos = strstr(item->key, Options->hstore_columns[j]); - if (pos == item->key) { - pushItem(&temp, item); - /* ... but if hstore_match_only is set then don't take this - as a reason for keeping the object */ - if ( - !Options->hstore_match_only - && strcmp("osm_uid",item->key) - && strcmp("osm_user",item->key) - && strcmp("osm_timestamp",item->key) - && strcmp("osm_version",item->key) - && strcmp("osm_changeset",item->key) - ) filter = 0; - break; - } - } - /* if not, skip the tag */ - if (j == Options->n_hstore_columns) { - freeItem(item); - } - } else { - freeItem(item); - } - item = NULL; - } - } - - /* Move from temp list back to original list */ - while( (item = popItem(&temp)) != NULL ) - pushItem( tags, item ); - - *polygon = flags & FLAG_POLYGON; - - /* Special case allowing area= to override anything else */ - if ((area = getItem(tags, "area"))) { - if (!strcmp(area, "yes") || !strcmp(area, "true") ||!strcmp(area, "1")) - *polygon = 1; - else if (!strcmp(area, "no") || !strcmp(area, "false") || !strcmp(area, "0")) - *polygon = 0; - } else { - /* If we need to force this as a polygon, append an area tag */ - if (add_area_tag) { - addItem(tags, "area", "yes", 0); - *polygon = 1; - } - } - - return filter; -} /* COPY planet_osm (osm_id, name, place, landuse, leisure, "natural", man_made, waterway, highway, railway, amenity, tourism, learning, bu @@ -912,9 +651,8 @@ Options->mid->way_changed(id); } - if (pgsql_filter_tags(OSMTYPE_WAY, tags, &polygon) || add_z_order(tags, &roads)) + if (tagtransform_filter_way_tags(tags, &polygon, &roads)) return 0; - /* Split long ways after around 1 degree or 100km */ if (Options->projection == PROJ_LATLONG) split_at = 1; @@ -952,194 +690,27 @@ return 0; } -static int pgsql_out_relation(osmid_t id, struct keyval *rel_tags, struct osmNode **xnodes, struct keyval *xtags, int *xcount, osmid_t *xid, const char **xrole) +static int pgsql_out_relation(osmid_t id, 
struct keyval *rel_tags, int member_count, struct osmNode **xnodes, struct keyval *xtags, int *xcount, osmid_t *xid, const char **xrole) { int i, wkt_size; - int polygon = 0, roads = 0; + int roads = 0; int make_polygon = 0; int make_boundary = 0; - struct keyval tags, *p, poly_tags; - char *type; + int * members_superseeded; double split_at; -#if 0 - fprintf(stderr, "Got relation with counts:"); - for (i=0; xcount[i]; i++) - fprintf(stderr, " %d", xcount[i]); - fprintf(stderr, "\n"); -#endif - /* Get the type, if there's no type we don't care */ - type = getItem(rel_tags, "type"); - if( !type ) - return 0; - - initList(&tags); - initList(&poly_tags); - - /* Clone tags from relation */ - p = rel_tags->next; - while (p != rel_tags) { - /* For routes, we convert name to route_name */ - if ((strcmp(type, "route") == 0) && (strcmp(p->key, "name") ==0)) - addItem(&tags, "route_name", p->value, 1); - else if (strcmp(p->key, "type")) /* drop type= */ - addItem(&tags, p->key, p->value, 1); - p = p->next; - } - - if( strcmp(type, "route") == 0 ) - { - const char *state = getItem(rel_tags, "state"); - const char *netw = getItem(rel_tags, "network"); - int networknr = -1; - - if (state == NULL) { - state = ""; - } - - if (netw != NULL) { - if (strcmp(netw, "lcn") == 0) { - networknr = 10; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "lcn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - addItem(&tags, "lcn", "connection", 1); - } else { - addItem(&tags, "lcn", "yes", 1); - } - } else if (strcmp(netw, "rcn") == 0) { - networknr = 11; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "rcn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - addItem(&tags, "rcn", "connection", 1); - } else { - addItem(&tags, "rcn", "yes", 1); - } - } else if (strcmp(netw, "ncn") == 0) { - networknr = 12; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "ncn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - 
addItem(&tags, "ncn", "connection", 1); - } else { - addItem(&tags, "ncn", "yes", 1); - } - - - } else if (strcmp(netw, "lwn") == 0) { - networknr = 20; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "lwn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - addItem(&tags, "lwn", "connection", 1); - } else { - addItem(&tags, "lwn", "yes", 1); - } - } else if (strcmp(netw, "rwn") == 0) { - networknr = 21; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "rwn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - addItem(&tags, "rwn", "connection", 1); - } else { - addItem(&tags, "rwn", "yes", 1); - } - } else if (strcmp(netw, "nwn") == 0) { - networknr = 22; - if (strcmp(state, "alternate") == 0) { - addItem(&tags, "nwn", "alternate", 1); - } else if (strcmp(state, "connection") == 0) { - addItem(&tags, "nwn", "connection", 1); - } else { - addItem(&tags, "nwn", "yes", 1); - } - } - } - - if (getItem(rel_tags, "preferred_color") != NULL) { - const char *a = getItem(rel_tags, "preferred_color"); - if (strcmp(a, "0") == 0 || strcmp(a, "1") == 0 || strcmp(a, "2") == 0 || strcmp(a, "3") == 0 || strcmp(a, "4") == 0) { - addItem(&tags, "route_pref_color", a, 1); - } else { - addItem(&tags, "route_pref_color", "0", 1); - } - } else { - addItem(&tags, "route_pref_color", "0", 1); - } - - if (getItem(rel_tags, "ref") != NULL) { - if (networknr == 10) { - addItem(&tags, "lcn_ref", getItem(rel_tags, "ref"), 1); - } else if (networknr == 11) { - addItem(&tags, "rcn_ref", getItem(rel_tags, "ref"), 1); - } else if (networknr == 12) { - addItem(&tags, "ncn_ref", getItem(rel_tags, "ref"), 1); - } else if (networknr == 20) { - addItem(&tags, "lwn_ref", getItem(rel_tags, "ref"), 1); - } else if (networknr == 21) { - addItem(&tags, "rwn_ref", getItem(rel_tags, "ref"), 1); - } else if (networknr == 22) { - addItem(&tags, "nwn_ref", getItem(rel_tags, "ref"), 1); - } - } - } - else if( strcmp( type, "boundary" ) == 0 ) - { - /* 
Boundaries will get converted into multiple geometries: - - Linear features will end up in the line and roads tables (useful for admin boundaries) - - Polygon features also go into the polygon table (useful for national_forests) - The edges of the polygon also get treated as linear fetaures allowing these to be rendered seperately. */ - make_boundary = 1; - } - else if( strcmp( type, "multipolygon" ) == 0 && getItem(&tags, "boundary") ) - { - /* Treat type=multipolygon exactly like type=boundary if it has a boundary tag. */ - make_boundary = 1; - } - else if( strcmp( type, "multipolygon" ) == 0 ) - { - make_polygon = 1; - - /* Copy the tags from the outer way(s) if the relation is untagged */ - /* or if there is just a name tag, people seem to like naming relations */ - if (!listHasData(&tags) || ((countList(&tags)==1) && getItem(&tags, "name"))) { - for (i=0; xcount[i]; i++) { - if (xrole[i] && !strcmp(xrole[i], "inner")) - continue; - - p = xtags[i].next; - while (p != &(xtags[i])) { - addItem(&tags, p->key, p->value, 1); - p = p->next; - } - } - } + members_superseeded = calloc(sizeof(int), member_count); - /* Collect a list of polygon-like tags, these are used later to - identify if an inner rings looks like it should be rendered seperately */ - p = tags.next; - while (p != &tags) { - if (tag_indicates_polygon(OSMTYPE_WAY, p->key)) { - addItem(&poly_tags, p->key, p->value, 1); - } - p = p->next; - } - } - else - { - /* Unknown type, just exit */ - resetList(&tags); - resetList(&poly_tags); + if (member_count == 0) { + free(members_superseeded); return 0; } - if (pgsql_filter_tags(OSMTYPE_WAY, &tags, &polygon) || add_z_order(&tags, &roads)) { - resetList(&tags); - resetList(&poly_tags); + if (tagtransform_filter_rel_member_tags(rel_tags, member_count, xtags, xrole, members_superseeded, &make_boundary, &make_polygon, &roads)) { + free(members_superseeded); return 0; } - + /* Split long linear ways after around 1 degree or 100km (polygons not effected) */ if 
(Options->projection == PROJ_LATLONG) split_at = 1; @@ -1149,13 +720,11 @@ wkt_size = build_geometry(id, xnodes, xcount, make_polygon, Options->enable_multi, split_at); if (!wkt_size) { - resetList(&tags); - resetList(&poly_tags); + free(members_superseeded); return 0; } - for (i=0;i 0.0) && enable_way_area) { char tmp[32]; snprintf(tmp, sizeof(tmp), "%g", area); - addItem(&tags, "way_area", tmp, 0); + addItem(rel_tags, "way_area", tmp, 0); } - write_wkts(-id, &tags, wkt, t_poly); + write_wkts(-id, rel_tags, wkt, t_poly); } else { - write_wkts(-id, &tags, wkt, t_line); + write_wkts(-id, rel_tags, wkt, t_line); if (roads) - write_wkts(-id, &tags, wkt, t_roads); + write_wkts(-id, rel_tags, wkt, t_roads); } } free(wkt); @@ -1180,29 +749,21 @@ clear_wkts(); - /* If we are creating a multipolygon then we - mark each member so that we can skip them during iterate_ways - but only if the polygon-tags look the same as the outer ring */ + /* Tagtransform will have marked those member ways of the relation that + * have fully been dealt with as part of the multi-polygon entry. 
+ * Set them in the database as done and delete their entry to not + * have duplicates */ if (make_polygon) { for (i=0; xcount[i]; i++) { - int match = 0; - struct keyval *p = poly_tags.next; - while (p != &poly_tags) { - const char *v = getItem(&xtags[i], p->key); - if (!v || strcmp(v, p->value)) { - match = 0; - break; - } - match = 1; - p = p->next; - } - if (match) { + if (members_superseeded[i]) { Options->mid->ways_done(xid[i]); pgsql_delete_way_from_output(xid[i]); } } } + free(members_superseeded); + /* If we are making a boundary then also try adding any relations which form complete rings The linear variants will have already been processed above */ if (make_boundary) { @@ -1219,9 +780,9 @@ if ((area > 0.0) && enable_way_area) { char tmp[32]; snprintf(tmp, sizeof(tmp), "%g", area); - addItem(&tags, "way_area", tmp, 0); + addItem(rel_tags, "way_area", tmp, 0); } - write_wkts(-id, &tags, wkt, t_poly); + write_wkts(-id, rel_tags, wkt, t_poly); } } free(wkt); @@ -1229,8 +790,6 @@ clear_wkts(); } - resetList(&tags); - resetList(&poly_tags); return 0; } @@ -1299,7 +858,7 @@ } else { - sprintf(sql, "SELECT srid FROM geometry_columns WHERE f_table_name='%s';", tables[i].name); + sprintf(sql, "SELECT srid FROM geometry_columns WHERE f_table_name='%s' AND f_geometry_column='way';", tables[i].name); res = PQexec(sql_conn, sql); if (!((PQntuples(res) == 1) && (PQnfields(res) == 1))) { @@ -1431,6 +990,10 @@ } free(sql); + if (tagtransform_init(options)) { + fprintf(stderr, "Error: Failed to initialise tag processing.\n"); + exit_nicely(); + } expire_tiles_init(options); options->mid->start(options); @@ -1517,9 +1080,18 @@ fprintf(stderr, "Copying %s to cluster by geometry finished\n", table->name); fprintf(stderr, "Creating geometry index on %s\n", table->name); if (Options->tblsmain_index) { - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + /* Use fillfactor 
100 for un-updatable imports */ + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way) WITH (FILLFACTOR=100) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + } } else { - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way);\n", table->name, table->name); + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way);\n", table->name, table->name); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_index ON %s USING GIST (way) WITH (FILLFACTOR=100);\n", table->name, table->name); + } } /* slim mode needs this to be able to apply diffs */ @@ -1536,17 +1108,36 @@ if (Options->enable_hstore_index) { fprintf(stderr, "Creating hstore indexes on %s\n", table->name); if (Options->tblsmain_index) { - if (HSTORE_NONE != (Options->enable_hstore)) - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + if (HSTORE_NONE != (Options->enable_hstore)) { + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags) TABLESPACE %s;\n", table->name, table->name, Options->tblsmain_index); + } + } for(i_column = 0; i_column < Options->n_hstore_columns; i_column++) { - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\") TABLESPACE %s;\n", + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, 
PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\") TABLESPACE %s;\n", + table->name, i_column,table->name, Options->hstore_columns[i_column], Options->tblsmain_index); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\") TABLESPACE %s;\n", table->name, i_column,table->name, Options->hstore_columns[i_column], Options->tblsmain_index); + } } } else { - if (HSTORE_NONE != (Options->enable_hstore)) - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags);\n", table->name, table->name); + if (HSTORE_NONE != (Options->enable_hstore)) { + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags);\n", table->name, table->name); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_tags_index ON %s USING GIN (tags) ;\n", table->name, table->name); + } + } for(i_column = 0; i_column < Options->n_hstore_columns; i_column++) { - pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\");\n", table->name, i_column,table->name, Options->hstore_columns[i_column]); + if (Options->slim && !Options->droptemp) { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\");\n", table->name, i_column,table->name, Options->hstore_columns[i_column]); + } else { + pgsql_exec(sql_conn, PGRES_COMMAND_OK, "CREATE INDEX %s_hstore_%i_index ON %s USING GIN (\"%s\");\n", table->name, i_column,table->name, Options->hstore_columns[i_column]); + } } } } @@ -1599,6 +1190,8 @@ */ Options->mid->iterate_relations( pgsql_process_relation ); + tagtransform_shutdown(); + #ifdef HAVE_PTHREAD if (Options->parallel_indexing) { for (i=0; imid->nodes_set(id, lat, lon, tags); - if( !filter ) - pgsql_out_node(id, tags, lat, lon); + pgsql_out_node(id, tags, lat, lon); + return 0; } static int pgsql_add_way(osmid_t id, osmid_t *nds, 
int nd_count, struct keyval *tags) { int polygon = 0; + int roads = 0; + /* Check whether the way is: (1) Exportable, (2) Maybe a polygon */ - int filter = pgsql_filter_tags(OSMTYPE_WAY, tags, &polygon); + int filter = tagtransform_filter_way_tags(tags, &polygon, &roads); /* If this isn't a polygon then it can not be part of a multipolygon Hence only polygons are "pending" */ @@ -1686,6 +1278,15 @@ if(exists) pgsql_delete_relation_from_output(id); + if (tagtransform_filter_rel_tags(tags)) { + free(xid2); + free(xrole); + free(xcount); + free(xtags); + free(xnodes); + return 1; + } + count = 0; for( i=0; i #include -#include #include "osmtypes.h" #include "output.h" @@ -55,6 +54,7 @@ return ntohl(*((size_t *)buf)); } +#if 0 static void *realloc_or_free(void *p, size_t len) { void *new = realloc(p, len); @@ -65,6 +65,7 @@ return new; } +#endif static BlockHeader *read_header(FILE *input, void *buf) { @@ -154,30 +155,8 @@ return bmsg->raw_size; } else if (bmsg->has_bzip2_data) { - int ret; - bz_stream strm; - strm.bzalloc = NULL; - strm.bzfree = NULL; - strm.opaque = NULL; - strm.avail_in = bmsg->bzip2_data.len; - strm.next_in = (char *) bmsg->bzip2_data.data; - strm.avail_out = bmsg->raw_size; - strm.next_out = buf; - - ret = BZ2_bzDecompressInit(&strm, 0, 0); - if (ret != BZ_OK) { - fprintf(stderr, "Bzip2 init failed\n"); - return 0; - } - - (void)BZ2_bzDecompressEnd(&strm); - - if (ret != BZ_STREAM_END) { - fprintf(stderr, "Bzip2 compression failed\n"); - return 0; - } - - return bmsg->raw_size; + fprintf(stderr, "Can't uncompress bz2 data\n"); + return 0; } else if (bmsg->has_lzma_data) { fprintf(stderr, "Can't uncompress LZMA data\n"); return 0; @@ -241,6 +220,7 @@ memcpy(username, user.data, user.len); addItem(head, "osm_user", username, 0); + free(username); } /* TODO timestamp */ @@ -256,7 +236,7 @@ return 0; } - header_block__free_unpacked (hmsg, &protobuf_c_system_allocator); + header_block__free_unpacked (hmsg, NULL); return 1; } @@ -338,15 +318,14 @@ 
addIntItem(&(osmdata->tags), "osm_version", denseinfo->version[node_id], 0); addIntItem(&(osmdata->tags), "osm_changeset", deltachangeset, 0); -#if 0 - /* TODO */ if (deltauid != -1) { /* osmosis devs failed to read the specs */ - printuser(string_table->s[deltauser_sid]); - printnumericattribute("osm_uid", deltauid); + char * valstr; + addIntItem(&(osmdata->tags), "osm_uid", deltauid, 0); + valstr = calloc(string_table->s[deltauser_sid].len + 1, 1); + memcpy(valstr, string_table->s[deltauser_sid].data, string_table->s[deltauser_sid].len); + addItem(&(osmdata->tags), "osm_user", valstr, 0); + free(valstr); } - - printtimestamp("osm_timestamp", deltatimestamp); -#endif } if (l < dense->n_keys_vals) { @@ -541,7 +520,7 @@ if (!processOsmDataRelations(osmdata, group, string_table)) return 0; } - primitive_block__free_unpacked (pmsg, &protobuf_c_system_allocator); + primitive_block__free_unpacked (pmsg, NULL); return 1; } @@ -606,8 +585,8 @@ } } - blob__free_unpacked (blob_msg, &protobuf_c_system_allocator); - block_header__free_unpacked (header_msg, &protobuf_c_system_allocator); + blob__free_unpacked (blob_msg, NULL); + block_header__free_unpacked (header_msg, NULL); } while (!feof(input)); if (!feof(input)) { diff -Nru osm2pgsql-0.82.0/parse-pbf.h osm2pgsql-0.86.0/parse-pbf.h --- osm2pgsql-0.82.0/parse-pbf.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/parse-pbf.h 2014-10-25 06:42:27.000000000 +0000 @@ -23,7 +23,7 @@ */ #ifndef PARSE_PBF_H -#define PARSE_PBF_h +#define PARSE_PBF_H int streamFilePbf(char *filename, int sanitize, struct osmdata_t *osmdata); diff -Nru osm2pgsql-0.82.0/parse-xml2.c osm2pgsql-0.86.0/parse-xml2.c --- osm2pgsql-0.82.0/parse-xml2.c 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/parse-xml2.c 2014-10-25 06:42:27.000000000 +0000 @@ -100,13 +100,17 @@ xid = xmlTextReaderGetAttribute(reader, BAD_CAST "id"); xlon = xmlTextReaderGetAttribute(reader, BAD_CAST "lon"); xlat = xmlTextReaderGetAttribute(reader, BAD_CAST "lat"); - 
assert(xid); assert(xlon); assert(xlat); + assert(xid); osmdata->osm_id = strtoosmid((char *)xid, NULL, 10); - osmdata->node_lon = strtod((char *)xlon, NULL); - osmdata->node_lat = strtod((char *)xlat, NULL); osmdata->action = ParseAction( reader , osmdata); + if (osmdata->action != ACTION_DELETE) { + assert(xlon); assert(xlat); + osmdata->node_lon = strtod((char *)xlon, NULL); + osmdata->node_lat = strtod((char *)xlat, NULL); + } + if (osmdata->osm_id > osmdata->max_node) osmdata->max_node = osmdata->osm_id; @@ -259,6 +263,12 @@ addItem(&(osmdata->tags), "osm_timestamp", (char *)xtmp, 0); xmlFree(xtmp); } + + xtmp = xmlTextReaderGetAttribute(reader, BAD_CAST "changeset"); + if (xtmp) { + addItem(&(osmdata->tags), "osm_changeset", (char *)xtmp, 0); + xmlFree(xtmp); + } } } diff -Nru osm2pgsql-0.82.0/parse-xml2.h osm2pgsql-0.86.0/parse-xml2.h --- osm2pgsql-0.82.0/parse-xml2.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/parse-xml2.h 2014-10-25 06:42:27.000000000 +0000 @@ -23,7 +23,7 @@ */ #ifndef PARSE_XML2_H -#define PARSE_XML2_h +#define PARSE_XML2_H int streamFileXML2(char *filename, int sanitize, struct osmdata_t *osmdata); diff -Nru osm2pgsql-0.82.0/pgsql.h osm2pgsql-0.86.0/pgsql.h --- osm2pgsql-0.82.0/pgsql.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/pgsql.h 2014-10-25 06:42:27.000000000 +0000 @@ -3,7 +3,12 @@ /* Current middle and output-pgsql do a lot of things similarly, this should * be used to abstract to commonalities */ +#ifndef PGSQL_H +#define PGSQL_H + PGresult *pgsql_execPrepared( PGconn *sql_conn, const char *stmtName, int nParams, const char *const * paramValues, ExecStatusType expect); int pgsql_CopyData(const char *context, PGconn *sql_conn, const char *sql); int pgsql_exec(PGconn *sql_conn, ExecStatusType expect, const char *fmt, ...) 
__attribute__ ((format (printf, 3, 4))); void escape(char *out, int len, const char *in); + +#endif diff -Nru osm2pgsql-0.82.0/README osm2pgsql-0.86.0/README --- osm2pgsql-0.82.0/README 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/README 2014-10-25 06:42:27.000000000 +0000 @@ -1,301 +1,136 @@ -osm2pgsql -========= -Converts OSM planet.osm data to a PostgreSQL / PostGIS database suitable -for specific applications like rendering into map tiles by Mapnik -or geocoding with Nominatim. - -osm2pgsql currently supports two different database schemas -1) A database schema that is optimized for ease of rendering -by Mapnik. -2) A database schema that is optimized for geocoding with Nominatim, -emphasizing the spatially hierarchical organizations of objects. - -Both schemas were specifically optimized for the purpose they were -intended for and they may therefore be less suitable for other -general purpose processing. Nevertheless, the rendering schema -might be useful for other purposes as well, and has been used -for a variety of additionally purposes. - - -For a broader view of the whole map rendering tool chain see -http://wiki.openstreetmap.org/index.php/Mapnik -http://wiki.openstreetmap.org/index.php/Osm2pgsql -http://wiki.openstreetmap.org/index.php/Slippy_Map +# osm2pgsql # -You may find that the wiki pages are more up to date than this -readme and may include answers to issues not mentioned here. 
- -Any questions should be directed at the osm dev list -http://wiki.openstreetmap.org/index.php/Mailing_lists - -Features -======== -- Converts OSM files to a PostgreSQL DB -- Conversion of tags to columns is configurable in the style file -- Able to read .gz, .bz2, .pbf and .o5m files directly -- Can apply diffs to keep the database up to data -- Support the choice of output projection -- Configurable table names -- Gazetteer back-end for Nominatim - http://wiki.openstreetmap.org/wiki/Nominatim -- Support for hstore field type to store the complete set of tags in one database +osm2pgsql is a tool for loading OpenStreetMap data into a PostgreSQL / PostGIS +database suitable for applications like rendering into a map, geocoding with +Nominatim, or general analysis. + +## Features ## + +* Converts OSM files to a PostgreSQL DB +* Conversion of tags to columns is configurable in the style file +* Able to read .gz, .bz2, .pbf and .o5m files directly +* Can apply diffs to keep the database up to date +* Support the choice of output projection +* Configurable table names +* Gazetteer back-end for [Nominatim](http://wiki.openstreetmap.org/wiki/Nominatim) +* Support for hstore field type to store the complete set of tags in one database field if desired -Source code -=========== +## Installing ## + The latest source code is available in the OSM git repository on github and can be downloaded as follows: +```sh $ git clone git://github.com/openstreetmap/osm2pgsql.git +``` -Build requirements -================== -The code is written in C and C++ and relies on the libraries -below: -- libxml2 http://xmlsoft.org/ -- geos http://geos.refractions.net/ -- proj http://www.remotesensing.org/proj/ -- bzip2 http://www.bzip.org/ -- zlib http://www.zlib.net/ -- PostgreSQL http://www.postgresql.org/ -- PostGIS http://postgis.refractions.net/ - -To make use of the database generated by this tool you will -probably also want to install: -- Mapnik from http://mapnik.org/ +## Building ## 
+Osm2pgsql uses the [GNU Build System](http://www.gnu.org/software/automake/manual/html_node/GNU-Build-System.html) +to configure and build itself and requires +* [libxml2](http://xmlsoft.org/) +* [geos](http://geos.osgeo.org/) +* [proj](http://proj.osgeo.org/) +* [bzip2](http://www.bzip.org/) +* [zlib](http://www.zlib.net/) +* [Protocol Buffers](https://developers.google.com/protocol-buffers/) +* [PostgreSQL](http://www.postgresql.org/) client libraries +* [Lua](http://www.lua.org/) (Optional, used for [Lua tag transforms](docs/lua.md)) + +It also requires access to a database server running +[PostgreSQL](http://www.postgresql.org/) and [PostGIS](http://www.postgis.net/). -Building -======== Make sure you have installed the development packages for the libraries mentioned in the requirements section and a C and C++ compiler. -e.g. on Fedora: -# yum install geos-devel proj-devel postgresql-devel libxml2-devel bzip2-devel gcc-c++ +To install on a Debian or Ubuntu system, first install the prerequisites: + +```sh +sudo apt-get install autoconf automake libtool make g++ libxml2-dev libgeos-dev + libgeos++-dev libpq-dev libbz2-dev libproj-dev protobuf-c-compiler + libprotobuf-c0-dev lua5.2 liblua5.2-dev +``` + +To install on a Fedora system, use + +```sh +sudo yum install gcc-c++ libxml2-devel geos-develpostgresql-devel bzip2-devel + proj-devel protobuf-compiler +``` + +Then you should be able to bootstrap the build system: + + ./autogen.sh + +And then run the standard GNU build install: + + ./configure && make && make install + +Please see `./configure --help` for more options on how to control the build +process. -on Debian: -# aptitude install libxml2-dev libgeos-dev libgeos++-dev libpq-dev libbz2-dev libproj-dev protobuf-c-compiler libprotobuf-c0-dev autoconf automake libtool make g++ +## Usage ## -On most Unix-like systems the program can be compiled by -running './autogen.sh && ./configure && make'. 
+Osm2pgsql has one program, the executable itself, which has **43** command line +options. + +Before loading into a database, the database must be created and the PostGIS +and optionally hstore extensions must be loaded. A full guide to PostgreSQL +setup is beyond the scope of this readme, but with reasonably recent versions +of PostgreSQL and PostGIS this can be done with + +```sh +createdb gis +psql -d gis -c 'CREATE EXTENSION postgis; CREATE EXTENSION hstore;' +``` + +A basic invocation to load the data into the database ``gis`` for rendering would be + +```sh +osm2pgsql --create --database gis data.osm.pbf +``` + +This will load the data from ``data.osm.pbf`` into the ``planet_osm_point``, +``planet_osm_line``, ``planet_osm_roads``, and ``planet_osm_polygon`` tables. + +When importing a large amount of data such as the complete planet, a typical +command line would be + +```sh + osm2pgsql -c -d gis --slim -C \ + --flat-nodes planet-latest.osm.pbf +``` +where +* ```` is 24000 on machines with 32GiB or more RAM + or about 75% of memory in MiB on machines with less +* ```` is a location where a 24GiB file can be saved. + +The databases from either of these commands can be used immediately by +[Mapnik](http://mapnik.org/) for rendering maps with standard tools like +[renderd/mod_tile](https://github.com/openstreetmap/mod_tile), +[TileMill](https://www.mapbox.com/tilemill/), [Nik4](https://github.com/Zverik/Nik4), +among others. It can also be used for [spatial analysis](docs/analysis.md) or +[shapefile exports](docs/export.md). + +[Additional documentation is available on writing command lines](docs/usage.md). + +## Alternate backends ## + +In addition to the standard [pgsql](docs/pgsql.md) backend designed for +rendering there is also the [gazetteer](docs/gazetteer.md) database for +geocoding, principally with [Nominatim](http://www.nominatim.org/), and the +null backend for testing. 
+ +Any questions should be directed at the osm dev list +http://wiki.openstreetmap.org/index.php/Mailing_lists -Operation -========= -You must create a PostgreSQL user and a database with the -PostGIS functions enabled. This requires access as the -database administrator, normally the 'postgres' user. - -The default name for this database is 'gis' but this may -be changed by using the osm2pgsql --database option. - -If the matches the unix user id running the import -and rendering then this allows the PostgreSQL 'ident sameuser' -authentication to be used which avoids the need to enter a -password when accessing the database. This is setup by default -on many Unix installs but does not work on Windows (due to the -lack of unix sockets). - -Some example commands are given below but you may find -this wiki page has more up to data information: -http://wiki.openstreetmap.org/wiki/Mapnik/PostGIS - -$ sudo -u postgres createuser -$ sudo -u postgres createdb -E UTF8 -O -$ sudo -u postgres createlang plpgsql - -Adding the PostGIS extensions. Note the location of the -files may vary. - -$ sudo -u postgres psql < /usr/share/postgresql/8.4/contrib/postgis-1.5/postgis.sql -$ sudo -u postgres psql < /usr/share/postgresql/8.4/contrib/postgis-1.5/spatial_ref_sys.sql - -Next we need to give the access to update the postgis -meta-data tables - -$ sudo -u postgres psql -d -c "ALTER TABLE geometry_columns OWNER TO " -$ sudo -u postgres psql -d -c "ALTER TABLE spatial_ref_sys OWNER TO " - -The 900913 is not normally included with PostGIS. To add it you -should run: - -$ sudo psql -u postgres psql -d -f 900913.sql - -If you want to use hstore support then you will also need to enable the PostgreSQL -hstore-new extension. - -$ sudo -u postgres psql < /usr/share/postgresql/8.4/contrib/hstore.sql - -On PostgreSQL 9.1 and above, you can install it by running "CREATE EXTENSION hstore;" -in your database. - -Now you can run osm2pgsql to import the OSM data. 
-This will perform the following actions: - -1) Osm2pgsql connects to database and creates the following 4 tables -when used with the default output back-end (pgsql): - - planet_osm_point - - planet_osm_line - - planet_osm_roads - - planet_osm_polygon -The prefix "planet_osm" can be changed with the --prefix option, -the above is the default. - -If you are using --slim mode, it will create the following additional 3 tables: - - planet_osm_nodes - - planet_osm_ways - - planet_osm_rels - - -2) Runs an XML parser on the input file (typically planet.osm) - and processes the nodes, ways and relations. - -3) If a node has a tag declared in the style file then it is - added to planet_osm_point. If it has no such tag then - the position is noted, but not added to the database. - -4) Ways are read in converted into WKT geometries by using the - positions of the nodes read in earlier. If the tags on the way - are listed in the style file then the way will be written into - the line or roads tables. - -5) If the way has one or more tags marked as 'polygon' and - forms a closed ring then it will be added to the planet_osm_polygon - table. - -6) The relations are parsed. Osm2pgsql has special handling for a - limited number of types: multipolygon, route, boundary - The code will build the appropriate geometries by referencing the - members and outputting these into the database. - -7) Indexes are added to speed up the queries by Mapnik. - -Tuning PostgreSQL -================= - -For an efficient operation of PostgreSQL you will need to tune the config -parameters of PostgreSQL from its default values. 
These are set in the -config file at /etc/postgresql/8.4/main/postgresql.conf - -The values you need to set will depend on the hardware you have available, -but you will likely need to increase the values for the following parameters: - -- shared_buffers -- checkpoint_segments -- work_mem -- maintenance_work_mem -- effective_cache_size - - -A quick note on projections -=========================== - -Depending on the command-line switches you can select which projection you -want the database in. You have three choices: - -4326: The standard lat/long coordinates -900913: The spherical Mercator projection, used by TileCache, Google Earth etc. -3395: The legacy (broken) WGS84 Mercator projection - -Depending on what you're using one or the other is appropriate. The default -Mapnik style (osm.xml) assumes that the data is stored in 900913 and this -is the default for osm2pgsql. - -Combining the -v and -h switches will tell about the exact definitions of -the projections. - -In case you want to use some completely different projection there is the -E -option. It will initialize the projection as +init=epsg:. This allows -you to use any projection recognized by proj4, which is useful if you want -to make a map in a different projection. These projections are usually -defined in /usr/share/proj/epsg. - -Database Access Examples -======================== -If you wish to access the data from the database then the -queries below should give you some hints. Note that these -examples all use the 'latlong' projection which is not the -default. - -$ psql gis -gis=> \d - List of relations - Schema | Name | Type | Owner ---------+--------------------+-------+---------- -... - public | planet_osm_line | table | jburgess - public | planet_osm_point | table | jburgess - public | planet_osm_polygon | table | jburgess - public | planet_osm_roads | table | jburgess -... 
- -gis=> \d planet_osm_line - Table "public.planet_osm_line" - Column | Type | Modifiers ------------+----------+----------- - osm_id | integer | - name | text | - place | text | - landuse | text | -... [ lots of stuff deleted ] ... - way | geometry | not null - z_order | integer | default 0 - - -Each of the tables contains a subset of the planet.osm file representing -a particular geometry type -- Point contains nodes which have interesting tags - e.g. place=city, name=London - -- Line contains ways with interesting tags - e.g. highway=motorway, ref=M25 - -- Polygon contains ways which form an enclosed area - e.g. landuse=reservoir - -The DB columns are used as follows: -- osm_id = the planet.osm ID of the node(point) or way(line,polygon) -- name, place, landuse, ... = the value of the given key, if present on -the node/way. If the tag is not present, the value is NULL. Only a -subset of all possible tags are stored in the DB. Only ones rendered in -the osm.xml are actually interesting to mapnik. -- way = PostGIS geometry describing the physical layout of the object. - - -Querying specific data requires knowlege of SQL and the OSM key/value -system, e.g. - -gis=> select osm_id,astext(way),name from planet_osm_point where amenity='cinema' limit 5; - osm_id | astext | name -----------+-------------------------------------------+-------------------- - 26236284 | POINT(-79.7160836579093 43.6802306464618) | - 26206699 | POINT(51.4051989797638 35.7066045032235) | Cinema Felestin - 26206700 | POINT(51.3994885141459 35.7058460359352) | Cinema Asr-e Jadid - 20979630 | POINT(151.225781789807 -33.8943079539886) | Paris Cinema - 20979684 | POINT(151.226855394904 -33.8946830511095) | Hoyts -(5 rows) - -Mapnik renders the data in each table by applying the rules in the -osm.xml file. - - -> How could I get e.g. all highways in a given bounding box? - -The 'way' column contains the geo info and is the one which you need to -use in your WHERE clause. e.g. 
- -gis=> select osm_id,highway,name from planet_osm_line where highway is not null and way && GeomFromText('POLYGON((0 52, 0.1 52, 0.1 52.1, 0 52.1, 0 52))',4326); - -osm_id | highway | name ----------+--------------+------------------ - 4273848 | unclassified | - 3977133 | trunk | to Royston (tbc) - 4004841 | trunk | - 4019198 | trunk | - 4019199 | trunk | - 4238966 | unclassified | +## Contributing ## +We welcome contributions to osm2pgsql. If you would like to report an issue, +please use the [issue tracker on GitHub](https://github.com/openstreetmap/osm2pgsql/issues). -See the Postgis docs for details, e.g. -http://postgis.refractions.net/docs/ch04.html +General queries can be sent to the tile-serving@ or dev@ +[mailing lists](http://wiki.openstreetmap.org/wiki/Mailing_lists). diff -Nru osm2pgsql-0.82.0/README.md osm2pgsql-0.86.0/README.md --- osm2pgsql-0.82.0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/README.md 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,136 @@ +# osm2pgsql # + +osm2pgsql is a tool for loading OpenStreetMap data into a PostgreSQL / PostGIS +database suitable for applications like rendering into a map, geocoding with +Nominatim, or general analysis. 
+ +## Features ## + +* Converts OSM files to a PostgreSQL DB +* Conversion of tags to columns is configurable in the style file +* Able to read .gz, .bz2, .pbf and .o5m files directly +* Can apply diffs to keep the database up to date +* Support the choice of output projection +* Configurable table names +* Gazetteer back-end for [Nominatim](http://wiki.openstreetmap.org/wiki/Nominatim) +* Support for hstore field type to store the complete set of tags in one database + field if desired + +## Installing ## + +The latest source code is available in the OSM git repository on github +and can be downloaded as follows: + +```sh +$ git clone git://github.com/openstreetmap/osm2pgsql.git +``` + +## Building ## + +Osm2pgsql uses the [GNU Build System](http://www.gnu.org/software/automake/manual/html_node/GNU-Build-System.html) +to configure and build itself and requires + +* [libxml2](http://xmlsoft.org/) +* [geos](http://geos.osgeo.org/) +* [proj](http://proj.osgeo.org/) +* [bzip2](http://www.bzip.org/) +* [zlib](http://www.zlib.net/) +* [Protocol Buffers](https://developers.google.com/protocol-buffers/) +* [PostgreSQL](http://www.postgresql.org/) client libraries +* [Lua](http://www.lua.org/) (Optional, used for [Lua tag transforms](docs/lua.md)) + +It also requires access to a database server running +[PostgreSQL](http://www.postgresql.org/) and [PostGIS](http://www.postgis.net/). + +Make sure you have installed the development packages for the +libraries mentioned in the requirements section and a C and C++ +compiler. 
+ +To install on a Debian or Ubuntu system, first install the prerequisites: + +```sh +sudo apt-get install autoconf automake libtool make g++ libxml2-dev libgeos-dev + libgeos++-dev libpq-dev libbz2-dev libproj-dev protobuf-c-compiler + libprotobuf-c0-dev lua5.2 liblua5.2-dev +``` + +To install on a Fedora system, use + +```sh +sudo yum install gcc-c++ libxml2-devel geos-develpostgresql-devel bzip2-devel + proj-devel protobuf-compiler +``` + +Then you should be able to bootstrap the build system: + + ./autogen.sh + +And then run the standard GNU build install: + + ./configure && make && make install + +Please see `./configure --help` for more options on how to control the build +process. + +## Usage ## + +Osm2pgsql has one program, the executable itself, which has **43** command line +options. + +Before loading into a database, the database must be created and the PostGIS +and optionally hstore extensions must be loaded. A full guide to PostgreSQL +setup is beyond the scope of this readme, but with reasonably recent versions +of PostgreSQL and PostGIS this can be done with + +```sh +createdb gis +psql -d gis -c 'CREATE EXTENSION postgis; CREATE EXTENSION hstore;' +``` + +A basic invocation to load the data into the database ``gis`` for rendering would be + +```sh +osm2pgsql --create --database gis data.osm.pbf +``` + +This will load the data from ``data.osm.pbf`` into the ``planet_osm_point``, +``planet_osm_line``, ``planet_osm_roads``, and ``planet_osm_polygon`` tables. + +When importing a large amount of data such as the complete planet, a typical +command line would be + +```sh + osm2pgsql -c -d gis --slim -C \ + --flat-nodes planet-latest.osm.pbf +``` +where +* ```` is 24000 on machines with 32GiB or more RAM + or about 75% of memory in MiB on machines with less +* ```` is a location where a 24GiB file can be saved. 
+ +The databases from either of these commands can be used immediately by +[Mapnik](http://mapnik.org/) for rendering maps with standard tools like +[renderd/mod_tile](https://github.com/openstreetmap/mod_tile), +[TileMill](https://www.mapbox.com/tilemill/), [Nik4](https://github.com/Zverik/Nik4), +among others. It can also be used for [spatial analysis](docs/analysis.md) or +[shapefile exports](docs/export.md). + +[Additional documentation is available on writing command lines](docs/usage.md). + +## Alternate backends ## + +In addition to the standard [pgsql](docs/pgsql.md) backend designed for +rendering there is also the [gazetteer](docs/gazetteer.md) database for +geocoding, principally with [Nominatim](http://www.nominatim.org/), and the +null backend for testing. + +Any questions should be directed at the osm dev list +http://wiki.openstreetmap.org/index.php/Mailing_lists + +## Contributing ## + +We welcome contributions to osm2pgsql. If you would like to report an issue, +please use the [issue tracker on GitHub](https://github.com/openstreetmap/osm2pgsql/issues). + +General queries can be sent to the tile-serving@ or dev@ +[mailing lists](http://wiki.openstreetmap.org/wiki/Mailing_lists). 
diff -Nru osm2pgsql-0.82.0/style.lua osm2pgsql-0.86.0/style.lua --- osm2pgsql-0.82.0/style.lua 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/style.lua 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,178 @@ +polygon_keys = { 'building', 'landuse', 'amenity', 'harbour', 'historic', 'leisure', + 'man_made', 'military', 'natural', 'office', 'place', 'power', + 'public_transport', 'shop', 'sport', 'tourism', 'waterway', + 'wetland', 'water', 'aeroway' } + +generic_keys = {'access','addr:housename','addr:housenumber','addr:interpolation','admin_level','aerialway','aeroway','amenity','area','barrier', + 'bicycle','brand','bridge','boundary','building','capital','construction','covered','culvert','cutting','denomination','disused','ele', + 'embarkment','foot','generation:source','harbour','highway','historic','hours','intermittent','junction','landuse','layer','leisure','lock', + 'man_made','military','motor_car','name','natural','office','oneway','operator','place','poi','population','power','power_source','public_transport', + 'railway','ref','religion','route','service','shop','sport','surface','toll','tourism','tower:type', 'tracktype','tunnel','water','waterway', + 'wetland','width','wood','type'} + +function add_z_order(keyvalues) + z_order = 0 + if (keyvalues["layer"] ~= nil and tonumber(keyvalues["layer"])) then + z_order = 10*keyvalues["layer"] + end + + + zordering_tags = {{ 'railway', nil, 5, 1}, { 'boundary', 'administrative', 0, 1}, + { 'bridge', 'yes', 10, 0 }, { 'bridge', 'true', 10, 0 }, { 'bridge', 1, 10, 0 }, + { 'tunnel', 'yes', -10, 0}, { 'tunnel', 'true', -10, 0}, { 'tunnel', 1, -10, 0}, + { 'highway', 'minor', 3, 0}, { 'highway', 'road', 3, 0 }, { 'highway', 'unclassified', 3, 0 }, + { 'highway', 'residential', 3, 0 }, { 'highway', 'tertiary_link', 4, 0}, { 'highway', 'tertiary', 4, 0}, + { 'highway', 'secondary_link', 6, 1}, { 'highway', 'secondary', 6, 1}, + { 'highway', 'primary_link', 7, 1}, { 'highway', 'primary', 7, 1}, + { 'highway', 
'trunk_link', 8, 1}, { 'highway', 'trunk', 8, 1}, + { 'highway', 'motorway_link', 9, 1}, { 'highway', 'motorway', 9, 1}, +} + + for i,k in ipairs(zordering_tags) do + if ((k[2] and keyvalues[k[1]] == k[2]) or (k[2] == nil and keyvalues[k[1]] ~= nil)) then + if (k[4] == 1) then + roads = 1 + end + z_order = z_order + k[3] + end + end + + keyvalues["z_order"] = z_order + + return keyvalues, roads + +end + +function filter_tags_generic(keyvalues, nokeys) + filter = 0 + tagcount = 0 + + if nokeys == 0 then + filter = 1 + return filter, keyvalues + end + + delete_tags = { 'FIXME', 'note', 'source' } + + for i,k in ipairs(delete_tags) do + keyvalues[k] = nil + end + + for k,v in pairs(keyvalues) do + for i, k2 in ipairs(generic_keys) do if k2 == k then tagcount = tagcount + 1; end end + end + if tagcount == 0 then + filter = 1 + end + + return filter, keyvalues +end + +function filter_tags_node (keyvalues, nokeys) + return filter_tags_generic(keyvalues, nokeys) +end + +function filter_basic_tags_rel (keyvalues, nokeys) + + filter, keyvalues = filter_tags_generic(keyvalues, nokeys) + if filter == 1 then + return filter, keyvalues + end + + if ((keyvalues["type"] ~= "route") and (keyvalues["type"] ~= "multipolygon") and (keyvalues["type"] ~= "boundary")) then + filter = 1 + return filter, keyvalues + end + + return filter, keyvalues +end + +function filter_tags_way (keyvalues, nokeys) + filter = 0 + poly = 0 + tagcount = 0 + roads = 0 + + filter, keyvalues = filter_tags_generic(keyvalues, nokeys) + if filter == 1 then + return filter, keyvalues, poly, roads + end + + + for i,k in ipairs(polygon_keys) do + if keyvalues[k] then + poly=1 + break + end + end + + + if ((keyvalues["area"] == "yes") or (keyvalues["area"] == "1") or (keyvalues["area"] == "true")) then + poly = 1; + elseif ((keyvalues["area"] == "no") or (keyvalues["area"] == "0") or (keyvalues["area"] == "false")) then + poly = 0; + end + + keyvalues, roads = add_z_order(keyvalues) + + + return filter, keyvalues, 
poly, roads +end + +function filter_tags_relation_member (keyvalues, keyvaluemembers, roles, membercount) + + filter = 0 + boundary = 0 + polygon = 0 + roads = 0 + membersuperseeded = {} + for i = 1, membercount do + membersuperseeded[i] = 0 + end + + type = keyvalues["type"] + keyvalues["type"] = nil + + + if (type == "boundary") then + boundary = 1 + end + if ((type == "multipolygon") and keyvalues["boundary"]) then + boundary = 1 + elseif (type == "multipolygon") then + polygon = 1 + polytagcount = 0; + for i,k in ipairs(polygon_keys) do + if keyvalues[k] then + polytagcount = polytagcount + 1 + end + end + if (polytagcount == 0) then + for i = 1,membercount do + if (roles[i] == "outer") then + for k,v in pairs(keyvaluemembers[i]) do + keyvalues[k] = v + end + end + end + end + for i = 1,membercount do + superseeded = 1 + for k,v in pairs(keyvaluemembers[i]) do + if ((keyvalues[k] == nil) or (keyvalues[k] ~= v)) then + for j,k2 in ipairs(generic_keys) do + if (k == k2) then + superseeded = 0; + break + end + end + end + end + membersuperseeded[i] = superseeded + end + end + + keyvalues, roads = add_z_order(keyvalues) + + return filter, keyvalues, membersuperseeded, boundary, polygon, roads +end diff -Nru osm2pgsql-0.82.0/tagtransform.c osm2pgsql-0.86.0/tagtransform.c --- osm2pgsql-0.82.0/tagtransform.c 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tagtransform.c 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,756 @@ + +#include +#include +#include +#include "osmtypes.h" +#include "keyvals.h" +#include "tagtransform.h" +#include "output-pgsql.h" +#include "config.h" +#include "wildcmp.h" + +#ifdef HAVE_LUA +static lua_State *L; +#endif + +static struct { + int offset; + const char *highway; + int roads; +} layers[] = { + { 3, "minor", 0 }, + { 3, "road", 0 }, + { 3, "unclassified", 0 }, + { 3, "residential", 0 }, + { 4, "tertiary_link", 0 }, + { 4, "tertiary", 0 }, + { 6, "secondary_link",1 }, + { 6, "secondary", 1 }, + { 7, "primary_link", 1 }, + { 7, 
"primary", 1 }, + { 8, "trunk_link", 1 }, + { 8, "trunk", 1 }, + { 9, "motorway_link", 1 }, + { 9, "motorway", 1 } +}; + +static const unsigned int nLayers = (sizeof(layers)/sizeof(*layers)); + +const struct output_options *options; + +extern struct taginfo *exportList[4]; /* Indexed by enum table_id */ +extern int exportListCount[4]; + +int transform_method = 0; + +static int add_z_order(struct keyval *tags, int *roads) { + const char *layer = getItem(tags, "layer"); + const char *highway = getItem(tags, "highway"); + const char *bridge = getItem(tags, "bridge"); + const char *tunnel = getItem(tags, "tunnel"); + const char *railway = getItem(tags, "railway"); + const char *boundary = getItem(tags, "boundary"); + + int z_order = 0; + int l; + unsigned int i; + char z[13]; + + l = layer ? strtol(layer, NULL, 10) : 0; + z_order = 10 * l; + *roads = 0; + + if (highway) { + for (i = 0; i < nLayers; i++) { + if (!strcmp(layers[i].highway, highway)) { + z_order += layers[i].offset; + *roads = layers[i].roads; + break; + } + } + } + + if (railway && strlen(railway)) { + z_order += 5; + *roads = 1; + } + /* Administrative boundaries are rendered at low zooms so we prefer to use the roads table */ + if (boundary && !strcmp(boundary, "administrative")) + *roads = 1; + + if (bridge + && (!strcmp(bridge, "true") || !strcmp(bridge, "yes") + || !strcmp(bridge, "1"))) + z_order += 10; + + if (tunnel + && (!strcmp(tunnel, "true") || !strcmp(tunnel, "yes") + || !strcmp(tunnel, "1"))) + z_order -= 10; + + snprintf(z, sizeof(z), "%d", z_order); + addItem(tags, "z_order", z, 0); + + return 0; +} + +static unsigned int tagtransform_lua_filter_basic_tags(enum OsmType type, struct keyval *tags, int * polygon, int * roads) { +#ifdef HAVE_LUA + int idx = 0; + int filter; + int count = 0; + struct keyval *item; + const char * key, * value; + + *polygon = 0; *roads = 0; + + switch (type) { + case OSMTYPE_NODE: { + lua_getglobal(L, "filter_tags_node"); + break; + } + case OSMTYPE_WAY: { + 
lua_getglobal(L, "filter_tags_way"); + break; + } + case OSMTYPE_RELATION: { + lua_getglobal(L, "filter_basic_tags_rel"); + break; + } + } + + lua_newtable(L); /* key value table */ + + idx = 1; + while( (item = popItem(tags)) != NULL ) { + lua_pushstring(L, item->key); + lua_pushstring(L, item->value); + lua_rawset(L, -3); + freeItem(item); + count++; + } + + //printf("C count %i\n", count); + lua_pushinteger(L, count); + + if (lua_pcall(L,2,type == OSMTYPE_WAY ? 4 : 2,0)) { + fprintf(stderr, "Failed to execute lua function for basic tag processing: %s\n", lua_tostring(L, -1)); + /* lua function failed */ + return 1; + } + + if (type == OSMTYPE_WAY) { + *roads = lua_tointeger(L, -1); + lua_pop(L,1); + *polygon = lua_tointeger(L, -1); + lua_pop(L,1); + } + + lua_pushnil(L); + while (lua_next(L,-2) != 0) { + key = lua_tostring(L,-2); + value = lua_tostring(L,-1); + addItem(tags, key, value, 0); + lua_pop(L,1); + } + + filter = lua_tointeger(L, -2); + + lua_pop(L,2); + + return filter; +#else + return 1; +#endif +} + + +/* Go through the given tags and determine the union of flags. 
Also remove + * any tags from the list that we don't know about */ +static unsigned int tagtransform_c_filter_basic_tags(enum OsmType type, + struct keyval *tags, int *polygon, int * roads) { + int i, filter = 1; + int flags = 0; + int add_area_tag = 0; + enum OsmType export_type; + + const char *area; + struct keyval *item; + struct keyval temp; + initList(&temp); + + if (type == OSMTYPE_RELATION) {export_type = OSMTYPE_WAY;} else {export_type = type;} + + /* We used to only go far enough to determine if it's a polygon or not, but now we go through and filter stuff we don't need */ + while ((item = popItem(tags)) != NULL ) { + if (type == OSMTYPE_RELATION && !strcmp("type", item->key)) { + pushItem(&temp, item); + item = NULL; + filter = 0; + continue; + } + /* Allow named islands to appear as polygons */ + if (!strcmp("natural", item->key) + && !strcmp("coastline", item->value)) { + add_area_tag = 1; + } + + /* Discard natural=coastline tags (we render these from a shapefile instead) */ + if (!options->keep_coastlines && !strcmp("natural", item->key) + && !strcmp("coastline", item->value)) { + freeItem(item); + item = NULL; + continue; + } + + for (i = 0; i < exportListCount[export_type]; i++) { + if (wildMatch(exportList[export_type][i].name, item->key)) { + if (exportList[export_type][i].flags & FLAG_DELETE) { + freeItem(item); + item = NULL; + break; + } + + filter = 0; + flags |= exportList[export_type][i].flags; + + pushItem(&temp, item); + item = NULL; + break; + } + } + + /** if tag not found in list of exports: */ + if (i == exportListCount[export_type]) { + if (options->enable_hstore) { + /* with hstore, copy all tags... */ + pushItem(&temp, item); + /* ... 
but if hstore_match_only is set then don't take this + as a reason for keeping the object */ + if (!options->hstore_match_only && strcmp("osm_uid", item->key) + && strcmp("osm_user", item->key) + && strcmp("osm_timestamp", item->key) + && strcmp("osm_version", item->key) + && strcmp("osm_changeset", item->key)) + filter = 0; + } else if (options->n_hstore_columns) { + /* does this column match any of the hstore column prefixes? */ + int j; + for (j = 0; j < options->n_hstore_columns; j++) { + char *pos = strstr(item->key, options->hstore_columns[j]); + if (pos == item->key) { + pushItem(&temp, item); + /* ... but if hstore_match_only is set then don't take this + as a reason for keeping the object */ + if (!options->hstore_match_only + && strcmp("osm_uid", item->key) + && strcmp("osm_user", item->key) + && strcmp("osm_timestamp", item->key) + && strcmp("osm_version", item->key) + && strcmp("osm_changeset", item->key)) + filter = 0; + break; + } + } + /* if not, skip the tag */ + if (j == options->n_hstore_columns) { + freeItem(item); + } + } else { + freeItem(item); + } + item = NULL; + } + } + + /* Move from temp list back to original list */ + while ((item = popItem(&temp)) != NULL ) + pushItem(tags, item); + + *polygon = flags & FLAG_POLYGON; + + /* Special case allowing area= to override anything else */ + if ((area = getItem(tags, "area"))) { + if (!strcmp(area, "yes") || !strcmp(area, "true") || !strcmp(area, "1")) + *polygon = 1; + else if (!strcmp(area, "no") || !strcmp(area, "false") + || !strcmp(area, "0")) + *polygon = 0; + } else { + /* If we need to force this as a polygon, append an area tag */ + if (add_area_tag) { + addItem(tags, "area", "yes", 0); + *polygon = 1; + } + } + + if (!filter && (type == OSMTYPE_WAY)) { + add_z_order(tags,roads); + } + + return filter; +} + + +static unsigned int tagtransform_lua_filter_rel_member_tags(struct keyval *rel_tags, int member_count, + struct keyval *member_tags,const char **member_role, + int * 
member_superseeded, int * make_boundary, int * make_polygon, int * roads) { +#ifdef HAVE_LUA + + int i; + int idx = 0; + int filter; + int count = 0; + struct keyval *item; + const char * key, * value; + + lua_getglobal(L, "filter_tags_relation_member"); + + lua_newtable(L); /* relations key value table */ + + idx = 1; + while( (item = popItem(rel_tags)) != NULL ) { + lua_pushstring(L, item->key); + lua_pushstring(L, item->value); + lua_rawset(L, -3); + freeItem(item); + count++; + } + + lua_newtable(L); /* member tags table */ + + for (i = 1; i <= member_count; i++) { + lua_pushnumber(L, i); + lua_newtable(L); /* member key value table */ + while( (item = popItem(&(member_tags[i - 1]))) != NULL ) { + lua_pushstring(L, item->key); + lua_pushstring(L, item->value); + lua_rawset(L, -3); + freeItem(item); + count++; + } + lua_rawset(L, -3); + } + + lua_newtable(L); /* member roles table */ + + for (i = 0; i < member_count; i++) { + lua_pushnumber(L, i + 1); + lua_pushstring(L, member_role[i]); + lua_rawset(L, -3); + } + + lua_pushnumber(L, member_count); + + if (lua_pcall(L,4,6,0)) { + fprintf(stderr, "Failed to execute lua function for relation tag processing: %s\n", lua_tostring(L, -1)); + /* lua function failed */ + return 1; + } + + *roads = lua_tointeger(L, -1); + lua_pop(L,1); + *make_polygon = lua_tointeger(L, -1); + lua_pop(L,1); + *make_boundary = lua_tointeger(L,-1); + lua_pop(L,1); + + lua_pushnil(L); + for (i = 0; i < member_count; i++) { + if (lua_next(L,-2)) { + member_superseeded[i] = lua_tointeger(L,-1); + lua_pop(L,1); + } else { + fprintf(stderr, "Failed to read member_superseeded from lua function\n"); + } + } + lua_pop(L,2); + + lua_pushnil(L); + while (lua_next(L,-2) != 0) { + key = lua_tostring(L,-2); + value = lua_tostring(L,-1); + addItem(rel_tags, key, value, 0); + lua_pop(L,1); + } + lua_pop(L,1); + + filter = lua_tointeger(L, -1); + + lua_pop(L,1); + + return filter; +#else + return 1; +#endif +} + + +static unsigned int 
tagtransform_c_filter_rel_member_tags( + struct keyval *rel_tags, int member_count, + struct keyval *member_tags, const char **member_role, + int * member_superseeded, int * make_boundary, int * make_polygon, int * roads) { + char *type; + struct keyval tags, *p, *q, *qq, poly_tags; + int i, j; + int first_outerway, contains_tag; + + /* Get the type, if there's no type we don't care */ + type = getItem(rel_tags, "type"); + if (!type) + return 1; + + initList(&tags); + initList(&poly_tags); + + /* Clone tags from relation */ + p = rel_tags->next; + while (p != rel_tags) { + /* For routes, we convert name to route_name */ + if ((strcmp(type, "route") == 0) && (strcmp(p->key, "name") == 0)) + addItem(&tags, "route_name", p->value, 1); + else if (strcmp(p->key, "type")) /* drop type= */ + addItem(&tags, p->key, p->value, 1); + p = p->next; + } + + if (strcmp(type, "route") == 0) { + const char *state = getItem(rel_tags, "state"); + const char *netw = getItem(rel_tags, "network"); + int networknr = -1; + + if (state == NULL ) { + state = ""; + } + + if (netw != NULL ) { + if (strcmp(netw, "lcn") == 0) { + networknr = 10; + if (strcmp(state, "alternate") == 0) { + addItem(&tags, "lcn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "lcn", "connection", 1); + } else { + addItem(&tags, "lcn", "yes", 1); + } + } else if (strcmp(netw, "rcn") == 0) { + networknr = 11; + if (strcmp(state, "alternate") == 0) { + addItem(&tags, "rcn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "rcn", "connection", 1); + } else { + addItem(&tags, "rcn", "yes", 1); + } + } else if (strcmp(netw, "ncn") == 0) { + networknr = 12; + if (strcmp(state, "alternate") == 0) { + addItem(&tags, "ncn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "ncn", "connection", 1); + } else { + addItem(&tags, "ncn", "yes", 1); + } + + } else if (strcmp(netw, "lwn") == 0) { + networknr = 20; + if 
(strcmp(state, "alternate") == 0) { + addItem(&tags, "lwn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "lwn", "connection", 1); + } else { + addItem(&tags, "lwn", "yes", 1); + } + } else if (strcmp(netw, "rwn") == 0) { + networknr = 21; + if (strcmp(state, "alternate") == 0) { + addItem(&tags, "rwn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "rwn", "connection", 1); + } else { + addItem(&tags, "rwn", "yes", 1); + } + } else if (strcmp(netw, "nwn") == 0) { + networknr = 22; + if (strcmp(state, "alternate") == 0) { + addItem(&tags, "nwn", "alternate", 1); + } else if (strcmp(state, "connection") == 0) { + addItem(&tags, "nwn", "connection", 1); + } else { + addItem(&tags, "nwn", "yes", 1); + } + } + } + + if (getItem(rel_tags, "preferred_color") != NULL ) { + const char *a = getItem(rel_tags, "preferred_color"); + if (strcmp(a, "0") == 0 || strcmp(a, "1") == 0 + || strcmp(a, "2") == 0 || strcmp(a, "3") == 0 + || strcmp(a, "4") == 0) { + addItem(&tags, "route_pref_color", a, 1); + } else { + addItem(&tags, "route_pref_color", "0", 1); + } + } else { + addItem(&tags, "route_pref_color", "0", 1); + } + + if (getItem(rel_tags, "ref") != NULL ) { + if (networknr == 10) { + addItem(&tags, "lcn_ref", getItem(rel_tags, "ref"), 1); + } else if (networknr == 11) { + addItem(&tags, "rcn_ref", getItem(rel_tags, "ref"), 1); + } else if (networknr == 12) { + addItem(&tags, "ncn_ref", getItem(rel_tags, "ref"), 1); + } else if (networknr == 20) { + addItem(&tags, "lwn_ref", getItem(rel_tags, "ref"), 1); + } else if (networknr == 21) { + addItem(&tags, "rwn_ref", getItem(rel_tags, "ref"), 1); + } else if (networknr == 22) { + addItem(&tags, "nwn_ref", getItem(rel_tags, "ref"), 1); + } + } + } else if (strcmp(type, "boundary") == 0) { + /* Boundaries will get converted into multiple geometries: + - Linear features will end up in the line and roads tables (useful for admin boundaries) + - Polygon features 
also go into the polygon table (useful for national_forests) + The edges of the polygon also get treated as linear fetaures allowing these to be rendered seperately. */ + *make_boundary = 1; + } else if (strcmp(type, "multipolygon") == 0 + && getItem(&tags, "boundary")) { + /* Treat type=multipolygon exactly like type=boundary if it has a boundary tag. */ + *make_boundary = 1; + } else if (strcmp(type, "multipolygon") == 0) { + *make_polygon = 1; + + /* Collect a list of polygon-like tags, these are used later to + identify if an inner rings looks like it should be rendered separately */ + p = tags.next; + while (p != &tags) { + if (!strcmp(p->key, "area")) { + addItem(&poly_tags, p->key, p->value, 1); + } else { + for (i = 0; i < exportListCount[OSMTYPE_WAY]; i++) { + if (strcmp(exportList[OSMTYPE_WAY][i].name, p->key) == 0) { + if (exportList[OSMTYPE_WAY][i].flags & FLAG_POLYGON) { + addItem(&poly_tags, p->key, p->value, 1); + } + break; + } + } + } + p = p->next; + } + + /* Copy the tags from the outer way(s) if the relation is untagged (with + * respect to tags that influence its polygon nature. Tags like name or fixme should be fine*/ + if (!listHasData(&poly_tags)) { + first_outerway = 1; + for (i = 0; i < member_count; i++) { + if (member_role[i] && !strcmp(member_role[i], "inner")) + continue; + + /* insert all tags of the first outerway to the potential list of copied tags. */ + if (first_outerway) { + p = member_tags[i].next; + while (p != &(member_tags[i])) { + addItem(&poly_tags, p->key, p->value, 1); + p = p->next; + } + } else { + /* Check if all of the tags in the list of potential tags are present on this way, + otherwise remove from the list of potential tags. 
Tags need to be present on + all outer ways to be copied over to the relation */ + q = poly_tags.next; + while (q != &poly_tags) { + p = getTag(&(member_tags[i]), q->key); + if ((p != NULL) && (strcmp(q->value, p->value) == 0)) { + q = q->next; + } else { + /* This tag is not present on all member outer ways, so don't copy it over to relation */ + qq = q->next; + removeTag(q); + q = qq; + } + } + } + first_outerway = 0; + } + /* Copy the list identified outer way tags over to the relation */ + q = poly_tags.next; + while (q != &poly_tags) { + addItem(&tags, q->key, q->value, 1); + q = q->next; + } + + /* We need to re-check and only keep polygon tags in the list of polytags */ + q = poly_tags.next; + while (q != &poly_tags) { + contains_tag = 0; + for (j = 0; j < exportListCount[OSMTYPE_WAY]; j++) { + if (strcmp(exportList[OSMTYPE_WAY][j].name, q->key) == 0) { + if (exportList[OSMTYPE_WAY][j].flags & FLAG_POLYGON) { + contains_tag = 1; + break; + } + } + } + if (contains_tag == 0) { + qq = q->next; + removeTag(q); + q = qq; + } else { + q = q->next; + } + } + } + resetList(&poly_tags); + } else { + /* Unknown type, just exit */ + resetList(&tags); + resetList(&poly_tags); + return 1; + } + + if (!listHasData(&tags)) { + resetList(&tags); + resetList(&poly_tags); + return 1; + } + + /* If we are creating a multipolygon then we + mark each member so that we can skip them during iterate_ways + but only if the polygon-tags look the same as the outer ring */ + if (make_polygon) { + for (i = 0; i < member_count; i++) { + int match = 1; + struct keyval *p = member_tags[i].next; + while (p != &(member_tags[i])) { + const char *v = getItem(&tags, p->key); + if (!v || strcmp(v, p->value)) { + /* z_order and osm_ are automatically generated tags, so ignore them */ + if ((strcmp(p->key, "z_order") != 0) && (strcmp(p->key, "osm_user") != 0) && + (strcmp(p->key, "osm_version") != 0) && (strcmp(p->key, "osm_uid") != 0) && + (strcmp(p->key, "osm_changeset")) && (strcmp(p->key, 
"osm_timestamp") != 0)) { + match = 0; + break; + } + } + p = p->next; + } + if (match) { + member_superseeded[i] = 1; + } else { + member_superseeded[i] = 0; + } + } + } + + resetList(rel_tags); + cloneList(rel_tags, &tags); + resetList(&tags); + + add_z_order(rel_tags, roads); + + return 0; +} + +static int tagtransform_lua_init() { +#ifdef HAVE_LUA + L = luaL_newstate(); + luaL_openlibs(L); + luaL_dofile(L, options->tag_transform_script); + + lua_getglobal(L, "filter_tags_node"); + if (!lua_isfunction (L, -1)) { + fprintf(stderr,"Tag transform style does not contain a function filter_tags_node\n"); + return 1; + } + lua_pop(L,1); + + lua_getglobal(L, "filter_tags_way"); + if (!lua_isfunction (L, -1)) { + fprintf(stderr,"Tag transform style does not contain a function filter_tags_way\n"); + return 1; + } + lua_pop(L,1); + + lua_getglobal(L, "filter_basic_tags_rel"); + if (!lua_isfunction (L, -1)) { + fprintf(stderr,"Tag transform style does not contain a function filter_basic_tags_rel\n"); + return 1; + } + + lua_getglobal(L, "filter_tags_relation_member"); + if (!lua_isfunction (L, -1)) { + fprintf(stderr,"Tag transform style does not contain a function filter_tags_relation_member\n"); + return 1; + } + + return 0; +#else + fprintf(stderr,"Error: Could not init lua tag transform, as lua support was not compiled into this version\n"); + return 1; +#endif +} + +void tagtransform_lua_shutdown() { +#ifdef HAVE_LUA + lua_close(L); +#endif +} + +unsigned int tagtransform_filter_node_tags(struct keyval *tags) { + int poly, roads; + if (transform_method) { + return tagtransform_lua_filter_basic_tags(OSMTYPE_NODE, tags, &poly, &roads); + } else { + return tagtransform_c_filter_basic_tags(OSMTYPE_NODE, tags, &poly, &roads); + } +} + +/* + * This function gets called twice during initial import per way. 
Once from add_way and once from out_way + */ +unsigned int tagtransform_filter_way_tags(struct keyval *tags, int * polygon, int * roads) { + if (transform_method) { + return tagtransform_lua_filter_basic_tags(OSMTYPE_WAY, tags, polygon, roads); + } else { + return tagtransform_c_filter_basic_tags(OSMTYPE_WAY, tags, polygon, roads); + } +} + +unsigned int tagtransform_filter_rel_tags(struct keyval *tags) { + int poly, roads; + if (transform_method) { + return tagtransform_lua_filter_basic_tags(OSMTYPE_RELATION, tags, &poly, &roads); + } else { + return tagtransform_c_filter_basic_tags(OSMTYPE_RELATION, tags, &poly, &roads); + } +} + +unsigned int tagtransform_filter_rel_member_tags(struct keyval *rel_tags, int member_count, struct keyval *member_tags,const char **member_role, int * member_superseeded, int * make_boundary, int * make_polygon, int * roads) { + if (transform_method) { + return tagtransform_lua_filter_rel_member_tags(rel_tags, member_count, member_tags, member_role, member_superseeded, make_boundary, make_polygon, roads); + } else { + return tagtransform_c_filter_rel_member_tags(rel_tags, member_count, member_tags, member_role, member_superseeded, make_boundary, make_polygon, roads); + } +} + +int tagtransform_init(const struct output_options *opts) { + options = opts; + if (opts->tag_transform_script) { + transform_method = 1; + fprintf(stderr, "Using lua based tag processing pipeline with script %s\n", opts->tag_transform_script); + return tagtransform_lua_init(); + } else { + transform_method = 0; + fprintf(stderr, "Using built-in tag processing pipeline\n"); + return 0; //Nothing to initialise + } +} + +void tagtransform_shutdown() { + if (transform_method) + tagtransform_lua_shutdown(); +} diff -Nru osm2pgsql-0.82.0/tagtransform.h osm2pgsql-0.86.0/tagtransform.h --- osm2pgsql-0.82.0/tagtransform.h 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tagtransform.h 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,29 @@ + +#ifndef TAGTRANSFORM_H 
+#define TAGTRANSFORM_H + +#ifdef HAVE_LUA +#include +#include +#include +#endif +#include "output.h" + +#ifdef __cplusplus +extern "C" { +#endif + + +unsigned int tagtransform_filter_node_tags(struct keyval *tags); +unsigned int tagtransform_filter_way_tags(struct keyval *tags, int * polygon, int * roads); +unsigned int tagtransform_filter_rel_tags(struct keyval *tags); +unsigned int tagtransform_filter_rel_member_tags(struct keyval *rel_tags, int member_count, struct keyval *member_tags,const char **member_role, int * member_superseeded, int * make_boundary, int * make_polygon, int * roads); + +int tagtransform_init(const struct output_options *options); +void tagtransform_shutdown(); + +#ifdef __cplusplus +} +#endif + +#endif //TAGTRANSFORM_H Binary files /tmp/KK9OcHYy4p/osm2pgsql-0.82.0/tests/000466354.osc.gz and /tmp/BWPeo2opJ3/osm2pgsql-0.86.0/tests/000466354.osc.gz differ Binary files /tmp/KK9OcHYy4p/osm2pgsql-0.82.0/tests/liechtenstein-2013-08-03.osm.bz2 and /tmp/BWPeo2opJ3/osm2pgsql-0.86.0/tests/liechtenstein-2013-08-03.osm.bz2 differ Binary files /tmp/KK9OcHYy4p/osm2pgsql-0.82.0/tests/liechtenstein-2013-08-03.osm.pbf and /tmp/BWPeo2opJ3/osm2pgsql-0.86.0/tests/liechtenstein-2013-08-03.osm.pbf differ diff -Nru osm2pgsql-0.82.0/tests/regression-test.py osm2pgsql-0.86.0/tests/regression-test.py --- osm2pgsql-0.82.0/tests/regression-test.py 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tests/regression-test.py 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,628 @@ +#!/usr/bin/env python + +import unittest +import psycopg2 +import os +from pwd import getpwnam +import subprocess + +full_import_file="tests/liechtenstein-2013-08-03.osm.pbf" +multipoly_import_file="tests/test_multipolygon.osm" #This file contains a number of different multi-polygon test cases +diff_import_file="tests/000466354.osc.gz" +diff_multipoly_import_file="tests/test_multipolygon_diff.osc" #This file contains a number of different multi-polygon diff processing test cases + 
+created_tablespace = 0 + +#**************************************************************** +#**************************************************************** +sql_test_statements=[ + ( 0, 'Basic point count', 'SELECT count(*) FROM planet_osm_point;', 1342 ), + ( 1, 'Basic line count', 'SELECT count(*) FROM planet_osm_line;', 3300 ), + ( 2, 'Basic road count', 'SELECT count(*) FROM planet_osm_roads;', 375 ), + ( 3, 'Basic polygon count', 'SELECT count(*) FROM planet_osm_polygon;', 4128 ), + ( 4, 'Basic latlon line count', 'SELECT count(*) FROM planet_osm_line;', 3298 ), + ( 5, 'Basic latlon road count', 'SELECT count(*) FROM planet_osm_roads;', 374 ), + ( 6, 'Basic post-diff point count', 'SELECT count(*) FROM planet_osm_point;', 1457 ), + ( 7, 'Basic post-diff line count', 'SELECT count(*) FROM planet_osm_line;', 3344 ), + ( 8, 'Basic post-diff road count', 'SELECT count(*) FROM planet_osm_roads;', 381 ), + ( 9, 'Basic post-diff polygon count', 'SELECT count(*) FROM planet_osm_polygon;', 4275 ), + ( 10, 'Absence of nodes table', 'SELECT count(*) FROM pg_tables WHERE tablename = \'planet_osm_nodes\'', 0), + ( 11, 'Absence of way table', 'SELECT count(*) FROM pg_tables WHERE tablename = \'planet_osm_ways\'', 0), + ( 12, 'Absence of rel line', 'SELECT count(*) FROM pg_tables WHERE tablename = \'planet_osm_rels\'', 0), + ( 13, 'Basic polygon area', 'SELECT round(sum(cast(ST_Area(way) as numeric)),0) FROM planet_osm_polygon;', 1223800814), + ( 14, 'Gazetteer place count', 'SELECT count(*) FROM place', 4374), + ( 15, 'Gazetteer place node count', 'SELECT count(*) FROM place WHERE osm_type = \'N\'', 778), + ( 16, 'Gazetteer place way count', 'SELECT count(*) FROM place WHERE osm_type = \'W\'', 3577), + ( 17, 'Gazetteer place rel count', 'SELECT count(*) FROM place WHERE osm_type = \'R\'', 19), + ( 18, 'Gazetteer post-diff place count', 'SELECT count(*) FROM place', 4428), + ( 19, 'Gazetteer post-diff place node count', 'SELECT count(*) FROM place WHERE osm_type = 
\'N\'', 787), + ( 20, 'Gazetteer post-diff place way count', 'SELECT count(*) FROM place WHERE osm_type = \'W\'', 3622), + ( 21, 'Gazetteer post-diff place rel count', 'SELECT count(*) FROM place WHERE osm_type = \'R\'', 19), + ( 22, 'Gazetteer housenumber count', 'SELECT count(*) FROM place WHERE housenumber is not null', 199), + ( 23, 'Gazetteer post-diff housenumber count count', 'SELECT count(*) FROM place WHERE housenumber is not null', 199), + ( 24, 'Gazetteer isin count', 'SELECT count(*) FROM place WHERE isin is not null', 239), + ( 25, 'Gazetteer post-diff isin count count', 'SELECT count(*) FROM place WHERE isin is not null', 239), + ( 26, 'Multipolygon basic case (Tags from outer way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -15 and landuse = \'residential\' and name = \'Name_way\'', 12894), + ( 27, 'Multipolygon basic case (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -1 and landuse = \'residential\' and name = \'Name_rel\'', 12895), + ( 28, 'Multipolygon named inner - outer (Tags from way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -16 and landuse = \'residential\' and name = \'Name_way2\'', 12895), + ( 29, 'Multipolygon named inner - inner way', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 4 and landuse = \'farmland\' and name = \'Name_way3\'', 3144), + ( 30, 'Multipolygon named inner - outer (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -8 and landuse = \'residential\' and name = \'Name_rel2\'', 12894), + ( 31, 'Multipolygon named inner - inner way', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 5 and landuse = \'farmland\' and name = \'Name_way4\'', 3144), + ( 32, 'Multipolygon named same inner - outer (Tags from way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -17 and landuse = \'residential\' and name = \'Name_way16\'', 12895), 
+ ( 33, 'Multipolygon named same inner - inner way absent', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 15', 0), + ( 34, 'Multipolygon non-area inner - outer (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -14 and landuse = \'residential\' and name = \'Name_way5\'', 12893), + ( 35, 'Multipolygon non-area inner - inner (Tags from way)', + 'SELECT round(ST_Length(way)) FROM planet_osm_line WHERE osm_id = 6 and highway = \'residential\' and name = \'Name_way6\'', 228), + ( 36, 'Multipolygon 2 holes (Tags from way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -18 and landuse = \'residential\' and name = \'Name_way7\'', 11823), + ( 37, 'Multipolygon 2 holes (Tags from way)', + 'SELECT ST_NumInteriorRing(way) FROM planet_osm_polygon WHERE osm_id = -18 and landuse = \'residential\' and name = \'Name_way7\'', 2), + ( 38, 'Multipolygon from multiple outer ways 0 holes (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -11 and landuse = \'residential\' and name = \'Name_rel6\'', 11528), + ( 39, 'Multipolygon from multiple outer and multiple inner ways 2 holes (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -3 and landuse = \'residential\' and name = \'Name_rel11\'', 9286), + ( 40, 'Multipolygon 2 holes (Tags from way)', + 'SELECT ST_NumInteriorRing(way) FROM planet_osm_polygon WHERE osm_id = -3 and landuse = \'residential\' and name = \'Name_rel11\'', 2), + ( 41, 'Multipolygon with touching inner ways 1 hole (Tags from way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -19 and landuse = \'residential\' and name = \'Name_way8\'', 12167), + ( 42, 'Multipolygon with touching inner ways 1 hole (Tags from way)', + 'SELECT ST_NumInteriorRing(way) FROM planet_osm_polygon WHERE osm_id = -19 and landuse = \'residential\' and name = \'Name_way8\'', 1), + ( 43, 'Multipolygon with 2 outer ways 
(Tags from relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -13 and landuse = \'farmland\' and name = \'Name_rel9\'', 17581), + ( 44, 'Multipolygon with 2 outer ways (Tags from relation)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -13 and landuse = \'farmland\' and name = \'Name_rel9\'', 2), + ( 45, 'Multipolygon with 2 outer ways (multigeometry)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -13 and landuse = \'farmland\' and name = \'Name_rel9\'', 1), + ( 46, 'Multipolygon with 2 outer ways (multigeometry)', + 'SELECT ST_NumGeometries(way) FROM planet_osm_polygon WHERE osm_id = -13 and landuse = \'farmland\' and name = \'Name_rel9\'', 2), + ( 47, 'Multipolygon nested outer ways. Both outer and inner ways are from multiple ways (Tags from relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -7 and landuse = \'farmland\' and name = \'Name_rel15\'', 16169), + ( 48, 'Multipolygon nested outer ways. Both outer and inner ways are from multiple ways (Tags from relation)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -7 and landuse = \'farmland\' and name = \'Name_rel15\'', 2), + ( 49, 'Multipolygon nested outer ways. Both outer and inner ways are from multiple ways (multigeometry)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -7 and landuse = \'farmland\' and name = \'Name_rel15\'', 1), + ( 50, 'Multipolygon nested outer ways. 
Both outer and inner ways are from multiple ways (multigeometry)', + 'SELECT ST_NumGeometries(way) FROM planet_osm_polygon WHERE osm_id = -7 and landuse = \'farmland\' and name = \'Name_rel15\'', 2), + ( 51, 'Basic hstore point count', 'SELECT count(*) FROM planet_osm_point;', 1360 ), + ( 52, 'Basic hstore line count', 'SELECT count(*) FROM planet_osm_line;', 3323 ), + ( 53, 'Basic hstore road count', 'SELECT count(*) FROM planet_osm_roads;', 375 ), + ( 54, 'Basic hstore polygon count', 'SELECT count(*) FROM planet_osm_polygon;', 4128 ), + ( 55, 'Basic post-diff point count', 'SELECT count(*) FROM planet_osm_point;', 1475 ), + ( 56, 'Basic post-diff line count', 'SELECT count(*) FROM planet_osm_line;', 3367 ), + ( 57, 'Basic post-diff road count', 'SELECT count(*) FROM planet_osm_roads;', 381 ), + ( 58, 'Basic post-diff polygon count', 'SELECT count(*) FROM planet_osm_polygon;', 4275 ), + ( 59, 'Extra hstore full tags point count', + 'SELECT count(*) FROM planet_osm_point WHERE tags ? \'osm_user\' and tags ? \'osm_version\' and tags ? \'osm_uid\' and tags ? \'osm_changeset\'', 1360), + ( 60, 'Extra hstore full tags line count', + 'SELECT count(*) FROM planet_osm_line WHERE tags ? \'osm_user\' and tags ? \'osm_version\' and tags ? \'osm_uid\' and tags ? \'osm_changeset\'', 3323), + ( 61, 'Extra hstore full tags polygon count', + 'SELECT count(*) FROM planet_osm_polygon WHERE tags ? \'osm_user\' and tags ? \'osm_version\' and tags ? \'osm_uid\' and tags ? 
\'osm_changeset\'', 4128), + ( 62, 'Multipolygon copying of tags from outer with extra tags on relation', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -22', 20879), + ( 63, 'Multipolygon copying of tags from outer with extra tags on relation (abscence of way)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 84', 0), + ( 64, 'Multipolygon non copying of tags from outer with polygon tags on relation', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -24 and "natural" = \'water\'', 18501), + ( 65, 'Multipolygon non copying of tags from outer with polygon tags on relation (presence of way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 83 and "landuse" = \'farmland\'', 24859), + ( 66, 'Multipolygon diff moved point of outer way case (Tags from outer way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -15 and landuse = \'residential\' and name = \'Name_way\'', 24751), + ( 67, 'Multipolygon diff moved point of inner way case (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -1 and landuse = \'residential\' and name = \'Name_rel\'', 13949), + ( 68, 'Multipolygon point of inner way case (Tags from relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -25 and landuse = \'farmland\' and name = \'my name\'', 23886), + ( 69, 'Multipolygon point of inner way case (Tags from relation)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 90', 0), + ( 70, 'Multipolygon diff remove relation (tagged outer way gets re added)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 90 and landuse = \'farmland\'', 32626), + ( 71, 'Multipolygon diff remove relation', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -25', 0), + ( 72, 'Multipolygon tags on both inner and outer (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -34 and "natural" = 
\'water\'', 15246), + ( 73, 'Multipolygon tags on both inner and outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 113', 0), + ( 74, 'Multipolygon tags on both inner and outer (abscence of inner)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 118', 0), + ( 75, 'Multipolygon tags on both inner and outer diff change outer (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -34 and "landuse" = \'farmland\'', 15246), + ( 76, 'Multipolygon tags on both inner and outer diff change outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 113', 0), + ( 77, 'Multipolygon tags on both inner and outer diff change on outer (creation of inner)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 118 and "natural" = \'water\'', 1234), + ( 78, 'Multipolygon tags on outer (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -33 and "natural" = \'water\'', 15612), + ( 79, 'Multipolygon tags on outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 114', 0), + ( 80, 'Multipolygon tags on outer change of way tags (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -33 and "landuse" = \'cemetery\'', 15612), + ( 81, 'Multipolygon tags on outer (abscence of old relation)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -33 and "natural" = \'water\'', 0), + ( 82, 'Multipolygon tags on relation two outer (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -29 and "natural" = \'water\'', 68492), + ( 83, 'Multipolygon tags on relation two outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 109', 0), + ( 84, 'Multipolygon tags on relation two outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 104', 0), + ( 85, 
'Multipolygon tags on relation two outer diff delete way (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -29 and "natural" = \'water\'', 29154), + ( 86, 'Multipolygon tags on relation two outer (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -35 and "natural" = \'water\'', 28730), + ( 87, 'Multipolygon tags on relation two outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 107', 0), + ( 88, 'Multipolygon tags on relation two outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 102', 0), + ( 89, 'Multipolygon tags on relation two outer diff remove way from relation (presence of relation)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = -35 and "natural" = \'water\'', 15736), + ( 90, 'Multipolygon tags on relation two outer diff remove way from relation (presence of single way)', + 'SELECT round(ST_Area(way)) FROM planet_osm_polygon WHERE osm_id = 102 and "natural" = \'water\'', 12994), + ( 91, 'Basic line length', 'SELECT round(sum(ST_Length(way))) FROM planet_osm_line;', 4269394), + ( 92, 'Basic line length', 'SELECT round(sum(ST_Length(way))) FROM planet_osm_roads;', 2032023), + ( 93, 'Basic number of hstore points tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_point;', 4228), + ( 94, 'Basic number of hstore roads tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_roads;', 2316), + ( 95, 'Basic number of hstore lines tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_line;', 10897), + ( 96, 'Basic number of hstore polygons tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_polygon;', 9540), + ( 97, 'Diff import number of hstore points tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_point;', 4352), + ( 98, 'Diff import number of hstore roads tags', 'SELECT sum(array_length(akeys(tags),1)) FROM 
planet_osm_roads;', 2340), + ( 99, 'Diff import number of hstore lines tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_line;', 11020), + ( 100, 'Diff import number of hstore polygons tags', 'SELECT sum(array_length(akeys(tags),1)) FROM planet_osm_polygon;', 9834), + #**** Tests to check if inner polygon appears when outer tags change after initially identicall inner and outer way tags in a multi-polygon **** + #**** These tests are currently broken and noted in trac ticket #2853 **** + ( 101, 'Multipolygon identical tags on inner and outer (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -31 and "natural" = \'heath\'', 32702), + ( 102, 'Multipolygon identical tags on inner and outer (abscence of outer)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 120', 0), + ( 103, 'Multipolygon identical tags on inner and outer (abscence of inner)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 112', 0), + ( 104, 'Multipolygon identical tags on inner and outer (presence of relation), post diff', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -31 and "natural" = \'water\'', 32702), + ( 105, 'Multipolygon identical tags on inner and outer (presece of inner)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = 112 and "natural" = \'heath\'', 1234), + #**** Test to check that only polygon tags that are present on all outer ways get copied over to the multi-polygon relation **** + ( 106, 'Multipolygon copy outer tags (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -38 and "natural" = \'water\'', 29340), + ( 107, 'Multipolygon copy outer tags (absence of partial outer tags)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -38 and "natural" = \'water\' and "man_made" = \'pier\'', 0), + ( 108, 'Multipolygon copy outer tags (absence of multi-polygon tagged outer way)', + 'SELECT count(*) 
FROM planet_osm_line WHERE osm_id = 134 OR osm_id = 133', 0), + ( 109, 'Multipolygon copy outer tags (presence of additionally tagged outer way)', + 'SELECT round(sum(ST_length(way))) FROM planet_osm_line WHERE (osm_id = 136 OR osm_id = 132) AND "man_made" = \'pier\'', 407), + ( 110, 'Multipolygon copy outer tags (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -37 and "natural" = \'water\'', 29952), + ( 111, 'Multipolygon copy outer tags (absence of partial outer tags)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = -37 and "natural" = \'water\' and "man_made" = \'pier\'', 0), + ( 112, 'Multipolygon copy outer tags (absence of multi-polygon tagged outer way)', + 'SELECT count(*) FROM planet_osm_line WHERE osm_id = 128 OR osm_id = 125', 0), + ( 113, 'Multipolygon copy outer tags (presence of additionally tagged outer way)', + 'SELECT round(sum(ST_length(way))) FROM planet_osm_line WHERE (osm_id = 126 OR osm_id = 124) AND "man_made" = \'pier\'', 276), + ( 114, 'Multipolygon copy outer tags (absence of multi-polygon tagged inner way)', + 'SELECT count(*) FROM planet_osm_line WHERE osm_id = 123 OR osm_id = 121', 0), + ( 115, 'Multipolygon copy outer tags (presence of additionally tagged inner way)', + 'SELECT round(sum(ST_length(way))) FROM planet_osm_line WHERE (osm_id = 127 OR osm_id = 122) AND "man_made" = \'pier\'', 318), + #**** Test to check that if polygon tags are on both outer ways and relation, polygons don't get duplicated in the db **** + ( 116, 'Multipolygon tags on both outer and relation (presence of relation)', + 'SELECT round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -39 and "landuse" = \'forest\'', 10379), + ( 117, 'Multipolygon tags on both outer and relation (absence of outer way)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 138', 0), + ( 118, 'Multipolygon tags on both outer and relation with additional tags on relation (presence of relation)', + 'SELECT 
round(sum(ST_Area(way))) FROM planet_osm_polygon WHERE osm_id = -40 and "landuse" = \'forest\'', 12397), + ( 119, 'Multipolygon tags on both outer and relation with additional tags on relation (absence of outer way)', + 'SELECT count(*) FROM planet_osm_polygon WHERE osm_id = 140', 0), + ] +#**************************************************************** +#**************************************************************** + + +class NonSlimRenderingTestSuite(unittest.TestSuite): + def __init__(self): + unittest.TestSuite.__init__(self,map(ThirdTestCase, + ("testOne", + "testTwo"))) + self.addTest(BasicNonSlimTestCase("basic case",[], [0,1,2,3,10,13, 91, 92])) + self.addTest(BasicNonSlimTestCase("slim --drop case",["--slim","--drop"], [0,1,2,3, 10, 11, 12, 13, 91, 92])) + self.addTest(BasicNonSlimTestCase("Hstore index drop", ["--slim", "--hstore", "--hstore-add-index", "--drop"], [51,52,53,54])) + self.addTest(BasicNonSlimTestCase("lat lon projection",["-l"], [0,4,5,3,10, 11, 12])) + #Failing test 3,13 due to difference in handling mixture of tags on ways and relations, where the correct behaviour is non obvious + #self.addTest(BasicNonSlimTestCase("--tag-transform-script", ["--tag-transform-script", "style.lua"], [0,1,2,3,10,13,91,92])) + self.addTest(BasicNonSlimTestCase("--tag-transform-script", ["--tag-transform-script", "style.lua"], [0,1,2,10,91,92])) + + +class SlimRenderingTestSuite(unittest.TestSuite): + def __init__(self): + unittest.TestSuite.__init__(self,map(ThirdTestCase, + ("testOne", + "testTwo"))) + self.addTest(BasicSlimTestCase("basic case", [], [0,1,2,3,13, 91, 92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Parallel processing", ["--number-processes", "8", "-C100"], [0,1,2,3,13,91,92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Parallel processing with non 100% node-cache", ["--number-processes", "8", "-C1", "--cache-strategy=dense"], [0,1,2,3,13,91,92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Parallel processing with disabled 
node-cache", ["-C0"], [0,1,2,3,13,91,92],[6,7,8,9])) + # Failes to do correct error checking. This needs fixing in osm2pgsql + # self.addTest(BasicSlimTestCase("Parallel processing with failing database conneciton (connection limit exceeded)", ["--number-processes", "32", "-C100"], [0,1,2,3],[6,7,8,9])) + # Counts are expected to be different in hstore, needs adjusted tests + self.addTest(BasicSlimTestCase("Hstore match only", ["-k", "--hstore-match-only"], [0,1,2,3],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Hstore name column", ["-z", "name:"], [0,1,2,3],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Hstore", ["-k"], [51,52,53,54],[55,56,57,58])) + self.addTest(BasicSlimTestCase("Hstore all", ["-j"], [51,52,53,54,93,94,95,96],[55,56,57,58, 97, 98, 99, 100])) + self.addTest(BasicSlimTestCase("Hstore index", ["--hstore", "--hstore-add-index"], [51,52,53,54],[55,56,57,58])) + #tests dont check for osm_timestamp which is currently missing in the pbf parser + self.addTest(BasicSlimTestCase("Extra tags hstore match only", ["-x", "-k", "--hstore-match-only"], [0,1,2,3],[6,7,8,9])) + self.addTest(BasicSlimTestCase("Extra tags hstore all", ["-j", "-x"], [51,52,53,54,59,60,61],[55,56,57,58])) + + self.addTest(BasicSlimTestCase("--tablespace-main-data", ["--tablespace-main-data", "tablespacetest"], [0,1,2,3,13,91,92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("--tablespace-main-index", ["--tablespace-main-index", "tablespacetest"], [0,1,2,3,13,91,92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("--tablespace-slim-data", ["--tablespace-slim-data", "tablespacetest"], [0,1,2,3,13,91,92],[6,7,8,9])) + self.addTest(BasicSlimTestCase("--tablespace-slim-index", ["--tablespace-slim-index", "tablespacetest"], [0,1,2,3,13,91,92],[6,7,8,9])) + #Failing test 3,13,9 due to difference in handling mixture of tags on ways and relations, where the correct behaviour is non obvious + #self.addTest(BasicNonSlimTestCase("--tag-transform-script", ["--tag-transform-script", "style.lua"], 
[0,1,2,3,10,13,91,92])) + self.addTest(BasicSlimTestCase("--tag-transform-script", ["--tag-transform-script", "style.lua"], [0,1,2,91,92],[6,7,8])) + + +class SlimGazetteerTestSuite(unittest.TestSuite): + def __init__(self): + unittest.TestSuite.__init__(self,map(ThirdTestCase, + ("testOne", + "testTwo"))) + self.addTest(BasicGazetteerTestCase("basic case", [], [14,15,16,17,22,24],[18,19,20,21,23,25])) + + +class MultiPolygonSlimRenderingTestSuite(unittest.TestSuite): + def __init__(self): + unittest.TestSuite.__init__(self,map(ThirdTestCase, + ("testOne", + "testTwo"))) + #Case 77 currently doesn't work + self.addTest(MultipolygonSlimTestCase("basic case", [], + [26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 68, 69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("multi geometry", ["-G"], + [26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 45, 46, 47, 49, 50, 62, 63, 64, 65, 68, 69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 45, 46, 47, 49, 50, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("hstore case", ["-k"], + [26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,47,48,62,63,64,65,68,69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("hstore case", ["-k", "--hstore-match-only"], + [26,27,28,29,30,31,32,33,34,35,36,37,38, 39, 40,41,42, 43, 44, 47, 48, 62, 63, 
64, 65, 68, 69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("Extra tags hstore match only", ["-x", "-k", "--hstore-match-only"], + [26,27,28,29,30,31,32,33,34,35,36,37,38, 39, 40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 68, 69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("Extra tags hstore match only", ["-x", "-j"], + [26,27,28,29,30,31,32,33,34,35,36,37,38, 39, 40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 68, 69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88, + 106,107,108,109,110,111,112,113,114,115,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63, 64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("lua tagtransform case", ["--tag-transform-script", "style.lua"], + [26,27,28,29,30,31,32,33,34,35,36,37,38, 39, 40, 41, 42, 43, 44, 47, 48, 62, 64, 65,68,69, 72, 73, 74, 78, 79, 82, 83, 84, 86, 87, 88,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42, 43, 44, 47, 48, 62, 63,64, 65, 66, 67, 70, 71, 75, 76, 79, 80, 81, 83, 84, 85, 87, 89, 90])) + self.addTest(MultipolygonSlimTestCase("lua tagtransform case with hstore", ["--tag-transform-script", "style.lua", "-k"], + [26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,47,48,62,63,64,65,68,69,72,73,74,78,79,82,83,84,86,87,88,116,117,118,119], + [28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,47,48,62,63,64,65,66,67,70,71,75,76,79,80,81,83,84,85,87,89,90])) + + +class CompleteTestSuite(unittest.TestSuite): + def __init__(self): 
+ unittest.TestSuite.__init__(self, map(ThirdTestCase, + ("testOne", + "testTwo"))) + self.addTest(NonSlimRenderingTestSuite()) + self.addTest(SlimRenderingTestSuite()) + self.addTest(MultiPolygonSlimRenderingTestSuite()) + self.addTest(SlimGazetteerTestSuite()) + +#**************************************************************** +class ThirdTestCase(unittest.TestCase): + def testOne(self): + assert 1 == 1 + def testTwo(self): + assert 2 == 2 + +#**************************************************************** + +class BaseTestCase(unittest.TestCase): + def dbConnect(self): + try: + self.conn=psycopg2.connect("dbname='osm2pgsql-test'") + self.conn.autocommit = True + self.cur = self.conn.cursor() + except Exception, e: + print "I am unable to connect to the database." + e + + def dbClose(self): + self.cur.close() + self.conn.close() + + def executeStatements(self, seq): + print "*********************************" + self.dbConnect() + try: + for i in seq: + self.assertEqual(sql_test_statements[i][0], i, "test case numbers don't match up: " + str(i) + " =/=" + str(sql_test_statements[i][0])) + try: + self.cur.execute(sql_test_statements[i][2]) + res = self.cur.fetchall() + except Exception, e: + self.assertEqual(0, 1, str(sql_test_statements[i][0]) + ": Failed to execute " + sql_test_statements[i][1] + + " (" + sql_test_statements[i][2] + ") {" + str(self.parameters) +"}") + if (res == None): + self.assertEqual(0, 1, str(sql_test_statements[i][0]) + ": Sql statement returned no results: " + + sql_test_statements[i][1] + " (" + sql_test_statements[i][2] + ") {" + str(self.parameters) +"}") + self.assertEqual(len(res), 1, str(sql_test_statements[i][0]) + ": Sql statement returned more than one result: " + + str(res) + " -- " + sql_test_statements[i][1] + " (" + sql_test_statements[i][2] + ") {" + str(self.parameters) +"}") + self.assertEqual( res[0][0], sql_test_statements[i][3], + str(sql_test_statements[i][0]) + ": Failed " + sql_test_statements[i][1] + ", expected " 
+ str(sql_test_statements[i][3]) + " but was " + str(res[0][0]) + + " (" + sql_test_statements[i][2] + ") {" + str(self.parameters) +"}") + finally: + self.dbClose() + +#**************************************************************** + +class BaseNonSlimTestCase(BaseTestCase): + + def setUpGeneric(self, parameters, file): + proc = subprocess.Popen(["./osm2pgsql", "-Sdefault.style", "-dosm2pgsql-test", "-C100"] + parameters + [full_import_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + self.assertEqual (proc.returncode, 0, "Execution of osm2pgsql with options: '%s' failed:\n%s\n%s\n" % (str(parameters), outp, outerr)) + +class BaseSlimTestCase(BaseTestCase): + + def setUpGeneric(self, parameters, file): + proc = subprocess.Popen(["./osm2pgsql", "--slim", "-Sdefault.style", "-dosm2pgsql-test", "-C100"] + parameters + [file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + self.assertEqual (proc.returncode, 0, "Execution of osm2pgsql --slim with options: '%s' failed:\n%s\n%s\n" % (str(parameters), outp, outerr)) + + def updateGeneric(self, parameters, file): + proc = subprocess.Popen(["./osm2pgsql", "--slim", "--append", "-Sdefault.style", "-dosm2pgsql-test", "-C100"] + parameters + [file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + self.assertEqual (proc.returncode, 0, "Execution of osm2pgsql --slim --append with options: '%s' failed:\n%s\n%s\n" % (str(parameters), outp, outerr)) + +class BaseGazetteerTestCase(BaseTestCase): + + def setUpGeneric(self, parameters, file): + proc = subprocess.Popen(["./osm2pgsql", "--slim", "-Ogazetteer", "-Sdefault.style", "-dosm2pgsql-test"] + parameters + [file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + self.assertEqual (proc.returncode, 0, "Execution of osm2pgsql --slim gazetteer options: '%s' failed:\n%s\n%s\n" % (str(parameters), outp, outerr)) + + def 
updateGeneric(self, parameters, file): + proc = subprocess.Popen(["./osm2pgsql", "--slim", "-Ogazetteer", "--append", "-Sdefault.style", "-dosm2pgsql-test"] + parameters + [file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + self.assertEqual (proc.returncode, 0, "Execution of osm2pgsql --slim --append gazetteer options: '%s' failed:\n%s\n%s\n" % (str(parameters), outp, outerr)) + + +#**************************************************************** +class BasicNonSlimTestCase(BaseNonSlimTestCase): + + def __init__(self, name, parameters, initialStatements): + BaseNonSlimTestCase.__init__(self) + self.name = name + self.parameters = parameters + self.initialStatements = initialStatements + + def setUp(self): + self.setUpGeneric(self.parameters, full_import_file) + + def runTest(self): + print "****************************************" + print "Running initial import for " + self.name + self.executeStatements(self.initialStatements) + + +class BasicSlimTestCase(BaseSlimTestCase): + + def __init__(self, name, parameters, initialStatements, postDiffStatements): + BaseSlimTestCase.__init__(self) + self.name = name + self.parameters = parameters + self.initialStatements = initialStatements + self.postDiffStatements = postDiffStatements + + def setUp(self): + self.setUpGeneric(self.parameters, full_import_file) + + + def runTest(self): + print "****************************************" + print "Running initial import for " + self.name + self.executeStatements(self.initialStatements) + print "Running diff-import for " + self.name + self.updateGeneric(self.parameters, diff_import_file) + self.executeStatements(self.postDiffStatements) + +class MultipolygonSlimTestCase(BaseSlimTestCase): + + def __init__(self, name, parameters, initialStatements, postDiffStatements): + BaseSlimTestCase.__init__(self) + self.name = name + self.parameters = parameters + self.initialStatements = initialStatements + self.postDiffStatements = 
postDiffStatements + + def setUp(self): + self.setUpGeneric(self.parameters, multipoly_import_file) + + + def runTest(self): + print "****************************************" + print "Running initial import for " + self.name + self.executeStatements(self.initialStatements) + print "Running diff-import for " + self.name + self.updateGeneric(self.parameters, diff_multipoly_import_file) + self.executeStatements(self.postDiffStatements) + + +class BasicGazetteerTestCase(BaseGazetteerTestCase): + + def __init__(self, name, parameters, initialStatements, postDiffStatements): + BaseGazetteerTestCase.__init__(self) + self.name = name + self.parameters = parameters + self.initialStatements = initialStatements + self.postDiffStatements = postDiffStatements + + def setUp(self): + self.setUpGeneric(self.parameters, full_import_file) + + + def runTest(self): + print "****************************************" + print "Running initial import in gazetteer mode for " + self.name + self.executeStatements(self.initialStatements) + print "Running diff-import in gazetteer mode for " + self.name + self.updateGeneric(self.parameters, diff_import_file) + self.executeStatements(self.postDiffStatements) + + + +#**************************************************************** +#**************************************************************** +def setupDB(): + print "Setting up test database" + try: + gen_conn=psycopg2.connect("dbname='template1'") + gen_conn.autocommit = True + except Exception, e: + print "I am unable to connect to the database." + exit() + + try: + gen_cur = gen_conn.cursor() + except Exception, e: + gen_conn.close() + print "I am unable to connect to the database." 
+ exit() + + try: + gen_cur.execute("""DROP DATABASE IF EXISTS \"osm2pgsql-test\"""") + gen_cur.execute("""CREATE DATABASE \"osm2pgsql-test\" WITH ENCODING 'UTF8'""") + except Exception, e: + print "Failed to create osm2pgsql-test db" + e.pgerror + exit(); + finally: + gen_cur.close() + gen_conn.close() + + try: + test_conn=psycopg2.connect("dbname='osm2pgsql-test'") + test_conn.autocommit = True + except Exception, e: + print "I am unable to connect to the database." + e + exit() + + try: + test_cur = test_conn.cursor() + except Exception, e: + print "I am unable to connect to the database." + e + gen_conn.close() + exit() + + try: + try: + global created_tablespace + test_cur.execute("""SELECT spcname FROM pg_tablespace WHERE spcname = 'tablespacetest'""") + if test_cur.fetchone(): + print "We already have a tablespace, can use that" + created_tablespace = 0 + else: + print "For the test, we need to create a tablespace. This needs root privileges" + created_tablespace = 1 + ### This makes postgresql read from /tmp + ## Does this have security implications like opening this to a possible symlink attack? + try: + os.mkdir("/tmp/psql-tablespace") + returncode = subprocess.call(["/usr/bin/sudo", "/bin/chown", "postgres.postgres", "/tmp/psql-tablespace"]) + test_cur.execute("""CREATE TABLESPACE tablespacetest LOCATION '/tmp/psql-tablespace'""") + except Exception, e: + os.rmdir("/tmp/psql-tablespace") + self.assertEqual(0, 1, "Failed to create tablespace") + except Exception, e: + print "Failed to create directory for tablespace" + str(e) + + + try: + test_cur.execute("""CREATE EXTENSION postgis;""") + except: + test_conn.rollback() + # Guess the directory from the postgres version. + # TODO: make the postgisdir configurable. Probably + # only works on Debian-based distributions at the moment. 
+ postgisdir = ('/usr/share/postgresql/%d.%d/contrib' % + (test_conn.server_version / 10000, (test_conn.server_version / 100) % 100)) + for fl in os.listdir(postgisdir): + if fl.startswith('postgis'): + newdir = os.path.join(postgisdir, fl) + if os.path.isdir(newdir): + postgisdir = newdir + break + else: + raise Exception('Cannot find postgis directory.') + pgscript = open(os.path.join(postgisdir, 'postgis.sql'),'r').read() + test_cur.execute(pgscript) + pgscript = open(os.path.join(postgisdir, 'spatial_ref_sys.sql'), 'r').read() + test_cur.execute(pgscript) + + try: + test_cur.execute("""CREATE EXTENSION hstore;""") + + except Exception, e: + print "I am unable to create extensions: " + e.pgerror + exit() + finally: + test_cur.close() + test_conn.close() + +def tearDownDB(): + print "Cleaning up test database" + try: + gen_conn=psycopg2.connect("dbname='template1'") + gen_conn.autocommit = True + gen_cur = gen_conn.cursor() + except Exception, e: + print "I am unable to connect to the database." 
+ exit() + + try: + gen_cur.execute("""DROP DATABASE IF EXISTS \"osm2pgsql-test\"""") + if (created_tablespace == 1): + gen_cur.execute("""DROP TABLESPACE IF EXISTS \"tablespacetest\"""") + except Exception, e: + print "Failed to clean up osm2pgsql-test db" + e.pgerror + exit(); + + gen_cur.close() + gen_conn.close() + if (created_tablespace == 1): + returncode = subprocess.call(["/usr/bin/sudo", "/bin/rmdir", "/tmp/psql-tablespace"]) + + +if __name__ == "__main__": + + from optparse import OptionParser + + parser = OptionParser() + parser.add_option("-f", dest="osm_file", action="store", metavar="FILE", + default=full_import_file, + help="Import a specific osm file [default=%default]") + (options, args) = parser.parse_args() + + if options.osm_file: + full_import_file = options.osm_file + + +ts2 = CompleteTestSuite() +try: + setupDB() + runner = unittest.TextTestRunner() + runner.run(ts2) +finally: + tearDownDB() diff -Nru osm2pgsql-0.82.0/tests/regression-test.sh osm2pgsql-0.86.0/tests/regression-test.sh --- osm2pgsql-0.82.0/tests/regression-test.sh 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/tests/regression-test.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ -#!/bin/bash -set -e - -trap errorhandler ERR - -errorhandler(){ - echo "!!!!!!TEST failed, please check results!!!!!!" 
- exit $status -} - -planetfile=$1 -planetdiff=$2 -test_output=`dirname $0`/test_output_$$ - -function setup_db { - echo "" - echo "Initialising test db" - dropdb osm2pgsql-test > /dev/null || true - createdb -E UTF8 osm2pgsql-test - psql -f /usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql -d osm2pgsql-test > /dev/null - psql -f /usr/share/postgresql/9.1/contrib/postgis-1.5/spatial_ref_sys.sql -d osm2pgsql-test > /dev/null - psql -c "CREATE EXTENSION hstore;" -d osm2pgsql-test &> /dev/null - sudo rm -rf /tmp/psql-tablespace || true - mkdir /tmp/psql-tablespace - sudo chown postgres.postgres /tmp/psql-tablespace - psql -q -c "DROP TABLESPACE tablespacetest" -d osm2pgsql-test > /dev/null || true - psql -c "CREATE TABLESPACE tablespacetest LOCATION '/tmp/psql-tablespace'" -d osm2pgsql-test -} - -function teardown_db { - dropdb osm2pgsql-test #To remove any objects that might still be in the table space - psql -c "DROP TABLESPACE tablespacetest" -d postgres - sudo rm -rf /tmp/psql-tablespace - rm -f $test_output $test_output.* - dropdb osm2pgsql-test - -} - -function psql_test { - ( echo -n "$1"; psql -c "$2" -t -d osm2pgsql-test ) | tee -a $test_output.tmp -} - -function reset_results { - rm -f $test_output $test_output.* -} - -function compare_results { - if [ ! 
-r $test_output ]; then - mv $test_output.tmp $test_output - elif diff $test_output $test_output.tmp >/dev/null; then - rm $test_output.tmp - else - errorhandler - fi -} - -function test_osm2pgsql_slim { - trap errorhandler ERR - echo "" - echo "" - echo "@@@Testing osm2pgsql in slim mode with the following parameters: \"" $1 "\"@@@" - setup_db - - dbprefix=${2:-planet_osm} - - ./osm2pgsql --slim --create -d osm2pgsql-test $1 $planetfile - psql_test "Number of points imported" "SELECT count(*) FROM ${dbprefix}_point;" - psql_test "Number of lines imported" "SELECT count(*) FROM ${dbprefix}_line;" - psql_test "Number of roads imported" "SELECT count(*) FROM ${dbprefix}_roads;" - psql_test "Number of polygon imported" "SELECT count(*) FROM ${dbprefix}_polygon;" - psql_test "Number of nodes imported" "SELECT count(*) FROM ${dbprefix}_nodes;" - psql_test "Number of ways imported" "SELECT count(*) FROM ${dbprefix}_ways;" - psql_test "Number of relations imported" "SELECT count(*) FROM ${dbprefix}_rels;" - - echo "***Testing osm2pgsql diff import with the following parameters: \"" $1 "\"***" - ./osm2pgsql --slim --append -d osm2pgsql-test $1 $planetdiff - psql_test "Number of points imported" "SELECT count(*) FROM ${dbprefix}_point;" - psql_test "Number of lines imported" "SELECT count(*) FROM ${dbprefix}_line;" - psql_test "Number of roads imported" "SELECT count(*) FROM ${dbprefix}_roads;" - psql_test "Number of polygon imported" "SELECT count(*) FROM ${dbprefix}_polygon;" - psql_test "Number of nodes imported" "SELECT count(*) FROM ${dbprefix}_nodes;" - psql_test "Number of ways imported" "SELECT count(*) FROM ${dbprefix}_ways;" - psql_test "Number of relations imported" "SELECT count(*) FROM ${dbprefix}_rels;" - compare_results -} - -function test_osm2pgsql_gazetteer { - trap errorhandler ERR - echo "" - echo "" - echo "@@@Testing osm2pgsql in gazetteer mode with the following parameters: \"" $1 "\"@@@" - setup_db - - dbprefix=${2:-planet_osm} - - ./osm2pgsql --slim 
--create -l -O gazetteer -d osm2pgsql-test $1 $planetfile - psql_test "Number of places imported" "SELECT count(*) FROM place;" - psql_test "Number of nodes imported" "SELECT count(*) FROM ${dbprefix}_nodes;" - psql_test "Number of ways imported" "SELECT count(*) FROM ${dbprefix}_ways;" - psql_test "Number of relations imported" "SELECT count(*) FROM ${dbprefix}_rels;" - - echo "***Testing osm2pgsql diff import with the following parameters: \"" $1 "\"***" - ./osm2pgsql --slim --append -l -O gazetteer -d osm2pgsql-test $1 $planetdiff - psql_test "Number of places imported" "SELECT count(*) FROM place;" - psql_test "Number of nodes imported" "SELECT count(*) FROM ${dbprefix}_nodes;" - psql_test "Number of ways imported" "SELECT count(*) FROM ${dbprefix}_ways;" - psql_test "Number of relations imported" "SELECT count(*) FROM ${dbprefix}_rels;" - compare_results -} - -function test_osm2pgsql_nonslim { - trap errorhandler ERR - echo "" - echo "" - echo "@@@Testing osm2pgsql with the following parameters: \"" $1 "\"@@@" - setup_db - ./osm2pgsql --create -d osm2pgsql-test $1 $planetfile - psql_test "Number of points imported" "SELECT count(*) FROM planet_osm_point;" - psql_test "Number of lines imported" "SELECT count(*) FROM planet_osm_line;" - psql_test "Number of roads imported" "SELECT count(*) FROM planet_osm_roads;" - psql_test "Number of polygon imported" "SELECT count(*) FROM planet_osm_polygon;" - compare_results -} - - -test_osm2pgsql_nonslim "-S default.style -C 100" -test_osm2pgsql_nonslim "-S default.style -C 100" -echo ========== OK SO FAR ============= -test_osm2pgsql_nonslim "-S default.style -l -C 100" -test_osm2pgsql_nonslim "--slim --drop -S default.style -C 100" -reset_results - -echo ========== NOW DOING SLIM ============= -test_osm2pgsql_slim "-S default.style -C 100" -test_osm2pgsql_slim "-S default.style -l -C 100" -test_osm2pgsql_slim "-k -S default.style -C 100" -test_osm2pgsql_slim "-j -S default.style -C 100" -test_osm2pgsql_slim "-K -S 
default.style -C 100" -test_osm2pgsql_slim "-x -S default.style -C 100" -test_osm2pgsql_slim "-p planet_osm2 -S default.style -C 100" "planet_osm2" -test_osm2pgsql_slim "--bbox -90.0,-180.0,90.0,180.0 -S default.style -C 100" -test_osm2pgsql_slim "--number-processes 6 -S default.style -C 100" -test_osm2pgsql_slim "-I -S default.style -C 100" -test_osm2pgsql_slim "-e 16:16 -S default.style -C 100" -test_osm2pgsql_slim "--number-processes 6 -e 16:16 -S default.style -C 100" -test_osm2pgsql_slim "-S default.style -C 100 -i tablespacetest" -test_osm2pgsql_slim "-S default.style -C 100 --tablespace-main-data tablespacetest" -test_osm2pgsql_slim "-S default.style -C 100 --tablespace-main-index tablespacetest" -test_osm2pgsql_slim "-S default.style -C 100 --tablespace-slim-data tablespacetest" -test_osm2pgsql_slim "-S default.style -C 100 --tablespace-slim-index tablespacetest" -reset_results - -#test_osm2pgsql_gazetteer "-C 100" -#test_osm2pgsql_gazetteer "--bbox -90.0,-180.0,90.0,180.0 -C 100" - -teardown_db - - - diff -Nru osm2pgsql-0.82.0/tests/test_multipolygon_diff.osc osm2pgsql-0.86.0/tests/test_multipolygon_diff.osc --- osm2pgsql-0.82.0/tests/test_multipolygon_diff.osc 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tests/test_multipolygon_diff.osc 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,193 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru osm2pgsql-0.82.0/tests/test_multipolygon.osm osm2pgsql-0.86.0/tests/test_multipolygon.osm --- osm2pgsql-0.82.0/tests/test_multipolygon.osm 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tests/test_multipolygon.osm 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 
+1,1508 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru osm2pgsql-0.82.0/tests/test_multipolygon_postdiff.osm osm2pgsql-0.86.0/tests/test_multipolygon_postdiff.osm --- osm2pgsql-0.82.0/tests/test_multipolygon_postdiff.osm 1970-01-01 00:00:00.000000000 +0000 +++ osm2pgsql-0.86.0/tests/test_multipolygon_postdiff.osm 2014-10-25 06:42:27.000000000 +0000 @@ -0,0 +1,987 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -Nru osm2pgsql-0.82.0/wildcmp.h osm2pgsql-0.86.0/wildcmp.h --- osm2pgsql-0.82.0/wildcmp.h 2013-04-15 07:06:03.000000000 +0000 +++ osm2pgsql-0.86.0/wildcmp.h 2014-10-25 06:42:27.000000000 +0000 @@ -1,5 +1,10 @@ +#ifndef WILDCMP_H +#define WILDCMP_H + #define NO_MATCH 0 #define FULL_MATCH 1 #define WC_MATCH 2 int wildMatch(char *wildCard, char *string); + +#endif