diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..9babcda3 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,59 @@ +## +# tegola/osm +# +# This creates an Ubuntu derived base image and installs the necessary +# dependencies for running osm import and updates for tegola. + +FROM ubuntu:bionic + +# Set the session to noninteractive. Only applies for life of dockerfile. +ARG DEBIAN_FRONTEND=noninteractive + +# Install basic dependencies +RUN apt-get update -y && \ + apt-get install -y \ + apt-utils \ + software-properties-common \ + unzip \ + curl \ + wget + +# Install Java runtime +RUN apt-get update -y && \ + apt-get install -y default-jre + +# Install gdal/ogr v2.1.3 from the ubuntugis ppa +RUN add-apt-repository ppa:ubuntugis/ubuntugis-unstable && \ + apt-get update -y && \ + apt-get install -y \ + gdal-bin + +# Install psql (postgres client) and postgis (contains shp2pgsql) from the postgresql repo +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \ + add-apt-repository "deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -sc)-pgdg main" && \ + apt-get update -y && \ + apt-get install -y \ + postgresql-client-11 \ + postgis + +# Install osmosis +RUN mkdir -p /usr/local/bin/osmosis-src && \ + wget --quiet -O - https://bretth.dev.openstreetmap.org/osmosis-build/osmosis-latest.tgz | tar -xz -C /usr/local/bin/osmosis-src && \ + ln -s /usr/local/bin/osmosis-src/bin/osmosis /usr/local/bin/osmosis + +# Install imposm +RUN mkdir -p /usr/local/bin/imposm-src && \ + wget --quiet -O - https://github.com/omniscale/imposm3/releases/download/v0.6.0-alpha.4/imposm-0.6.0-alpha.4-linux-x86-64.tar.gz | tar -xz -C /usr/local/bin/imposm-src && \ + ln -s /usr/local/bin/imposm-src/imposm-0.6.0-alpha.4-linux-x86-64/imposm /usr/local/bin/imposm && \ + ln -s /usr/local/bin/imposm-src/imposm-0.6.0-alpha.4-linux-x86-64/lib/* /usr/lib/ + +# Install additional packages +RUN apt-get update -y && \ + apt-get install -y \ + jq + 
+# Install the local scripts +COPY scripts/osm_imposm_import.sh /usr/local/bin/osm_imposm_import.sh +COPY scripts/osm_imposm_update.sh /usr/local/bin/osm_imposm_update.sh +COPY scripts/osm_land_import.sh /usr/local/bin/osm_land_import.sh +COPY scripts/ne_import.sh /usr/local/bin/ne_import.sh \ No newline at end of file diff --git a/README.md b/README.md index 8d570371..09652e52 100644 --- a/README.md +++ b/README.md @@ -1,60 +1,200 @@ # Tegola OSM -This repo houses instructions and configuration files to aid with standing up an OpenStreetMap export and Natural Earth dataset into a PostGIS enabled database that uses [tegola](https://github.com/terranodo/tegola) for creating and serving vector tiles. +This repo houses instructions, configuration files, and a docker container to aid with standing up an OpenStreetMap export and Natural Earth dataset into a PostGIS enabled database that uses [tegola](https://github.com/terranodo/tegola) for creating and serving vector tiles. -## Repo config files +## Dependencies -- imposm3.json - an [imposm3](https://github.com/omniscale/imposm3) mapping file for the OSM PBF file. -- tegola.toml - a [tegola](https://github.com/terranodo/tegola) configuration file for the OSM import produced by imposm3. +* [Docker](https://www.docker.com) +* Postgres server with [PostGIS](http://www.postgis.net) enabled +* tegola ([download](https://github.com/terranodo/tegola/releases)) -## Dependencies +Additional dependencies if not using docker: + +* imposm ([download](https://imposm.org/static/rel/) - linux only) +* [gdal](http://www.gdal.org/) - required for Natural Earth import + +## Example Config + +* `config/osm-imposm-mapping.json`: An [imposm](https://github.com/omniscale/imposm3) mapping file for mapping the osm pbf data to postgis. +* `config/osm-imposm-config.json`: An [imposm](https://github.com/omniscale/imposm3) config file for replication and srid settings. 
Note that imposm config properties such as the db connection string, cache and diff directories are not specified via the config but are configured as env vars to docker instead. +* `tegola-osm.toml` - A [tegola](https://github.com/terranodo/tegola) configuration file for the OSM import produced by imposm and osm land. +* `tegola-natural-earth.toml` - A [tegola](https://github.com/terranodo/tegola) configuration file for the natural earth data import. -- Postgres server with [PostGIS](http://www.postgis.net) enabled. -- imposm3 ([download](https://imposm.org/static/rel/) - linux only) -- tegola ([download](https://github.com/terranodo/tegola/releases)) -- [gdal](http://www.gdal.org/) - required for Natural Earth import +## Docker Setup -## Download the OSM planet database in PBF format +To build the container run the following command from the root directory of this repo: ```bash -curl -O https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf +docker build -t tegola-osm . ``` -## Import the OSM export into PostGIS using imposm3 +The container can also be pulled down from dockerhub. + +## Available Scripts / Docker Commands + +Each of the bash scripts is installed into the docker container and available as a global command. The scripts could also be run outside of a docker container if you meet the dependencies. The scripts all expect certain env vars to be set. + +### scripts/osm_imposm_import.sh + +Imports an OSM PBF file into a PostgreSQL/PostGIS instance using imposm `import` command. You are required to provide an imposm-cache and imposm-diff folder to which imposm will persist data. Those same directories must then be used for the osm update script. If you run an import against an existing db and cache/diff folders the existing data will be squashed. 
+ +#### Required Env Vars + +* `DB_HOST`: PostgreSQL database host name +* `DB_PORT`: PostgreSQL database port +* `DB_NAME`: PostgreSQL database name +* `DB_USER`: PostgreSQL database username +* `DB_PW`: PostgreSQL database password +* `OSM_SOURCE_PBF`: Full path to the osm pbf file that we will import. Eg: `/osm_data/data/north-america-latest.osm.pbf` +* `IMPOSM_CONFIG`: Full path to the imposm config file we want to use. Eg: `/osm_data/config/osm-imposm-config.json` +* `IMPOSM_MAPPING`: Full path to the imposm mapping file we want to use. Eg: `/osm_data/config/osm-imposm-mapping.json` +* `IMPOSM_CACHE_DIR`: Full path to an empty writeable directory where imposm will persist a cache of the import. Eg: `/osm_data/cache` +* `IMPOSM_DIFF_DIR`: Full path to an empty writeable directory where imposm will persist diffs for the import. Eg: `/osm_data/diff` + +#### Example Docker Command + +All of our data, config files, and directories remain on the host and are exposed to the docker container through bind mounts. We then set the appropriate env vars on the container and reference the internal mount point of each object. This way imposm can not only read data and config dynamically from the host, but it can also persist cache/diff data back. 
```bash -./imposm3 import -connection postgis://username:password@host/database-name -mapping imposm3.json -read /path/to/osm/planet-latest.osm.pbf -write -./imposm3 import -connection postgis://username:password@host/database-name -mapping imposm3.json -deployproduction +docker run -i --rm \ + -u "${UID}" \ + --mount "type=bind,source=/osm_data/data/north-america-latest.osm.pbf,target=/osm/data/north-america-latest.osm.pbf" \ + --mount "type=bind,source=/osm_data/config/osm-imposm-config.json,target=/osm/config/imposm-config.json" \ + --mount "type=bind,source=/osm_data/config/osm-imposm-mapping.json,target=/osm/config/imposm-mapping.json" \ + --mount "type=bind,source=/osm_data/cache,target=/osm/cache" \ + --mount "type=bind,source=/osm_data/diff,target=/osm/diff" \ + -e "DB_HOST=localhost" \ + -e "DB_PORT=5432" \ + -e "DB_NAME=osm_data" \ + -e "DB_USER=postgres" \ + -e "DB_PW=postgres" \ + -e "OSM_SOURCE_PBF=/osm/data/north-america-latest.osm.pbf" \ + -e "IMPOSM_CONFIG=/osm/config/imposm-config.json" \ + -e "IMPOSM_MAPPING=/osm/config/imposm-mapping.json" \ + -e "IMPOSM_CACHE_DIR=/osm/cache" \ + -e "IMPOSM_DIFF_DIR=/osm/diff" \ + "tegola-osm:latest" \ + bash -c "osm_imposm_import.sh" ``` -## Import the OSM Land and Natural Earth dataset (requires gdal, Natural Earth can be skipped if you're only interested in OSM) +### scripts/osm_imposm_update.sh -### Option 1: Embed Credentials -Update the database credentials inside of `natural_earth.sh` and `osm_land.sh`, then run each file: `./natural_earth.sh && ./osm_land.sh`. This will download the natural earth and osm land datasets and insert it into PostGIS under a database named `natural_earth` and `osm` respectively. +Updates an existing PostgreSQL/PostGIS instance using imposm `diff` command. In this case we also use osmosis to generate a single changes list which we pass to imposm `diff`. This method is robust and supports updates from different data sources and data subsets across any replication interval. 
You are required to provide the imposm-cache and imposm-diff folder which was populated during the initial import and prior updates. Additionally, updates require a working directory for osmosis which may or may not be persisted. + +#### Required Env Vars + +* `DB_HOST`: PostgreSQL database host name +* `DB_PORT`: PostgreSQL database port +* `DB_NAME`: PostgreSQL database name +* `DB_USER`: PostgreSQL database username +* `DB_PW`: PostgreSQL database password +* `OSMOSIS_DIR`: Full path to a working directory for osmosis to create changes lists. Eg: `/osm_data/osmosis` +* `IMPOSM_CONFIG`: Full path to the imposm config file we want to use. Eg: `/osm_data/config/osm-imposm-config.json` +* `IMPOSM_MAPPING`: Full path to the imposm mapping file we want to use. Eg: `/osm_data/config/osm-imposm-mapping.json` +* `IMPOSM_CACHE_DIR`: Full path to the writeable cache directory that was populated by the initial import. Eg: `/osm_data/cache` +* `IMPOSM_DIFF_DIR`: Full path to the writeable diff directory that was populated by the initial import. Eg: `/osm_data/diff` + +#### Example Docker Command -### Option 2: Create a dbcredentials.sh file -Create a `dbcredentials.sh` file which will be shared with the `osm_land` script. This option is ideal for when the `natural_earth` and `osm` databases will reside on the same database server, and will use the same credentials. 
Ensure that the following variables are defined in your file: ```bash -DB_HOST="mydbhost" -DB_PORT="myport" -DB_USER="myuser" -DB_PW="mypassword" +docker run -i --rm \ + -u "${UID}" \ + --mount "type=bind,source=/osm_data/osmosis,target=/osm/osmosis" \ + --mount "type=bind,source=/osm_data/config/osm-imposm-config.json,target=/osm/config/imposm-config.json" \ + --mount "type=bind,source=/osm_data/config/osm-imposm-mapping.json,target=/osm/config/imposm-mapping.json" \ + --mount "type=bind,source=/osm_data/cache,target=/osm/cache" \ + --mount "type=bind,source=/osm_data/diff,target=/osm/diff" \ + -e "DB_HOST=localhost" \ + -e "DB_PORT=5432" \ + -e "DB_NAME=osm_data" \ + -e "DB_USER=postgres" \ + -e "DB_PW=postgres" \ + -e "OSMOSIS_DIR=/osm/osmosis" \ + -e "IMPOSM_CONFIG=/osm/config/imposm-config.json" \ + -e "IMPOSM_MAPPING=/osm/config/imposm-mapping.json" \ + -e "IMPOSM_CACHE_DIR=/osm/cache" \ + -e "IMPOSM_DIFF_DIR=/osm/diff" \ + "tegola-osm:latest" \ + bash -c "osm_imposm_update.sh" ``` -Once you have configured the `dbcredentials.sh` file, run the scripts as above: `./natural_earth.sh && ./osm_land.sh` -### Option 3: -Create separate configuration files in the same pattern as the above `dbcredentials.sh` file and pass the path to the config file using the `-c` option. This is ideal if you have two different servers for the databases. Ensure the file you create follows this format: +### scripts/ne_import.sh + +Imports the Natural Earth data into the specified database. The import process will drop and recreate existing tables. Data is reprojected to 3857 on import but can be changed. + +**Note:** If you are looking for the old `natural_earth.sh` script it can be found in `scripts/deprecated/`. + +#### Command Args + +* `-d` Setting this flag will force the entire database to be dropped and recreated. +* `-s SRID` Setting this will reproject that data to the specified EPSG:SRID during import. 
+* `-v` Verbose mode + +#### Required Env Vars + +* `DB_HOST`: PostgreSQL database host name +* `DB_PORT`: PostgreSQL database port +* `DB_NAME`: PostgreSQL database name +* `DB_USER`: PostgreSQL database username +* `DB_PW`: PostgreSQL database password +* `NE_MAPPING`: Full path to the json mapping file we want to use. Eg: `/ne/config/ne-mapping.json` +* `TEMP_DATA_DIR`: Full path to the directory where this script will temporarily store data. Script user must have permissions to read/write/delete from this directory. Eg: `/tmp` + +#### Example Docker Command + ```bash -DB_NAME="mydb" -DB_HOST="mydbhost" -DB_PORT="myport" -DB_USER="myuser" -DB_PW="mypassword" +docker run -i --rm \ + -u "${UID}" \ + --mount "type=bind,source=/ne_data/config/ne-mapping.json,target=/ne/mapping.json" \ + --mount "type=bind,source=/tmp,target=/ne/temp" \ + -e "DB_HOST=localhost" \ + -e "DB_PORT=5432" \ + -e "DB_NAME=ne_data" \ + -e "DB_USER=postgres" \ + -e "DB_PW=postgres" \ + -e "NE_MAPPING=/ne/mapping.json" \ + -e "TEMP_DATA_DIR=/ne/temp" \ + "tegola-osm:latest" \ + bash -c "ne_import.sh" ``` -Once you have configured the files, run the scripts with the `-c` flag and provide the path to the credentials file, ie: `./natural_earth.sh -c natural_earth_creds.sh && ./osm_land.sh -c osm_creds.sh` -### Usage: -Both scripts support a `-v` flag for debugging. `natural_earth.sh` also supports a `-d` flag, which will drop the existing natural earth database prior to import if set. Since the `osm_land.sh` imports into a database shared with other data, it lacks this functionality. Instead, only the relevent tables are dropped. +### scripts/osm_land_import.sh + +Imports the OSM land polygons data into the specified database. The import process will drop and recreate existing tables. Also, you will want to write to the same DB as the main OSM data if you are using the sample `tegola-osm.toml` config. 
+ +**Note:** If you are looking for the old `osm_land.sh` script it can be found in `scripts/deprecated/`. + +#### Command Args + +* `-v` Verbose mode + +#### Required Env Vars + +* `DB_HOST`: PostgreSQL database host name +* `DB_PORT`: PostgreSQL database port +* `DB_NAME`: PostgreSQL database name +* `DB_USER`: PostgreSQL database username +* `DB_PW`: PostgreSQL database password +* `OSM_LAND_MAPPING`: Full path to the json mapping file we want to use. Eg: `/osm/config/osm-land-mapping.json` +* `TEMP_DATA_DIR`: Full path to the directory where this script will temporarily store data. Script user must have permissions to read/write/delete from this directory. Eg: `/tmp` + +#### Example Docker Command + +```bash +docker run -i --rm \ + -u "${UID}" \ + --mount "type=bind,source=/osm_data/config/osm-land-mapping.json,target=/osm/config/osm-land-mapping.json" \ + --mount "type=bind,source=/tmp,target=/osm/temp" \ + -e "DB_HOST=localhost" \ + -e "DB_PORT=5432" \ + -e "DB_NAME=osm_data" \ + -e "DB_USER=postgres" \ + -e "DB_PW=postgres" \ + -e "OSM_LAND_MAPPING=/osm/config/osm-land-mapping.json" \ + -e "TEMP_DATA_DIR=/osm/temp" \ + "tegola-osm:latest" \ + bash -c "osm_land_import.sh" +``` ## Install SQL helper functions Execute `postgis_helpers.sql` against your OSM database. Currently this contains a single utility function for converting building heights from strings to numbers which is important if you want to extrude buildings for the 3d effect. @@ -72,13 +212,13 @@ psql -U tegola -d database-name -a -f postgis_index.sql ## Launch tegola ```bash -./tegola -config=tegola.toml +./tegola -config=tegola-osm.toml ``` Open your browser to localhost and the port you configured tegola to run on (i.e. localhost:8080) to see the built in viewer. ## Data Layers -To view these data layers in a map and query the features for a better understanding of each data layer, use the [Tegola-OSM Inspector](https://osm.tegola.io). 
The data layers described here are in the "Tegola-OSM" database as laid out in the tegola.toml (i.e., not the Natural Earth database that is specified in tegola-natural-earth.toml). +To view these data layers in a map and query the features for a better understanding of each data layer, use the [Tegola-OSM Inspector](https://osm.tegola.io). The data layers described here are in the "Tegola-OSM" database as laid out in the tegola-osm.toml (i.e., not the Natural Earth database that is specified in tegola-natural-earth.toml). | source | Description | |--------|-------------| diff --git a/config/ne-mapping.json b/config/ne-mapping.json new file mode 100644 index 00000000..5cd7ad41 --- /dev/null +++ b/config/ne-mapping.json @@ -0,0 +1,342 @@ +[ + { + "layer":"ne_110m_admin_0_boundary_lines_land", + "theme":"cultural", + "zoom":"110m" + }, + { + "layer":"ne_110m_admin_0_countries", + "theme":"cultural", + "zoom":"110m" + }, + { + "layer":"ne_110m_admin_1_states_provinces_lines", + "theme":"cultural", + "zoom":"110m" + }, + { + "layer":"ne_110m_populated_places", + "theme":"cultural", + "zoom":"110m" + }, + { + "layer":"ne_110m_coastline", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_geography_marine_polys", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_geography_regions_polys", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_rivers_lake_centerlines", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_lakes", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_glaciated_areas", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_land", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_110m_ocean", + "theme":"physical", + "zoom":"110m" + }, + { + "layer":"ne_50m_admin_0_boundary_lines_land", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_admin_0_boundary_lines_disputed_areas", + "theme":"cultural", + "zoom":"50m" + }, + { + 
"layer":"ne_50m_admin_0_boundary_lines_maritime_indicator", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_admin_0_countries", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_admin_0_map_subunits", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_admin_1_states_provinces_lakes", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_admin_1_states_provinces_lines", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_populated_places", + "theme":"cultural", + "zoom":"50m" + }, + { + "layer":"ne_50m_geographic_lines", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_coastline", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_antarctic_ice_shelves_lines", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_antarctic_ice_shelves_polys", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_geography_marine_polys", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_geography_regions_elevation_points", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_geography_regions_polys", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_rivers_lake_centerlines_scale_rank", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_rivers_lake_centerlines", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_lakes", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_glaciated_areas", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_land", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_50m_ocean", + "theme":"physical", + "zoom":"50m" + }, + { + "layer":"ne_10m_admin_0_boundary_lines_land", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_0_boundary_lines_disputed_areas", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_parks_and_protected_lands", + "theme":"cultural", + "zoom":"10m" + }, + { + 
"layer":"ne_10m_admin_0_boundary_lines_map_units", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_0_boundary_lines_maritime_indicator", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_0_label_points", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_0_countries", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_0_map_subunits", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_1_label_points", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_admin_1_states_provinces_lines", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_populated_places", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_roads", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_urban_areas", + "theme":"cultural", + "zoom":"10m" + }, + { + "layer":"ne_10m_geographic_lines", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_coastline", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_antarctic_ice_shelves_lines", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_antarctic_ice_shelves_polys", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_geography_marine_polys", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_geography_regions_elevation_points", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_geography_regions_points", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_geography_regions_polys", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_rivers_north_america", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_rivers_europe", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_rivers_lake_centerlines_scale_rank", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_rivers_lake_centerlines", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_playas", + 
"theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_reefs", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_lakes_historic", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_lakes_north_america", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_lakes_europe", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_lakes", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_glaciated_areas", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_land", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_minor_islands", + "theme":"physical", + "zoom":"10m" + }, + { + "layer":"ne_10m_ocean", + "theme":"physical", + "zoom":"10m" + }, +] \ No newline at end of file diff --git a/config/osm-imposm-config.json b/config/osm-imposm-config.json new file mode 100644 index 00000000..82fd1426 --- /dev/null +++ b/config/osm-imposm-config.json @@ -0,0 +1,5 @@ +{ + "replication_url": "http://download.geofabrik.de/north-america-updates/", + "replication_interval": "24h", + "srid": 3857 +} \ No newline at end of file diff --git a/imposm3.json b/config/osm-imposm-mapping.json similarity index 100% rename from imposm3.json rename to config/osm-imposm-mapping.json index bbe25b85..8135ac59 100644 --- a/imposm3.json +++ b/config/osm-imposm-mapping.json @@ -8,16 +8,16 @@ ] }, "generalized_tables": { - "water_areas_gen1": { - "source": "water_areas", - "sql_filter": "ST_Area(geometry)>50000.000000", - "tolerance": 50.0 - }, "water_areas_gen0": { "source": "water_areas_gen1", "sql_filter": "ST_Area(geometry)>500000.000000", "tolerance": 200.0 }, + "water_areas_gen1": { + "source": "water_areas", + "sql_filter": "ST_Area(geometry)>50000.000000", + "tolerance": 50.0 + }, "transport_lines_gen0": { "source": "transport_lines_gen1", "sql_filter": null, @@ -38,15 +38,15 @@ "sql_filter": null, "tolerance": 50.0 }, - "landuse_areas_gen1": { - "source": "landuse_areas", - "sql_filter": 
"ST_Area(geometry)>50000.000000", - "tolerance": 50.0 - }, "landuse_areas_gen0": { "source": "landuse_areas_gen1", "sql_filter": "ST_Area(geometry)>500000.000000", "tolerance": 200.0 + }, + "landuse_areas_gen1": { + "source": "landuse_areas", + "sql_filter": "ST_Area(geometry)>50000.000000", + "tolerance": 50.0 } }, "tables": { diff --git a/config/osm-land-mapping.json b/config/osm-land-mapping.json new file mode 100644 index 00000000..42bec770 --- /dev/null +++ b/config/osm-land-mapping.json @@ -0,0 +1,12 @@ +[ + { + "layer":"osm_land", + "filename":"land-polygons-split-3857.zip", + "srid":"3857" + }, + { + "layer":"osm_land_gen0", + "filename":"simplified-land-polygons-complete-3857.zip", + "srid":"3857" + } +] \ No newline at end of file diff --git a/tegola-natural-earth.toml b/config/tegola-natural-earth.toml similarity index 100% rename from tegola-natural-earth.toml rename to config/tegola-natural-earth.toml diff --git a/tegola.toml b/config/tegola-osm.toml similarity index 100% rename from tegola.toml rename to config/tegola-osm.toml diff --git a/natural_earth.sh b/natural_earth.sh deleted file mode 100755 index 49a6f51b..00000000 --- a/natural_earth.sh +++ /dev/null @@ -1,155 +0,0 @@ -#!/bin/bash - -# This script will install natural earth data (http://www.naturalearthdata.com/downloads/) into a PostGIS database named DB_NAME. -# The script assumes the following utilities are installed: -# - psql: PostgreSQL client -# - ogr2ogr: GDAL vector lib -# - unzip: decompression util -# -# Usage -# Set the database connection variables, then run -# -# ./natural_earth.sh -# -# Important -# - This script is idempotent and will DROP the natural earth database if it already exists -# - In order for this script to work the DB_USER must have access to the 'postgres' database to create a new database - - -set -e - -CONFIG_FILE='' -DROP_DB=false - -while getopts ":c:dv" flag; do - case ${flag} in - c) - if [[ ! 
-r $OPTARG ]]; then echo "Config File $OPTARG Not Found!"; exit 2; - else echo "Using config file: $OPTARG"; CONFIG_FILE=$OPTARG - fi ;; - v) - echo "Running in Verbose Mode" - set -x ;; - d) - echo "Dropping Existing DB"; DROP_DB=true ;; - \?) - printf '\nUnrecognized option: -%s \nUsage: \n[-c file] Path to Config File \n[-d] drop existing database \n[-v] verbose\n' $OPTARG; exit 2 ;; - :) - echo "Option -$OPTARG requires an argument"; exit 2 ;; - esac -done - -# database connection variables -DB_NAME="natural_earth" -DB_HOST="" -DB_PORT="" -DB_USER="" -DB_PW="" - -# Check if we're using a config file -if [[ -r $CONFIG_FILE ]]; then source $CONFIG_FILE -elif [ -r dbcredentials.sh ]; then source dbcredentials.sh -fi - -# check our connection string before we do any downloading -psql "dbname='postgres' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "\q" - -# array of natural earth dataset URLs - dataurls=( - "http://naciscdn.org/naturalearth/110m/cultural/ne_110m_admin_0_boundary_lines_land.zip" - "http://naciscdn.org/naturalearth/110m/cultural/ne_110m_admin_0_countries.zip" - "http://naciscdn.org/naturalearth/110m/cultural/ne_110m_admin_1_states_provinces_lines.zip" - "http://naciscdn.org/naturalearth/110m/cultural/ne_110m_populated_places.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_coastline.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_geography_marine_polys.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_geography_regions_polys.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_rivers_lake_centerlines.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_lakes.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_glaciated_areas.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_land.zip" - "http://naciscdn.org/naturalearth/110m/physical/ne_110m_ocean.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_0_boundary_lines_land.zip" - 
"http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_0_boundary_lines_disputed_areas.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_0_boundary_lines_maritime_indicator.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_0_countries.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_0_map_subunits.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_1_states_provinces_lakes.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_admin_1_states_provinces_lines.zip" - "http://naciscdn.org/naturalearth/50m/cultural/ne_50m_populated_places.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_geographic_lines.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_coastline.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_antarctic_ice_shelves_lines.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_antarctic_ice_shelves_polys.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_geography_marine_polys.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_geography_regions_elevation_points.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_geography_regions_polys.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_rivers_lake_centerlines_scale_rank.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_rivers_lake_centerlines.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_lakes.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_glaciated_areas.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_land.zip" - "http://naciscdn.org/naturalearth/50m/physical/ne_50m_ocean.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_boundary_lines_land.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_boundary_lines_disputed_areas.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_parks_and_protected_lands.zip" - 
"http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_boundary_lines_map_units.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_boundary_lines_maritime_indicator.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_label_points.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_countries.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_map_subunits.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_1_label_points.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_1_states_provinces_lines.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_populated_places.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_roads.zip" - "http://naciscdn.org/naturalearth/10m/cultural/ne_10m_urban_areas.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_geographic_lines.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_coastline.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_antarctic_ice_shelves_lines.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_antarctic_ice_shelves_polys.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_geography_marine_polys.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_geography_regions_elevation_points.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_geography_regions_points.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_geography_regions_polys.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_rivers_north_america.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_rivers_europe.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_rivers_lake_centerlines_scale_rank.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_rivers_lake_centerlines.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_playas.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_reefs.zip" - 
"http://naciscdn.org/naturalearth/10m/physical/ne_10m_lakes_historic.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_lakes_north_america.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_lakes_europe.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_lakes.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_glaciated_areas.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_land.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_minor_islands.zip" - "http://naciscdn.org/naturalearth/10m/physical/ne_10m_ocean.zip" -) - -# remove old database if -d flag is set and create a new one -if [[ "$DROP_DB" = true ]]; -then - psql "dbname='postgres' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "DROP DATABASE IF EXISTS $DB_NAME" - psql "dbname='postgres' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "CREATE DATABASE $DB_NAME" -fi - -# Create postgis extension if it doesn't exist -psql "dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "CREATE EXTENSION IF NOT EXISTS postgis" - -# iterate our dataurls -for i in "${!dataurls[@]}"; do - url=${dataurls[$i]} - - echo "fetching $url"; - curl $url > $i.zip; - unzip $i -d $i - - # support for archives with more than one shapefile - for f in $i/*.shp; do - # reproject data to webmercator (3857) and insert into our database - OGR_ENABLE_PARTIAL_REPROJECTION=true ogr2ogr -unsetFieldWidth -t_srs EPSG:3857 -nlt PROMOTE_TO_MULTI -f PostgreSQL PG:"dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" $f - done - - # clean up - rm -rf $i/ $i.zip -done diff --git a/scripts/deprecated/README.md b/scripts/deprecated/README.md new file mode 100644 index 00000000..9edb46a9 --- /dev/null +++ b/scripts/deprecated/README.md @@ -0,0 +1,32 @@ +# Deprecated Scripts + +The scripts in this folder are deprecated and are not included in the docker container. 
They have been kept intact here with their instructions for backwards compatibility and to help with transitioning to the new docker compatible scripts. + +## Import the OSM Land and Natural Earth dataset (requires gdal, Natural Earth can be skipped if you're only interested in OSM) + +### Option 1: Embed Credentials +Update the database credentials inside of `natural_earth.sh` and `osm_land.sh`, then run each file: `./natural_earth.sh && ./osm_land.sh`. This will download the natural earth and osm land datasets and insert it into PostGIS under a database named `natural_earth` and `osm` respectively. + +### Option 2: Create a dbcredentials.sh file +Create a `dbcredentials.sh` file which will be shared with the `osm_land` script. This option is ideal for when the `natural_earth` and `osm` databases will reside on the same database server, and will use the same credentials. Ensure that the following variables are defined in your file: +```bash +DB_HOST="mydbhost" +DB_PORT="myport" +DB_USER="myuser" +DB_PW="mypassword" +``` +Once you have configured the `dbcredentials.sh` file, run the scripts as above: `./natural_earth.sh && ./osm_land.sh` + +### Option 3: +Create separate configuration files in the same pattern as the above `dbcredentials.sh` file and pass the path to the config file using the `-c` option. This is ideal if you have two different servers for the databases. Ensure the file you create follows this format: +```bash +DB_NAME="mydb" +DB_HOST="mydbhost" +DB_PORT="myport" +DB_USER="myuser" +DB_PW="mypassword" +``` +Once you have configured the files, run the scripts with the `-c` flag and provide the path to the credentials file, ie: `./natural_earth.sh -c natural_earth_creds.sh && ./osm_land.sh -c osm_creds.sh` + +### Usage: +Both scripts support a `-v` flag for debugging. `natural_earth.sh` also supports a `-d` flag, which will drop the existing natural earth database prior to import if set. 
Since `osm_land.sh` imports into a database shared with other data, it lacks this functionality. Instead, only the relevant tables are dropped.
-f "$NE_MAPPING" ]; then + printf "NE_MAPPING is an invalid file!\n" + exit 1 +fi + +# TEMP_DATA_DIR +# Full path to the directory where this script will temporarily store data. +# Script user must have permissions to read/write/delete from this directory. +# Eg: /tmp +if [ -z "$TEMP_DATA_DIR" ]; then + printf "Missing env var: TEMP_DATA_DIR\n" + exit 1 +fi +if [ ! -d "$TEMP_DATA_DIR" ]; then + printf "TEMP_DATA_DIR is an invalid directory!\n" + exit 1 +fi + +# Internal vars +drop_db=false +srid=3857 + +# Parse command args +while getopts ":sdv" flag; do + case ${flag} in + s) + echo "Reprojecting to SRID: $OPTARG" + srid=$OPTARG + ;; + d) + echo "Dropping Existing DB" + drop_db=true + ;; + v) + echo "Running in Verbose Mode" + set -x + ;; + \?) + printf '\nUnrecognized option: -%s \nUsage: \n[-s SRID] Reproject to SRID \n[-d] Drop existing database \n[-v] Verbose\n' $OPTARG + exit 2 + ;; + :) + echo "Option -$OPTARG requires an argument" + exit 2 + ;; + esac +done + +# check our connection string before we go any farther +psql "dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "\q" + +# If -d flag is set then drop and recreate the database +if [[ "$drop_db" = true ]]; then\ + psql "dbname='postgres' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -v ON_ERROR_STOP=1 -c "DROP DATABASE IF EXISTS $DB_NAME" + psql "dbname='postgres' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -v ON_ERROR_STOP=1 -c "CREATE DATABASE $DB_NAME" +fi + +# Create postgis extension if it doesn't exist +psql "dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -v ON_ERROR_STOP=1 -c "CREATE EXTENSION IF NOT EXISTS postgis" + +# iterate on the layers in the mapping file +LAYERS_JSON=$(cat $NE_MAPPING) +for row in $(echo "${LAYERS_JSON}" | jq -r '.[] | @base64'); do + _jq() { + echo ${row} | base64 --decode | jq -r ${1} + } + + layer=$(_jq '.layer') + 
url="http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/$(_jq '.zoom')/$(_jq '.theme')/${layer}.zip" + + working_dir=${TEMP_DATA_DIR}/${layer} + mkdir ${working_dir} + + echo "Fetching ${layer}.zip"; + curl -L -o "${working_dir}/${layer}.zip" "${url}" + unzip -o "${working_dir}/${layer}.zip" -d "${working_dir}" + + # support for archives with more than one shapefile + for shapefile in ${working_dir}/*.shp; do + # reproject data and insert into our database + echo "Importing ${shapefile} into DB" + OGR_ENABLE_PARTIAL_REPROJECTION=true ogr2ogr -overwrite -unsetFieldWidth -t_srs EPSG:${srid} -nln ${layer} -nlt PROMOTE_TO_MULTI -f PostgreSQL PG:"dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" "${shapefile}" + done + + # cleanup + rm -rf "$working_dir" + +done \ No newline at end of file diff --git a/scripts/osm_imposm_import.sh b/scripts/osm_imposm_import.sh new file mode 100755 index 00000000..b0ea50c6 --- /dev/null +++ b/scripts/osm_imposm_import.sh @@ -0,0 +1,98 @@ +#!/bin/bash +set -e + +# Validate required env vars + +# DB_HOST, DB_PORT, DB_NAME, DB_USER, DB_PW +# All db connection parameters for postgres. +if [ -z "$DB_HOST" ]; then + printf "Missing env var: DB_HOST\n" + exit 1 +fi +if [ -z "$DB_PORT" ]; then + printf "Missing env var: DB_PORT\n" + exit 1 +fi +if [ -z "$DB_NAME" ]; then + printf "Missing env var: DB_NAME\n" + exit 1 +fi +if [ -z "$DB_USER" ]; then + printf "Missing env var: DB_USER\n" + exit 1 +fi +if [ -z "$DB_PW" ]; then + printf "Missing env var: DB_PW\n" + exit 1 +fi + +# OSM_SOURCE_PBF +# Full path to the osm pbf file that we will import. +# Eg: /osm/data/north-america-latest.osm.pbf +if [ -z "$OSM_SOURCE_PBF" ]; then + printf "Missing env var: OSM_SOURCE_DATA\n" + exit 1 +fi +if [ ! -f "$OSM_SOURCE_PBF" ]; then + printf "OSM_SOURCE_DATA is an invalid file!\n" + exit 1 +fi + +# IMPOSM_CONFIG +# Full path to the imposm config file we want to use. 
+# Eg: /osm/config/imposm-config.json +if [ -z "$IMPOSM_CONFIG" ]; then + printf "Missing env var: IMPOSM_CONFIG\n" + exit 1 +fi +if [ ! -f "$IMPOSM_CONFIG" ]; then + printf "IMPOSM_CONFIG is an invalid file!\n" + exit 1 +fi + +# IMPOSM_MAPPING +# Full path to the imposm mapping file we want to use. +# Eg: /osm/config/imposm-mapping.json +if [ -z "$IMPOSM_MAPPING" ]; then + printf "Missing env var: IMPOSM_MAPPING\n" + exit 1 +fi +if [ ! -f "$IMPOSM_MAPPING" ]; then + printf "IMPOSM_MAPPING is an invalid file!\n" + exit 1 +fi + +# IMPOSM_CACHE_DIR +# Full path to an empty writeable directory where imposm will persist a cache of the import. +# Eg: /osm/cache +if [ -z "$IMPOSM_CACHE_DIR" ]; then + printf "Missing env var: IMPOSM_CACHE_DIR\n" + exit 1 +fi +if [ ! -d "$IMPOSM_CACHE_DIR" ]; then + printf "IMPOSM_CACHE_DIR is an invalid directory!\n" + exit 1 +fi + +# IMPOSM_DIFF_DIR +# Full path to an empty writeable directory where imposm will persist diffs for the import. +# Eg: /osm/cache +if [ -z "$IMPOSM_DIFF_DIR" ]; then + printf "Missing env var: IMPOSM_DIFF_DIR\n" + exit 1 +fi +if [ ! 
-d "$IMPOSM_DIFF_DIR" ]; then + printf "IMPOSM_DIFF_DIR is an invalid directory!\n" + exit 1 +fi + +# check our db connection before we proceed +psql "dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "\q" + +conn_string="postgis://${DB_USER}:${DB_PW}@${DB_HOST}/${DB_NAME}" + +printf "Running imposm import...\n" +imposm import -config ${IMPOSM_CONFIG} -mapping ${IMPOSM_MAPPING} -connection ${conn_string} -read ${OSM_SOURCE_PBF} -write -diff -cachedir ${IMPOSM_CACHE_DIR} -overwritecache -diffdir ${IMPOSM_DIFF_DIR} + +printf "Running imposm deployproduction...\n" +imposm import -config ${IMPOSM_CONFIG} -mapping ${IMPOSM_MAPPING} -connection ${conn_string} -deployproduction \ No newline at end of file diff --git a/scripts/osm_imposm_update.sh b/scripts/osm_imposm_update.sh new file mode 100755 index 00000000..931c9057 --- /dev/null +++ b/scripts/osm_imposm_update.sh @@ -0,0 +1,106 @@ +#!/bin/bash + +# Validate required env vars + +# DB_HOST, DB_PORT, DB_NAME, DB_USER, DB_PW +# All db connection parameters for postgres. +if [ -z "$DB_HOST" ]; then + printf "Missing env var: DB_HOST\n" + exit 1 +fi +if [ -z "$DB_PORT" ]; then + printf "Missing env var: DB_PORT\n" + exit 1 +fi +if [ -z "$DB_NAME" ]; then + printf "Missing env var: DB_NAME\n" + exit 1 +fi +if [ -z "$DB_USER" ]; then + printf "Missing env var: DB_USER\n" + exit 1 +fi +if [ -z "$DB_PW" ]; then + printf "Missing env var: DB_PW\n" + exit 1 +fi + +# IMPOSM_CONFIG +# Full path to the imposm config file we want to use. +# Eg: /osm/config/imposm-config.json +if [ -z "$IMPOSM_CONFIG" ]; then + printf "Missing env var: IMPOSM_CONFIG\n" + exit 1 +fi +if [ ! -f "$IMPOSM_CONFIG" ]; then + printf "IMPOSM_CONFIG is an invalid file!\n" + exit 1 +fi + +# IMPOSM_MAPPING +# Full path to the imposm mapping file we want to use. +# Eg: /osm/config/imposm-mapping.json +if [ -z "$IMPOSM_MAPPING" ]; then + printf "Missing env var: IMPOSM_MAPPING\n" + exit 1 +fi +if [ ! 
-f "$IMPOSM_MAPPING" ]; then + printf "IMPOSM_MAPPING is an invalid file!\n" + exit 1 +fi + +# IMPOSM_CACHE_DIR +# Full path to an empty writeable directory where imposm will persist a cache of the import. +# Eg: /osm/cache +if [ -z "$IMPOSM_CACHE_DIR" ]; then + printf "Missing env var: IMPOSM_CACHE_DIR\n" + exit 1 +fi +if [ ! -d "$IMPOSM_CACHE_DIR" ]; then + printf "IMPOSM_CACHE_DIR is an invalid directory!\n" + exit 1 +fi + +# IMPOSM_DIFF_DIR +# Full path to an empty writeable directory where imposm will persist diffs for the import. +# Eg: /osm/cache +if [ -z "$IMPOSM_DIFF_DIR" ]; then + printf "Missing env var: IMPOSM_DIFF_DIR\n" + exit 1 +fi +if [ ! -d "$IMPOSM_DIFF_DIR" ]; then + printf "IMPOSM_DIFF_DIR is an invalid directory!\n" + exit 1 +fi + +# OSMOSIS_DIR +# Full path to a working directory for osmosis to create changes lists +# Eg: /osm/osmosis +if [ -z "$OSMOSIS_DIFF_DIR" ]; then + printf "Missing env var: OSMOSIS_DIFF_DIR\n" + exit 1 +fi +if [ ! -d "$OSMOSIS_DIFF_DIR" ]; then + printf "OSMOSIS_DIFF_DIR is an invalid directory!\n" + exit 1 +fi + +# check our db connection before we proceed +psql "dbname='$DB_NAME' host='$DB_HOST' port='$DB_PORT' user='$DB_USER' password='$DB_PW'" -c "\q" + +conn_string="postgis://${DB_USER}:${DB_PW}@${DB_HOST}/${DB_NAME}" + +printf "Initializing osmosis directory...\n" +cp ${IMPOSM_DIFF_DIR}/last.state.txt ${OSMOSIS_DIFF_DIR}/state.txt + +cat > ${OSMOSIS_DIFF_DIR}/configuration.txt <