From: Michael Orlitzky
Date: Sun, 4 Oct 2009 17:18:51 +0000 (-0400)
Subject: Added the MD lines import to the makefile.
X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Fcensus-tools.git;a=commitdiff_plain;h=414a6e69330409fc56546f2f08a4d18a9548f954

Added the MD lines import to the makefile.

Reworked the data target to be more modular. Made the blocks/lines
import via wildcard. Fixed all of the index/append inconsistencies in
the shp2pgsql usage.
---

diff --git a/makefile b/makefile
index 2ab9dba..d32c27f 100644
--- a/makefile
+++ b/makefile
@@ -133,7 +133,7 @@ ny_blocks:
 	fi;
 
 
-tiger_lines: dc_lines
+tiger_lines: dc_lines md_lines
 
 dc_lines:
 	mkdir -p data/census2000/dc/lines
@@ -144,95 +144,322 @@ dc_lines:
 		rm dclines.zip; \
 	fi;
 
-# This task does a couple of things. First, it drops and re-creates
-# the DB_NAME database (or schema, whatever). Then, it adds PL/pgSQL
-# support to the database.
-#
-# At that point, we import the two PostGIS files, postgis.sql and
-# spatial_ref_sys.sql. The postgis.sql file contains the geometry
-# functions, while spatial_ref_sys.sql contains a table of SRIDs, and
-# their associated properties. PostGIS requires both.
-#
-# Then, we import the Tiger data using shp2pgsql. The shapefiles
-# should exist, since this task depends on the "data" task, which
-# downloads said shapefiles.
-#
-# Finally, we create the table for the demographic data (obtained from
-# the geographic header records), and populate that table with the output
-# of the sf1blocks2sql script.
-#
-db: data
-	# Ignore the result of dropdb when it fails.
-	dropdb -U $(DB_USER) $(DB_NAME) || true
-	createdb -U $(DB_USER) $(DB_NAME)
-	createlang -U $(DB_USER) plpgsql $(DB_NAME)
+md_lines: allegany_lines anne_arundel_lines baltimore_co_lines baltimore_ci_lines calvert_lines caroline_lines carroll_lines cecil_lines charles_lines dorchester_lines frederick_lines garrett_lines harford_lines howard_lines kent_lines montgomery_lines prince_georges_lines queen_annes_lines st_marys_lines somerset_lines talbot_lines washington_lines wicomico_lines worcester_lines
 
-	psql -d $(DB_NAME) \
-	     -U $(DB_USER) \
-	     -f $(PG_SHAREDIR)/contrib/postgis.sql
+allegany_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24001_edges.shp ]; \
+	then \
+		wget -O alleganylines.zip $(ALLEGANY_LINES_URL); \
+		unzip alleganylines.zip -d ./data/census2000/maryland/lines; \
+		rm alleganylines.zip; \
+	fi;
+
+anne_arundel_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24003_edges.shp ]; \
+	then \
+		wget -O aalines.zip $(ANNE_ARUNDEL_LINES_URL); \
+		unzip aalines.zip -d ./data/census2000/maryland/lines; \
+		rm aalines.zip; \
+	fi;
 
-	psql -d $(DB_NAME) \
-	     -U $(DB_USER) \
-	     -f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql
+baltimore_co_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24005_edges.shp ]; \
+	then \
+		wget -O bcolines.zip $(BALTIMORE_CO_LINES_URL); \
+		unzip bcolines.zip -d ./data/census2000/maryland/lines; \
+		rm bcolines.zip; \
+	fi;
+
+baltimore_ci_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24510_edges.shp ]; \
+	then \
+		wget -O bcilines.zip $(BALTIMORE_CI_LINES_URL); \
+		unzip bcilines.zip -d ./data/census2000/maryland/lines; \
+		rm bcilines.zip; \
+	fi;
+
+calvert_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24009_edges.shp ]; \
+	then \
+		wget -O calvertlines.zip $(CALVERT_LINES_URL); \
+		unzip calvertlines.zip -d ./data/census2000/maryland/lines; \
+		rm calvertlines.zip; \
+	fi;
+
+caroline_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24011_edges.shp ]; \
+	then \
+		wget -O carolinelines.zip $(CAROLINE_LINES_URL); \
+		unzip carolinelines.zip -d ./data/census2000/maryland/lines; \
+		rm carolinelines.zip; \
+	fi;
+
+carroll_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24013_edges.shp ]; \
+	then \
+		wget -O carrolllines.zip $(CARROLL_LINES_URL); \
+		unzip carrolllines.zip -d ./data/census2000/maryland/lines; \
+		rm carrolllines.zip; \
+	fi;
+
+cecil_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24015_edges.shp ]; \
+	then \
+		wget -O cecillines.zip $(CECIL_LINES_URL); \
+		unzip cecillines.zip -d ./data/census2000/maryland/lines; \
+		rm cecillines.zip; \
+	fi;
+
+charles_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24017_edges.shp ]; \
+	then \
+		wget -O charleslines.zip $(CHARLES_LINES_URL); \
+		unzip charleslines.zip -d ./data/census2000/maryland/lines; \
+		rm charleslines.zip; \
+	fi;
+
+dorchester_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24019_edges.shp ]; \
+	then \
+		wget -O dorchesterlines.zip $(DORCHESTER_LINES_URL); \
+		unzip dorchesterlines.zip -d ./data/census2000/maryland/lines; \
+		rm dorchesterlines.zip; \
+	fi;
+
+frederick_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24021_edges.shp ]; \
+	then \
+		wget -O fredericklines.zip $(FREDERICK_LINES_URL); \
+		unzip fredericklines.zip -d ./data/census2000/maryland/lines; \
+		rm fredericklines.zip; \
+	fi;
 
-# D.C. Blocks
+garrett_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24023_edges.shp ]; \
+	then \
+		wget -O garrettlines.zip $(GARRETT_LINES_URL); \
+		unzip garrettlines.zip -d ./data/census2000/maryland/lines; \
+		rm garrettlines.zip; \
+	fi;
 
-	$(PG_BINDIR)/shp2pgsql \
-		-I \
-		-s $(TIGER_SRID) \
-		-D \
-		data/census2000/dc/block/tl_2009_11_tabblock00.shp \
-		tiger_blocks \
-	| psql -U $(DB_USER) -d $(DB_NAME)
+harford_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24025_edges.shp ]; \
+	then \
+		wget -O harfordlines.zip $(HARFORD_LINES_URL); \
+		unzip harfordlines.zip -d ./data/census2000/maryland/lines; \
+		rm harfordlines.zip; \
+	fi;
 
-# Maryland Blocks
+howard_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24027_edges.shp ]; \
+	then \
+		wget -O howardlines.zip $(HOWARD_LINES_URL); \
+		unzip howardlines.zip -d ./data/census2000/maryland/lines; \
+		rm howardlines.zip; \
+	fi;
 
-	$(PG_BINDIR)/shp2pgsql \
-		-I \
-		-s $(TIGER_SRID) \
-		-D \
-		data/census2000/maryland/block/tl_2009_24_tabblock00.shp \
-		tiger_blocks \
-	| psql -U $(DB_USER) -d $(DB_NAME)
+kent_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24029_edges.shp ]; \
+	then \
+		wget -O kentlines.zip $(KENT_LINES_URL); \
+		unzip kentlines.zip -d ./data/census2000/maryland/lines; \
+		rm kentlines.zip; \
+	fi;
+montgomery_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24031_edges.shp ]; \
+	then \
+		wget -O montgomerylines.zip $(MONTGOMERY_LINES_URL); \
+		unzip montgomerylines.zip -d ./data/census2000/maryland/lines; \
+		rm montgomerylines.zip; \
+	fi;
 
-# Virginia Blocks
+prince_georges_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24033_edges.shp ]; \
+	then \
+		wget -O pglines.zip $(PRINCE_GEORGES_LINES_URL); \
+		unzip pglines.zip -d ./data/census2000/maryland/lines; \
+		rm pglines.zip; \
+	fi;
 
-	$(PG_BINDIR)/shp2pgsql -a \
-		-I \
-		-s $(TIGER_SRID) \
-		-D \
-		data/census2000/virginia/block/tl_2009_51_tabblock00.shp \
-		tiger_blocks \
-	| psql -U $(DB_USER) -d $(DB_NAME)
+queen_annes_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24035_edges.shp ]; \
+	then \
+		wget -O qalines.zip $(QUEEN_ANNES_LINES_URL); \
+		unzip qalines.zip -d ./data/census2000/maryland/lines; \
+		rm qalines.zip; \
+	fi;
+st_marys_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24037_edges.shp ]; \
+	then \
+		wget -O smlines.zip $(ST_MARYS_LINES_URL); \
+		unzip smlines.zip -d ./data/census2000/maryland/lines; \
+		rm smlines.zip; \
+	fi;
 
-# Pennsylvania Blocks
+somerset_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24039_edges.shp ]; \
+	then \
+		wget -O somersetlines.zip $(SOMERSET_LINES_URL); \
+		unzip somersetlines.zip -d ./data/census2000/maryland/lines; \
+		rm somersetlines.zip; \
+	fi;
 
-	$(PG_BINDIR)/shp2pgsql -a \
-		-I \
-		-s $(TIGER_SRID) \
-		-D \
-		data/census2000/pennsylvania/block/tl_2009_42_tabblock00.shp \
-		tiger_blocks \
-	| psql -U $(DB_USER) -d $(DB_NAME)
+talbot_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24041_edges.shp ]; \
+	then \
+		wget -O talbotlines.zip $(TALBOT_LINES_URL); \
+		unzip talbotlines.zip -d ./data/census2000/maryland/lines; \
+		rm talbotlines.zip; \
+	fi;
+washington_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24043_edges.shp ]; \
+	then \
+		wget -O washingtonlines.zip $(WASHINGTON_LINES_URL); \
+		unzip washingtonlines.zip -d ./data/census2000/maryland/lines; \
+		rm washingtonlines.zip; \
+	fi;
 
-# New York Blocks
+wicomico_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24045_edges.shp ]; \
+	then \
+		wget -O wicomicolines.zip $(WICOMICO_LINES_URL); \
+		unzip wicomicolines.zip -d ./data/census2000/maryland/lines; \
+		rm wicomicolines.zip; \
+	fi;
 
-	$(PG_BINDIR)/shp2pgsql -a \
-		-I \
-		-s $(TIGER_SRID) \
-		-D \
-		data/census2000/new_york/block/tl_2009_36_tabblock00.shp \
-		tiger_blocks \
-	| psql -U $(DB_USER) -d $(DB_NAME)
+worcester_lines:
+	mkdir -p data/census2000/maryland/lines
+	if [ ! -f data/census2000/maryland/lines/tl_2009_24047_edges.shp ]; \
+	then \
+		wget -O worcesterlines.zip $(WORCESTER_LINES_URL); \
+		unzip worcesterlines.zip -d ./data/census2000/maryland/lines; \
+		rm worcesterlines.zip; \
+	fi;
 
-	psql -d $(DB_NAME) \
-	     -U $(DB_USER) \
-	     -f sql/create-sf1_blocks-table.sql
+# This imports the Tiger data using shp2pgsql. The shapefiles
+# should exist, since this task depends on the "data" task, which
+# downloads said shapefiles.
+#
+# After the TIGER import is done, we use the sf1blocks2sql script to
+# parse and import the geographic header record information.
+#
+db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
+# All Blocks
+#
+# The table already exists, so we can append to it, and we don't have
+# to create the GiST index.
+	for state in data/census2000/*; do \
+		$(PG_BINDIR)/shp2pgsql \
+			-a \
+			-s $(TIGER_SRID) \
+			-D \
+			$$state/block/*.shp \
+			tiger_blocks \
+		| psql -U $(DB_USER) -d $(DB_NAME); \
+	done;
+
+# MD Lines
+#
+# Since the table and index already exist, we can utilize -a,
+# and leave -I out.
+	for x in data/census2000/maryland/lines/*.shp; do \
+		$(PG_BINDIR)/shp2pgsql \
+			-a \
+			-s $(TIGER_SRID) \
+			-D \
+			$$x \
+			tiger_lines \
+		| psql -U $(DB_USER) -d $(DB_NAME); \
+	done;
 
 	bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
-	  | psql -U postgres -d $(DB_NAME) > /dev/null
+	  | psql -U postgres -d $(DB_NAME) \
+	  > /dev/null
+
+
+
+# First, we drop and re-create the DB_NAME database (or schema,
+# whatever). Then, we add PL/pgSQL support to the database.
+#
+# At that point, we import the two PostGIS files, postgis.sql and
+# spatial_ref_sys.sql. The postgis.sql file contains the geometry
+# functions, while spatial_ref_sys.sql contains a table of SRIDs, and
+# their associated properties. PostGIS requires both.
+#
+newdb:
+# Ignore the result of dropdb when it fails.
+	dropdb -U $(DB_USER) $(DB_NAME) || true
+	createdb -U $(DB_USER) $(DB_NAME)
+	createlang -U $(DB_USER) plpgsql $(DB_NAME)
+	psql -d $(DB_NAME) \
+	     -U $(DB_USER) \
+	     -f $(PG_SHAREDIR)/contrib/postgis.sql \
+	     > /dev/null
+
+	psql -d $(DB_NAME) \
+	     -U $(DB_USER) \
+	     -f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql \
+	     > /dev/null
+
+
+# This just runs the SQL script to create the sf1_blocks table.
+sf1_blocks_table:
+	psql -d $(DB_NAME) \
+	     -U $(DB_USER) \
+	     -f sql/create-sf1_blocks-table.sql \
+	     > /dev/null
+
+
+# Create the tiger_blocks table, and create its GiST index. Having the
+# table already exist makes importing via shp2pgsql much easier.
+# Any blocks file will work as an argument.
+tiger_blocks_table:
+	$(PG_BINDIR)/shp2pgsql \
+		-p \
+		-I \
+		-s $(TIGER_SRID) \
+		data/census2000/maryland/block/tl_2009_24_tabblock00.shp \
+		tiger_blocks \
+	| psql -U postgres -d $(DB_NAME) \
+	> /dev/null
+
+
+# Prepare the tiger_lines table, and create the GiST index on its
+# geometry column. Any lines shapefile will do here.
+tiger_lines_table:
+	$(PG_BINDIR)/shp2pgsql \
+		-p \
+		-I \
+		-s $(TIGER_SRID) \
+		data/census2000/maryland/lines/tl_2009_24510_edges.shp \
+		tiger_lines \
+	| psql -U postgres -d $(DB_NAME) \
+	> /dev/null
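
For reference, one quick way to sanity-check the result of "make db" once
this patch is applied is to count the imported rows and confirm that the
GiST indexes created by the -I flags are present. The target below is only
a sketch and is not part of the commit above: the name check_import is
hypothetical, while DB_USER, DB_NAME, tiger_blocks, and tiger_lines are the
variables and tables used in the makefile, and pg_indexes is the standard
PostgreSQL system view of index definitions.

# Sketch only; not part of the patch. Row counts should be non-zero, and
# each geometry table should show a GiST index created by the prepare step.
check_import:
	psql -d $(DB_NAME) -U $(DB_USER) -c "SELECT count(*) FROM tiger_blocks;"
	psql -d $(DB_NAME) -U $(DB_USER) -c "SELECT count(*) FROM tiger_lines;"
	psql -d $(DB_NAME) -U $(DB_USER) -c "SELECT tablename, indexname FROM pg_indexes WHERE tablename IN ('tiger_blocks', 'tiger_lines');"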