-DB_NAME='census'
-DB_USER='postgres'
-TIGER_DATA_URL='http://www2.census.gov/geo/tiger/TIGER2009/24_MARYLAND/tl_2009_24_tabblock00.zip'
-TIGER_SRID='4269'
+DB_NAME=census
+DB_USER=postgres
+TIGER_SRID=4269
+
+# Root folder for the shapefiles.
+TIGER_ROOT=http://www2.census.gov/geo/tiger/TIGER2009
+
+# State-specific folders.
+DC_ROOT=$(TIGER_ROOT)/11_DISTRICT_OF_COLUMBIA
+MD_ROOT=$(TIGER_ROOT)/24_MARYLAND
+VA_ROOT=$(TIGER_ROOT)/51_VIRGINIA
+PA_ROOT=$(TIGER_ROOT)/42_PENNSYLVANIA
+NY_ROOT=$(TIGER_ROOT)/36_NEW_YORK
+
+# URLs for the TIGER/Line block-level shapefiles.
+DC_BLOCKS_URL=$(DC_ROOT)/tl_2009_11_tabblock00.zip
+MD_BLOCKS_URL=$(MD_ROOT)/tl_2009_24_tabblock00.zip
+VA_BLOCKS_URL=$(VA_ROOT)/tl_2009_51_tabblock00.zip
+PA_BLOCKS_URL=$(PA_ROOT)/tl_2009_42_tabblock00.zip
+NY_BLOCKS_URL=$(NY_ROOT)/tl_2009_36_tabblock00.zip
+
+# URL for the DC county all-lines shapefile.
+# D.C. just has one Census county.
+DC_LINES_URL=$(DC_ROOT)/11001_District_of_Columbia/tl_2009_11001_edges.zip
+
+# This is where it gets ugly.
+#
+# URLs for the MD county all-lines shapefiles.
+ALLEGANY_LINES_URL=$(MD_ROOT)/24001_Allegany_County/tl_2009_24001_edges.zip
+ANNE_ARUNDEL_LINES_URL=$(MD_ROOT)/24003_Anne_Arundel_County/tl_2009_24003_edges.zip
+BALTIMORE_CO_LINES_URL=$(MD_ROOT)/24005_Baltimore_County/tl_2009_24005_edges.zip
+BALTIMORE_CI_LINES_URL=$(MD_ROOT)/24510_Baltimore_city/tl_2009_24510_edges.zip
+CALVERT_LINES_URL=$(MD_ROOT)/24009_Calvert_County/tl_2009_24009_edges.zip
+CAROLINE_LINES_URL=$(MD_ROOT)/24011_Caroline_County/tl_2009_24011_edges.zip
+CARROLL_LINES_URL=$(MD_ROOT)/24013_Carroll_County/tl_2009_24013_edges.zip
+CECIL_LINES_URL=$(MD_ROOT)/24015_Cecil_County/tl_2009_24015_edges.zip
+CHARLES_LINES_URL=$(MD_ROOT)/24017_Charles_County/tl_2009_24017_edges.zip
+DORCHESTER_LINES_URL=$(MD_ROOT)/24019_Dorchester_County/tl_2009_24019_edges.zip
+FREDERICK_LINES_URL=$(MD_ROOT)/24021_Frederick_County/tl_2009_24021_edges.zip
+GARRETT_LINES_URL=$(MD_ROOT)/24023_Garrett_County/tl_2009_24023_edges.zip
+HARFORD_LINES_URL=$(MD_ROOT)/24025_Harford_County/tl_2009_24025_edges.zip
+HOWARD_LINES_URL=$(MD_ROOT)/24027_Howard_County/tl_2009_24027_edges.zip
+KENT_LINES_URL=$(MD_ROOT)/24029_Kent_County/tl_2009_24029_edges.zip
+MONTGOMERY_LINES_URL=$(MD_ROOT)/24031_Montgomery_County/tl_2009_24031_edges.zip
+PRINCE_GEORGES_LINES_URL=$(MD_ROOT)/24033_Prince_Georges_County/tl_2009_24033_edges.zip
+QUEEN_ANNES_LINES_URL=$(MD_ROOT)/24035_Queen_Annes_County/tl_2009_24035_edges.zip
+ST_MARYS_LINES_URL=$(MD_ROOT)/24037_St_Marys_County/tl_2009_24037_edges.zip
+SOMERSET_LINES_URL=$(MD_ROOT)/24039_Somerset_County/tl_2009_24039_edges.zip
+TALBOT_LINES_URL=$(MD_ROOT)/24041_Talbot_County/tl_2009_24041_edges.zip
+WASHINGTON_LINES_URL=$(MD_ROOT)/24043_Washington_County/tl_2009_24043_edges.zip
+WICOMICO_LINES_URL=$(MD_ROOT)/24045_Wicomico_County/tl_2009_24045_edges.zip
+WORCESTER_LINES_URL=$(MD_ROOT)/24047_Worcester_County/tl_2009_24047_edges.zip
+
# Starting with PostGIS 1.4.0, these paths are calculated at install
# time using the pg_config utility. Rather than try to guess where
# For now, we're only dealing with the Census 2000 Maryland Block
# data, so the filenames are hard-coded. Easy enough to change.
#
-data:
+data: tiger_blocks tiger_lines
+
+tiger_blocks: dc_blocks md_blocks va_blocks pa_blocks ny_blocks
+
+dc_blocks:
+ mkdir -p data/census2000/dc/block
+ if [ ! -f data/census2000/dc/block/tl_2009_11_tabblock00.shp ]; \
+ then \
+ wget -O dcblocks.zip $(DC_BLOCKS_URL); \
+ unzip dcblocks.zip -d ./data/census2000/dc/block; \
+ rm dcblocks.zip; \
+ fi;
+
+md_blocks:
mkdir -p data/census2000/maryland/block
- if [ ! -f data/census2000/maryland/block/tl_2009_24_tabblock00.shp ]; then \
- wget -O tmp.zip $(TIGER_DATA_URL); \
- unzip tmp.zip -d ./data/census2000/maryland/block; \
- rm tmp.zip; \
+ if [ ! -f data/census2000/maryland/block/tl_2009_24_tabblock00.shp ]; \
+ then \
+ wget -O mdblocks.zip $(MD_BLOCKS_URL); \
+ unzip mdblocks.zip -d ./data/census2000/maryland/block; \
+ rm mdblocks.zip; \
fi;
+va_blocks:
+ mkdir -p data/census2000/virginia/block
+ if [ ! -f data/census2000/virginia/block/tl_2009_51_tabblock00.shp ]; \
+ then \
+ wget -O vablocks.zip $(VA_BLOCKS_URL); \
+ unzip vablocks.zip -d ./data/census2000/virginia/block; \
+ rm vablocks.zip; \
+ fi;
+
+pa_blocks:
+ mkdir -p data/census2000/pennsylvania/block
+ if [ ! -f data/census2000/pennsylvania/block/tl_2009_42_tabblock00.shp ]; \
+ then \
+ wget -O pablocks.zip $(PA_BLOCKS_URL); \
+ unzip pablocks.zip -d ./data/census2000/pennsylvania/block; \
+ rm pablocks.zip; \
+ fi;
+
+ny_blocks:
+ mkdir -p data/census2000/new_york/block
+ if [ ! -f data/census2000/new_york/block/tl_2009_36_tabblock00.shp ]; \
+ then \
+ wget -O nyblocks.zip $(NY_BLOCKS_URL); \
+ unzip nyblocks.zip -d ./data/census2000/new_york/block; \
+ rm nyblocks.zip; \
+ fi;
+
+
+tiger_lines: dc_lines md_lines
+
+dc_lines:
+ mkdir -p data/census2000/dc/lines
+ if [ ! -f data/census2000/dc/lines/tl_2009_11001_edges.shp ]; \
+ then \
+ wget -O dclines.zip $(DC_LINES_URL); \
+ unzip dclines.zip -d ./data/census2000/dc/lines; \
+ rm dclines.zip; \
+ fi;
+
+md_lines: allegany_lines anne_arundel_lines baltimore_co_lines baltimore_ci_lines calvert_lines caroline_lines carroll_lines cecil_lines charles_lines dorchester_lines frederick_lines garrett_lines harford_lines howard_lines kent_lines montgomery_lines prince_georges_lines queen_annes_lines st_marys_lines somerset_lines talbot_lines washington_lines wicomico_lines worcester_lines
+
+allegany_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24001_edges.shp ]; \
+ then \
+ wget -O alleganylines.zip $(ALLEGANY_LINES_URL); \
+ unzip alleganylines.zip -d ./data/census2000/maryland/lines; \
+ rm alleganylines.zip; \
+ fi;
+
+anne_arundel_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24003_edges.shp ]; \
+ then \
+ wget -O aalines.zip $(ANNE_ARUNDEL_LINES_URL); \
+ unzip aalines.zip -d ./data/census2000/maryland/lines; \
+ rm aalines.zip; \
+ fi;
+
+baltimore_co_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24005_edges.shp ]; \
+ then \
+ wget -O bcolines.zip $(BALTIMORE_CO_LINES_URL); \
+ unzip bcolines.zip -d ./data/census2000/maryland/lines; \
+ rm bcolines.zip; \
+ fi;
+
+baltimore_ci_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24510_edges.shp ]; \
+ then \
+ wget -O bcilines.zip $(BALTIMORE_CI_LINES_URL); \
+ unzip bcilines.zip -d ./data/census2000/maryland/lines; \
+ rm bcilines.zip; \
+ fi;
+
+calvert_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24009_edges.shp ]; \
+ then \
+ wget -O calvertlines.zip $(CALVERT_LINES_URL); \
+ unzip calvertlines.zip -d ./data/census2000/maryland/lines; \
+ rm calvertlines.zip; \
+ fi;
+
+caroline_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24011_edges.shp ]; \
+ then \
+ wget -O carolinelines.zip $(CAROLINE_LINES_URL); \
+ unzip carolinelines.zip -d ./data/census2000/maryland/lines; \
+ rm carolinelines.zip; \
+ fi;
+
+carroll_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24013_edges.shp ]; \
+ then \
+ wget -O carrolllines.zip $(CARROLL_LINES_URL); \
+ unzip carrolllines.zip -d ./data/census2000/maryland/lines; \
+ rm carrolllines.zip; \
+ fi;
+
+cecil_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24015_edges.shp ]; \
+ then \
+ wget -O cecillines.zip $(CECIL_LINES_URL); \
+ unzip cecillines.zip -d ./data/census2000/maryland/lines; \
+ rm cecillines.zip; \
+ fi;
+
+charles_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24017_edges.shp ]; \
+ then \
+ wget -O charleslines.zip $(CHARLES_LINES_URL); \
+ unzip charleslines.zip -d ./data/census2000/maryland/lines; \
+ rm charleslines.zip; \
+ fi;
+
+dorchester_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24019_edges.shp ]; \
+ then \
+ wget -O dorchesterlines.zip $(DORCHESTER_LINES_URL); \
+ unzip dorchesterlines.zip -d ./data/census2000/maryland/lines; \
+ rm dorchesterlines.zip; \
+ fi;
+
+frederick_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24021_edges.shp ]; \
+ then \
+ wget -O fredericklines.zip $(FREDERICK_LINES_URL); \
+ unzip fredericklines.zip -d ./data/census2000/maryland/lines; \
+ rm fredericklines.zip; \
+ fi;
+
+garrett_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24023_edges.shp ]; \
+ then \
+ wget -O garrettlines.zip $(GARRETT_LINES_URL); \
+ unzip garrettlines.zip -d ./data/census2000/maryland/lines; \
+ rm garrettlines.zip; \
+ fi;
-# This task does a couple of things. First, it drops and re-creates
-# the DB_NAME database (or schema, whatever). Then, it adds PL/pgSQL
-# support to the database.
+harford_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24025_edges.shp ]; \
+ then \
+ wget -O harfordlines.zip $(HARFORD_LINES_URL); \
+ unzip harfordlines.zip -d ./data/census2000/maryland/lines; \
+ rm harfordlines.zip; \
+ fi;
+
+howard_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24027_edges.shp ]; \
+ then \
+ wget -O howardlines.zip $(HOWARD_LINES_URL); \
+ unzip howardlines.zip -d ./data/census2000/maryland/lines; \
+ rm howardlines.zip; \
+ fi;
+
+kent_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24029_edges.shp ]; \
+ then \
+ wget -O kentlines.zip $(KENT_LINES_URL); \
+ unzip kentlines.zip -d ./data/census2000/maryland/lines; \
+ rm kentlines.zip; \
+ fi;
+
+montgomery_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24031_edges.shp ]; \
+ then \
+ wget -O montgomerylines.zip $(MONTGOMERY_LINES_URL); \
+ unzip montgomerylines.zip -d ./data/census2000/maryland/lines; \
+ rm montgomerylines.zip; \
+ fi;
+
+prince_georges_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24033_edges.shp ]; \
+ then \
+ wget -O pglines.zip $(PRINCE_GEORGES_LINES_URL); \
+ unzip pglines.zip -d ./data/census2000/maryland/lines; \
+ rm pglines.zip; \
+ fi;
+
+queen_annes_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24035_edges.shp ]; \
+ then \
+ wget -O qalines.zip $(QUEEN_ANNES_LINES_URL); \
+ unzip qalines.zip -d ./data/census2000/maryland/lines; \
+ rm qalines.zip; \
+ fi;
+
+st_marys_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24037_edges.shp ]; \
+ then \
+ wget -O smlines.zip $(ST_MARYS_LINES_URL); \
+ unzip smlines.zip -d ./data/census2000/maryland/lines; \
+ rm smlines.zip; \
+ fi;
+
+somerset_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24039_edges.shp ]; \
+ then \
+ wget -O somersetlines.zip $(SOMERSET_LINES_URL); \
+ unzip somersetlines.zip -d ./data/census2000/maryland/lines; \
+ rm somersetlines.zip; \
+ fi;
+
+talbot_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24041_edges.shp ]; \
+ then \
+ wget -O talbotlines.zip $(TALBOT_LINES_URL); \
+ unzip talbotlines.zip -d ./data/census2000/maryland/lines; \
+ rm talbotlines.zip; \
+ fi;
+
+washington_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24043_edges.shp ]; \
+ then \
+ wget -O washingtonlines.zip $(WASHINGTON_LINES_URL); \
+ unzip washingtonlines.zip -d ./data/census2000/maryland/lines; \
+ rm washingtonlines.zip; \
+ fi;
+
+wicomico_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24045_edges.shp ]; \
+ then \
+ wget -O wicomicolines.zip $(WICOMICO_LINES_URL); \
+ unzip wicomicolines.zip -d ./data/census2000/maryland/lines; \
+ rm wicomicolines.zip; \
+ fi;
+
+worcester_lines:
+ mkdir -p data/census2000/maryland/lines
+ if [ ! -f data/census2000/maryland/lines/tl_2009_24047_edges.shp ]; \
+ then \
+ wget -O worcesterlines.zip $(WORCESTER_LINES_URL); \
+ unzip worcesterlines.zip -d ./data/census2000/maryland/lines; \
+ rm worcesterlines.zip; \
+ fi;
+
+
+# This imports the Tiger data using shp2pgsql. The shapefiles
+# should exist, since this task depends on the "data" task, which
+# downloads said shapefiles.
+#
+# After the TIGER import is done, we use the sf1blocks2sql script to
+# parse and import the geographic header record information.
+#
+db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
+# All Blocks
+#
+# The table already exists, so we can append to it, and we don't have
+# to create the GiST index.
+ for state in data/census2000/*; do \
+ $(PG_BINDIR)/shp2pgsql \
+ -a \
+ -s $(TIGER_SRID) \
+ -D \
+ $$state/block/*.shp \
+ tiger_blocks \
+ | psql -U $(DB_USER) -d $(DB_NAME); \
+ done;
+
+# MD Lines
+#
+# Since the table and index already exist, we can utilize -a,
+# and leave -I out.
+ for x in data/census2000/maryland/lines/*.shp; do \
+ $(PG_BINDIR)/shp2pgsql \
+ -a \
+ -s $(TIGER_SRID) \
+ -D \
+ $$x \
+ tiger_lines \
+ | psql -U $(DB_USER) -d $(DB_NAME); \
+ done;
+
+ bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
+	  | psql -U $(DB_USER) -d $(DB_NAME) \
+ > /dev/null
+
+
+
+# First, we drop and re-create the DB_NAME database (or schema,
+# whatever). Then, we add PL/pgSQL support to the database.
#
# At that point, we import the two PostGIS files, postgis.sql and
# spatial_ref_sys.sql. The postgis.sql file contains the geometry
# functions, while spatial_ref_sys.sql contains a table of SRIDs, and
# their associated properties. PostGIS requires both.
#
-# Then, we import the Tiger data using shp2pgsql. The shapefiles
-# should exist, since this task depends on the "data" task, which
-# downloads said shapefiles.
-#
-# Finally, we create the table for the demographic data (obtained from
-# the geographic header records), and populate that table with the output
-# of the sf1blocks2sql script.
-#
-db: data
- # Ignore the result of dropdb when it fails.
+newdb:
+# Ignore the result of dropdb when it fails.
dropdb -U $(DB_USER) $(DB_NAME) || true
createdb -U $(DB_USER) $(DB_NAME)
createlang -U $(DB_USER) plpgsql $(DB_NAME)
- psql -d $(DB_NAME) \
- -U $(DB_USER) \
- -f $(PG_SHAREDIR)/contrib/postgis.sql
+ psql -d $(DB_NAME) \
+ -U $(DB_USER) \
+ -f $(PG_SHAREDIR)/contrib/postgis.sql \
+ > /dev/null
- psql -d $(DB_NAME) \
- -U $(DB_USER) \
- -f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql
+ psql -d $(DB_NAME) \
+ -U $(DB_USER) \
+ -f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql \
+ > /dev/null
- $(PG_BINDIR)/shp2pgsql -I \
- -s $(TIGER_SRID) \
- data/census2000/maryland/block/tl_2009_24_tabblock00.shp \
- tiger_blocks \
- | psql -U $(DB_USER) -d $(DB_NAME)
- psql -d $(DB_NAME) \
- -U $(DB_USER) \
- -f sql/create-sf1_blocks-table.sql
+# This just runs the SQL script to create the sf1_blocks table.
+sf1_blocks_table:
+ psql -d $(DB_NAME) \
+ -U $(DB_USER) \
+ -f sql/create-sf1_blocks-table.sql \
+ > /dev/null
+
+
+# Create the tiger_blocks table, and create its GiST index. Having the
+# table already exist makes importing via shp2pgsql much easier.
+# Any blocks file will work as an argument.
+tiger_blocks_table:
+ $(PG_BINDIR)/shp2pgsql \
+ -p \
+ -I \
+ -s $(TIGER_SRID) \
+ data/census2000/maryland/block/tl_2009_24_tabblock00.shp \
+ tiger_blocks \
+	  | psql -U $(DB_USER) -d $(DB_NAME) \
+ > /dev/null
- bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
- | psql -U postgres -d $(DB_NAME)
+# Prepare the tiger_lines table, and create the GiST index on its
+# geometry column. Any lines shapefile will do here.
+tiger_lines_table:
+ $(PG_BINDIR)/shp2pgsql \
+ -p \
+ -I \
+ -s $(TIGER_SRID) \
+ data/census2000/maryland/lines/tl_2009_24510_edges.shp \
+ tiger_lines \
+	  | psql -U $(DB_USER) -d $(DB_NAME) \
+ > /dev/null