Added the linear program solving the mid-Atlantic region.
index d32c27f81ef2467a6b513e4fbb4876b494c3a101..2631b6b11a8ed60dde2a492ca0f2db83bb2879b4 100644 (file)
--- a/makefile
+++ b/makefile
@@ -2,406 +2,114 @@ DB_NAME=census
 DB_USER=postgres
 TIGER_SRID=4269
 
-# Root folder for the shapefiles.
-TIGER_ROOT=http://www2.census.gov/geo/tiger/TIGER2009
-
-# State-specific folders.
-DC_ROOT=$(TIGER_ROOT)/11_DISTRICT_OF_COLUMBIA
-MD_ROOT=$(TIGER_ROOT)/24_MARYLAND
-VA_ROOT=$(TIGER_ROOT)/51_VIRGINIA
-PA_ROOT=$(TIGER_ROOT)/42_PENNSYLVANIA
-NY_ROOT=$(TIGER_ROOT)/36_NEW_YORK
-
-# URLs for the TIGER/Line block-level shapefiles.
-DC_BLOCKS_URL=$(DC_ROOT)/tl_2009_11_tabblock00.zip
-MD_BLOCKS_URL=$(MD_ROOT)/tl_2009_24_tabblock00.zip
-VA_BLOCKS_URL=$(VA_ROOT)/tl_2009_51_tabblock00.zip
-PA_BLOCKS_URL=$(PA_ROOT)/tl_2009_42_tabblock00.zip
-NY_BLOCKS_URL=$(NY_ROOT)/tl_2009_36_tabblock00.zip
-
-# URLs for the DC county all-lines shapefiles.
-# D.C. just has one Census county.
-DC_LINES_URL=$(DC_ROOT)/11001_District_of_Columbia/tl_2009_11001_edges.zip
-
-# This is where it gets ugly.
-#
-# URLs for the MD county all-lines shapefiles.
-ALLEGANY_LINES_URL=$(MD_ROOT)/24001_Allegany_County/tl_2009_24001_edges.zip
-ANNE_ARUNDEL_LINES_URL=$(MD_ROOT)/24003_Anne_Arundel_County/tl_2009_24003_edges.zip
-BALTIMORE_CO_LINES_URL=$(MD_ROOT)/24005_Baltimore_County/tl_2009_24005_edges.zip
-BALTIMORE_CI_LINES_URL=$(MD_ROOT)/24510_Baltimore_city/tl_2009_24510_edges.zip
-CALVERT_LINES_URL=$(MD_ROOT)/24009_Calvert_County/tl_2009_24009_edges.zip
-CAROLINE_LINES_URL=$(MD_ROOT)/24011_Caroline_County/tl_2009_24011_edges.zip
-CARROLL_LINES_URL=$(MD_ROOT)/24013_Carroll_County/tl_2009_24013_edges.zip
-CECIL_LINES_URL=$(MD_ROOT)/24015_Cecil_County/tl_2009_24015_edges.zip
-CHARLES_LINES_URL=$(MD_ROOT)/24017_Charles_County/tl_2009_24017_edges.zip
-DORCHESTER_LINES_URL=$(MD_ROOT)/24019_Dorchester_County/tl_2009_24019_edges.zip
-FREDERICK_LINES_URL=$(MD_ROOT)/24021_Frederick_County/tl_2009_24021_edges.zip
-GARRETT_LINES_URL=$(MD_ROOT)/24023_Garrett_County/tl_2009_24023_edges.zip
-HARFORD_LINES_URL=$(MD_ROOT)/24025_Harford_County/tl_2009_24025_edges.zip
-HOWARD_LINES_URL=$(MD_ROOT)/24027_Howard_County/tl_2009_24027_edges.zip
-KENT_LINES_URL=$(MD_ROOT)/24029_Kent_County/tl_2009_24029_edges.zip
-MONTGOMERY_LINES_URL=$(MD_ROOT)/24031_Montgomery_County/tl_2009_24031_edges.zip
-PRINCE_GEORGES_LINES_URL=$(MD_ROOT)/24033_Prince_Georges_County/tl_2009_24033_edges.zip
-QUEEN_ANNES_LINES_URL=$(MD_ROOT)/24035_Queen_Annes_County/tl_2009_24035_edges.zip
-ST_MARYS_LINES_URL=$(MD_ROOT)/24037_St_Marys_County/tl_2009_24037_edges.zip
-SOMERSET_LINES_URL=$(MD_ROOT)/24039_Somerset_County/tl_2009_24039_edges.zip
-TALBOT_LINES_URL=$(MD_ROOT)/24041_Talbot_County/tl_2009_24041_edges.zip
-WASHINGTON_LINES_URL=$(MD_ROOT)/24043_Washington_County/tl_2009_24043_edges.zip
-WICOMICO_LINES_URL=$(MD_ROOT)/24045_Wicomico_County/tl_2009_24045_edges.zip
-WORCESTER_LINES_URL=$(MD_ROOT)/24047_Worcester_County/tl_2009_24047_edges.zip
-
-
-# Starting with PostGIS 1.4.0, these paths are calculated at install
-# time using the pg_config utility. Rather than try to guess where
-# PostGIS will wind up installed, we can just check the output of
-# pg_config ourselves.
-PG_BINDIR=`pg_config --bindir`
-PG_SHAREDIR=`pg_config --sharedir`
+
+# Dark magic. We set these makefile variables to be the result of the
+# 'shell' function. The shell function, in turn, executes a Python
+# script which determines the locations of these files.
+SHP2PGSQL := $(shell bin/find_file_paths --root /usr --single shp2pgsql)
+POSTGIS_SQL := $(shell bin/find_file_paths --root /usr lwpostgis.sql postgis.sql)
+SPATIAL_REF_SYS_SQL := $(shell bin/find_file_paths --root /usr spatial_ref_sys.sql)
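bin/find_file_paths itself is not part of this diff. Judging only from the comment and the flags, the --single call above does roughly what the shell sketch below does, i.e. locate a file under the given root; the multi-name calls presumably accept any of the listed names.

        find /usr -name 'shp2pgsql' -type f 2>/dev/null | head -n 1
        find /usr \( -name 'lwpostgis.sql' -o -name 'postgis.sql' \) -type f 2>/dev/null | head -n 1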
 
 # Necessary to run test/data without prerequisites.
 #
-.PHONY : test data
+.PHONY : test data lib
 
 
 # The default task, since it comes first in the list.
 #
-all: clean test
+all: clean lib test
 
 
 test:
        ./bin/run_tests
 
 
+# Download or check out any third-party libraries.
+lib:
+       $(MAKE) -C lib/
+
+
 # Remove byte-compiled python code.
 #
 clean:
        find ./ -name '*.pyc' -print0 | xargs -0 rm -f
 
 
-# Download the shapefiles from Tiger if they don't already exist.
-# For now, we're only dealing with the Census 2000 Maryland Block
-# data, so the filenames are hard-coded. Easy enough to change.
-#
-data: tiger_blocks tiger_lines
-
-tiger_blocks: dc_blocks md_blocks va_blocks pa_blocks ny_blocks
-
-dc_blocks:
-       mkdir -p data/census2000/dc/block
-       if [ ! -f data/census2000/dc/block/tl_2009_11_tabblock00.shp ]; \
-       then                                                            \
-               wget -O dcblocks.zip $(DC_BLOCKS_URL);                  \
-               unzip dcblocks.zip -d ./data/census2000/dc/block;       \
-               rm dcblocks.zip;                                        \
-       fi;
-
-md_blocks:
-       mkdir -p data/census2000/maryland/block
-       if [ ! -f data/census2000/maryland/block/tl_2009_24_tabblock00.shp ]; \
-       then                                                                  \
-               wget -O mdblocks.zip $(MD_BLOCKS_URL);                        \
-               unzip mdblocks.zip -d ./data/census2000/maryland/block;       \
-               rm mdblocks.zip;                                              \
-       fi;
-
-va_blocks:
-       mkdir -p data/census2000/virginia/block
-       if [ ! -f data/census2000/virginia/block/tl_2009_51_tabblock00.shp ]; \
-       then                                                                  \
-               wget -O vablocks.zip $(VA_BLOCKS_URL);                        \
-               unzip vablocks.zip -d ./data/census2000/virginia/block;       \
-               rm vablocks.zip;                                              \
-       fi;
-
-pa_blocks:
-       mkdir -p data/census2000/pennsylvania/block
-       if [ ! -f data/census2000/pennsylvania/block/tl_2009_42_tabblock00.shp ]; \
-       then                                                                      \
-               wget -O pablocks.zip $(PA_BLOCKS_URL);                            \
-               unzip pablocks.zip -d ./data/census2000/pennsylvania/block;       \
-               rm pablocks.zip;                                                  \
-       fi;
-
-ny_blocks:
-       mkdir -p data/census2000/new_york/block
-       if [ ! -f data/census2000/new_york/block/tl_2009_36_tabblock00.shp ]; \
-       then                                                                  \
-               wget -O nyblocks.zip $(NY_BLOCKS_URL);                        \
-               unzip nyblocks.zip -d ./data/census2000/new_york/block;       \
-               rm nyblocks.zip;                                              \
-       fi;
-
-
-tiger_lines: dc_lines md_lines
-
-dc_lines:
-       mkdir -p data/census2000/dc/lines
-       if [ ! -f data/census2000/dc/lines/tl_2009_11001_edges.shp ];   \
-       then                                                            \
-               wget -O dclines.zip $(DC_LINES_URL);                    \
-               unzip dclines.zip -d ./data/census2000/dc/lines;        \
-               rm dclines.zip;                                         \
-       fi;
-
-md_lines: allegany_lines anne_arundel_lines baltimore_co_lines baltimore_ci_lines calvert_lines caroline_lines carroll_lines cecil_lines charles_lines dorchester_lines frederick_lines garrett_lines harford_lines howard_lines kent_lines montgomery_lines prince_georges_lines queen_annes_lines st_marys_lines somerset_lines talbot_lines washington_lines wicomico_lines worcester_lines
-
-allegany_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24001_edges.shp ];  \
-       then                                                                 \
-               wget -O alleganylines.zip $(ALLEGANY_LINES_URL);             \
-               unzip alleganylines.zip -d ./data/census2000/maryland/lines; \
-               rm alleganylines.zip;                                        \
-       fi;
-
-anne_arundel_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24003_edges.shp ];  \
-       then                                                                 \
-               wget -O aalines.zip $(ANNE_ARUNDEL_LINES_URL);               \
-               unzip aalines.zip -d ./data/census2000/maryland/lines;       \
-               rm aalines.zip;                                              \
-       fi;
-
-baltimore_co_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24005_edges.shp ];  \
-       then                                                                 \
-               wget -O bcolines.zip $(BALTIMORE_CO_LINES_URL);              \
-               unzip bcolines.zip -d ./data/census2000/maryland/lines;      \
-               rm bcolines.zip;                                             \
-       fi;
-
-baltimore_ci_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24510_edges.shp ];  \
-       then                                                                 \
-               wget -O bcilines.zip $(BALTIMORE_CI_LINES_URL);              \
-               unzip bcilines.zip -d ./data/census2000/maryland/lines;      \
-               rm bcilines.zip;                                             \
-       fi;
-
-calvert_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24009_edges.shp ];  \
-       then                                                                 \
-               wget -O calvertlines.zip $(CALVERT_LINES_URL);               \
-               unzip calvertlines.zip -d ./data/census2000/maryland/lines;  \
-               rm calvertlines.zip;                                         \
-       fi;
-
-caroline_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24011_edges.shp ];  \
-       then                                                                 \
-               wget -O carolinelines.zip $(CAROLINE_LINES_URL);             \
-               unzip carolinelines.zip -d ./data/census2000/maryland/lines; \
-               rm carolinelines.zip;                                        \
-       fi;
-
-carroll_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24013_edges.shp ];  \
-       then                                                                 \
-               wget -O carrolllines.zip $(CARROLL_LINES_URL);               \
-               unzip carrolllines.zip -d ./data/census2000/maryland/lines;  \
-               rm carrolllines.zip;                                         \
-       fi;
-
-cecil_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24015_edges.shp ];  \
-       then                                                                 \
-               wget -O cecillines.zip $(CECIL_LINES_URL);                   \
-               unzip cecillines.zip -d ./data/census2000/maryland/lines;    \
-               rm cecillines.zip;                                           \
-       fi;
-
-charles_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24017_edges.shp ];  \
-       then                                                                 \
-               wget -O charleslines.zip $(CHARLES_LINES_URL);               \
-               unzip charleslines.zip -d ./data/census2000/maryland/lines;  \
-               rm charleslines.zip;                                         \
-       fi;
-
-dorchester_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24019_edges.shp ];    \
-       then                                                                   \
-               wget -O dorchesterlines.zip $(DORCHESTER_LINES_URL);           \
-               unzip dorchesterlines.zip -d ./data/census2000/maryland/lines; \
-               rm dorchesterlines.zip;                                        \
-       fi;
-
-frederick_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24021_edges.shp ];   \
-       then                                                                  \
-               wget -O fredericklines.zip $(FREDERICK_LINES_URL);            \
-               unzip fredericklines.zip -d ./data/census2000/maryland/lines; \
-               rm fredericklines.zip;                                        \
-       fi;
-
-garrett_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24023_edges.shp ]; \
-       then                                                                \
-               wget -O garrettlines.zip $(GARRETT_LINES_URL);              \
-               unzip garrettlines.zip -d ./data/census2000/maryland/lines; \
-               rm garrettlines.zip;                                        \
-       fi;
-
-harford_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24025_edges.shp ]; \
-       then                                                                \
-               wget -O harfordlines.zip $(HARFORD_LINES_URL);              \
-               unzip harfordlines.zip -d ./data/census2000/maryland/lines; \
-               rm harfordlines.zip;                                        \
-       fi;
-
-howard_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24027_edges.shp ]; \
-       then                                                                \
-               wget -O howardlines.zip $(HOWARD_LINES_URL);                \
-               unzip howardlines.zip -d ./data/census2000/maryland/lines;  \
-               rm howardlines.zip;                                         \
-       fi;
-
-kent_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24029_edges.shp ]; \
-       then                                                                \
-               wget -O kentlines.zip $(KENT_LINES_URL);                    \
-               unzip kentlines.zip -d ./data/census2000/maryland/lines;    \
-               rm kentlines.zip;                                           \
-       fi;
-
-montgomery_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24031_edges.shp ];    \
-       then                                                                   \
-               wget -O montgomerylines.zip $(MONTGOMERY_LINES_URL);           \
-               unzip montgomerylines.zip -d ./data/census2000/maryland/lines; \
-               rm montgomerylines.zip;                                        \
-       fi;
-
-prince_georges_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24033_edges.shp ]; \
-       then                                                                \
-               wget -O pglines.zip $(PRINCE_GEORGES_LINES_URL);            \
-               unzip pglines.zip -d ./data/census2000/maryland/lines;      \
-               rm pglines.zip;                                             \
-       fi;
-
-queen_annes_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24035_edges.shp ]; \
-       then                                                                \
-               wget -O qalines.zip $(QUEEN_ANNES_LINES_URL);               \
-               unzip qalines.zip -d ./data/census2000/maryland/lines;      \
-               rm qalines.zip;                                             \
-       fi;
-
-st_marys_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24037_edges.shp ]; \
-       then                                                                \
-               wget -O smlines.zip $(ST_MARYS_LINES_URL);                  \
-               unzip smlines.zip -d ./data/census2000/maryland/lines;      \
-               rm smlines.zip;                                             \
-       fi;
-
-somerset_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24039_edges.shp ];  \
-       then                                                                 \
-               wget -O somersetlines.zip $(SOMERSET_LINES_URL);             \
-               unzip somersetlines.zip -d ./data/census2000/maryland/lines; \
-               rm somersetlines.zip;                                        \
-       fi;
-
-talbot_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24041_edges.shp ]; \
-       then                                                                \
-               wget -O talbotlines.zip $(TALBOT_LINES_URL);                \
-               unzip talbotlines.zip -d ./data/census2000/maryland/lines;  \
-               rm talbotlines.zip;                                         \
-       fi;
-
-washington_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24043_edges.shp ];    \
-       then                                                                   \
-               wget -O washingtonlines.zip $(WASHINGTON_LINES_URL);           \
-               unzip washingtonlines.zip -d ./data/census2000/maryland/lines; \
-               rm washingtonlines.zip;                                        \
-       fi;
-
-wicomico_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24045_edges.shp ];  \
-       then                                                                 \
-               wget -O wicomicolines.zip $(WICOMICO_LINES_URL);             \
-               unzip wicomicolines.zip -d ./data/census2000/maryland/lines; \
-               rm wicomicolines.zip;                                        \
-       fi;
-
-worcester_lines:
-       mkdir -p data/census2000/maryland/lines
-       if [ ! -f data/census2000/maryland/lines/tl_2009_24047_edges.shp ];   \
-       then                                                                  \
-               wget -O worcesterlines.zip $(WORCESTER_LINES_URL);            \
-               unzip worcesterlines.zip -d ./data/census2000/maryland/lines; \
-               rm worcesterlines.zip;                                        \
-       fi;
+data:
+       bin/download_data
 
 
-# This imports the Tiger data using shp2pgsql. The shapefiles
-# should exist, since this task depends on the "data" task, which
-# downloads said shapefiles.
-#
-# After the TIGER import is done, we use the sf1blocks2sql script to
-# parse and import the geographic header record information.
+# There is a small issue here with the blocks_db and lines_db
+# targets. Each of these requires that the database exists, and might
+# therefore depend on the newdb target. However, if /each/ of them
+# depended on newdb, the database would be dropped twice and the data
+# from one of {blocks, lines} would be lost.
 #
-db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
+# We therefore assume that the database already exists when blocks_db
+# or lines_db is invoked.
+blocks_db: data blocks_table
 # All Blocks
 #
 # The table already exists, so we can append to it, and we don't have
 # to create the GiST index.
        for state in data/census2000/*; do                  \
-               $(PG_BINDIR)/shp2pgsql                      \
+               $(SHP2PGSQL)                                \
                        -a                                  \
                        -s $(TIGER_SRID)                    \
                        -D                                  \
-                       $$state/block/*.shp                 \
+                       $$state/blocks/*.shp                \
                        tiger_blocks                        \
                        | psql -U $(DB_USER) -d $(DB_NAME); \
        done;
 
-# MD Lines
+# Summary File 1
+#
+# Run all of the geo (uf1) files through the import script. This has
+# to happen after the blocks import since we impose a foreign key
+# constraint.
+       for state in data/census2000/*; do                          \
+               bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks      \
+                          | psql -U $(DB_USER) -d $(DB_NAME)        \
+                          > /dev/null;                             \
+       done;
+
+# Run the query to combine the two blocks tables, and drop the
+# constituents.
+       psql -U $(DB_USER) \
+            -d $(DB_NAME) \
+            -f sql/combine-block-tables.sql
+
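sql/combine-block-tables.sql is referenced here but not shown in the diff. Going only by the comment above and the blocks_table target further down, it plausibly amounts to an INSERT-from-join followed by two DROPs, along the lines of this sketch (run here with the makefile's DB_USER/DB_NAME values); the blocks column list, the pop100 attribute, and the blkidfp00 join key are guesses:

        # Hypothetical sketch of the combine step; the real SQL lives in
        # sql/combine-block-tables.sql, which this diff does not include.
        psql -U postgres -d census -c \
          'INSERT INTO blocks
                SELECT t.*, s.pop100
                  FROM tiger_blocks t
                  JOIN sf1_blocks s USING (blkidfp00);
           DROP TABLE tiger_blocks;
           DROP TABLE sf1_blocks;'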
+
+lines_db: data tiger_lines_table
+# All Lines
 #
 # Since the table and index already exist, we can utilize -a,
 # and leave -I out.
-       for x in data/census2000/maryland/lines/*.shp; do   \
-               $(PG_BINDIR)/shp2pgsql                      \
-                       -a                                  \
-                       -s $(TIGER_SRID)                    \
-                       -D                                  \
-                       $$x                                 \
-                       tiger_lines                         \
-                       | psql -U $(DB_USER) -d $(DB_NAME); \
+       for state in data/census2000/*; do                          \
+               for shapefile in $$state/lines/*.shp; do            \
+                       echo "Importing $$shapefile.";              \
+                       $(SHP2PGSQL)                                \
+                               -a                                  \
+                               -s $(TIGER_SRID)                    \
+                               $$shapefile                         \
+                               tiger_lines                         \
+                               | bin/filter-transactions           \
+                               | psql -U $(DB_USER) -d $(DB_NAME)  \
+                               > /dev/null;                        \
+               done;                                               \
        done;
 
-       bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
-                          | psql -U postgres -d $(DB_NAME)                     \
-                          > /dev/null
+
+
+# This imports the TIGER data using shp2pgsql. The shapefiles should
+# exist, since both blocks_db and lines_db depend on the "data" task,
+# which downloads said shapefiles.
+#
+# After the TIGER import is done, we use the sf1blocks2sql script to
+# parse and import the geographic header record information.
+#
+db: newdb blocks_db lines_db
+       # Do nothing except fulfill our prerequisites.
 
 
 
@@ -419,14 +127,14 @@ newdb:
        createdb -U $(DB_USER) $(DB_NAME)
        createlang -U $(DB_USER) plpgsql $(DB_NAME)
 
-       psql -d $(DB_NAME)                         \
-             -U $(DB_USER)                         \
-             -f $(PG_SHAREDIR)/contrib/postgis.sql \
+       psql -d $(DB_NAME)     \
+             -U $(DB_USER)     \
+             -f $(POSTGIS_SQL) \
              > /dev/null
 
-       psql -d $(DB_NAME)                                 \
-             -U $(DB_USER)                                 \
-             -f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql \
+       psql -d $(DB_NAME)             \
+             -U $(DB_USER)             \
+             -f $(SPATIAL_REF_SYS_SQL) \
             > /dev/null
 
 
@@ -442,24 +150,36 @@ sf1_blocks_table:
 # table already exist makes importing via shp2pgsql much easier.
 # Any blocks file will work as an argument.
 tiger_blocks_table:
-       $(PG_BINDIR)/shp2pgsql                                            \
+       $(SHP2PGSQL)                                                      \
                -p                                                        \
                -I                                                        \
                -s $(TIGER_SRID)                                          \
-               data/census2000/maryland/block/tl_2009_24_tabblock00.shp  \
+               data/census2000/maryland/blocks/tl_2009_24_tabblock00.shp \
                tiger_blocks                                              \
-               | psql -U postgres -d $(DB_NAME)                          \
+               | psql -U $(DB_USER) -d $(DB_NAME)                        \
                 > /dev/null
 
+# Create the "blocks" table, which is the result of joining
+# the tiger_blocks and sf1_blocks tables.
+blocks_table: tiger_blocks_table sf1_blocks_table
+       psql -U $(DB_USER) \
+            -d $(DB_NAME) \
+            -f sql/create-blocks-table.sql
+
 
 # Prepare the tiger_lines table, and create the GiST index on its
 # geometry column. Any lines shapefile will do here.
 tiger_lines_table:
-       $(PG_BINDIR)/shp2pgsql                                         \
+       $(SHP2PGSQL)                                                   \
                -p                                                     \
                -I                                                     \
                -s $(TIGER_SRID)                                       \
                data/census2000/maryland/lines/tl_2009_24510_edges.shp \
                tiger_lines                                            \
-               | psql -U postgres -d $(DB_NAME)                       \
+               | psql -U $(DB_USER) -d $(DB_NAME)                     \
                 > /dev/null
+
+# Add a unique index on the "tlid" column.
+       psql -U $(DB_USER) \
+             -d $(DB_NAME) \
+             -f sql/create_tlid_unique_index.sql
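sql/create_tlid_unique_index.sql is likewise not shown; given the comment, it presumably boils down to a single statement, roughly as below (the index name is made up):

        # Hypothetical contents of sql/create_tlid_unique_index.sql,
        # issued with the makefile's DB_USER/DB_NAME values.
        psql -U postgres -d census -c \
          'CREATE UNIQUE INDEX tiger_lines_tlid_idx ON tiger_lines (tlid);'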