gitweb.michael.orlitzky.com - dead/census-tools.git/commitdiff
Fixed a few hard-coded database names in the makefile.
author Michael Orlitzky <michael@orlitzky.com>
Fri, 29 Jan 2010 14:24:20 +0000 (09:24 -0500)
committer Michael Orlitzky <michael@orlitzky.com>
Fri, 29 Jan 2010 14:24:20 +0000 (09:24 -0500)
Updated the makefile with some new targets to separate the block data from that of the lines.
Include the SF1/TIGER blocks combination in the blocks_db and db targets.
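For orientation, the target layout this commit moves to can be sketched as a
minimal makefile. This is not part of the patch: the recipe bodies are echo
placeholders, and the DB_USER/DB_NAME defaults are assumed for illustration
only; the real recipes, variable definitions, and SQL scripts live in the
repository's makefile, of which the relevant hunks follow (recipe lines are
tab-indented, as make requires).

# Condensed sketch of the dependency layout after this commit.
# Placeholder recipes; DB_USER/DB_NAME defaults are assumptions.
DB_USER ?= postgres
DB_NAME ?= census

.PHONY: data newdb db blocks_db lines_db blocks_table \
        tiger_blocks_table sf1_blocks_table tiger_lines_table

data:
	@echo "download the TIGER shapefiles and SF1 files"

newdb:
	@echo "drop and recreate the $(DB_NAME) database"

tiger_blocks_table sf1_blocks_table tiger_lines_table:
	@echo "create the empty $@ table (schema only)"

# "blocks" is the join of tiger_blocks and sf1_blocks.
blocks_table: tiger_blocks_table sf1_blocks_table
	@echo "run sql/create-blocks-table.sql via psql"

# Assumes the database already exists; deliberately not dependent on newdb.
blocks_db: data blocks_table
	@echo "import TIGER blocks and SF1 geo records, then combine them"

lines_db: data tiger_lines_table
	@echo "import the TIGER edge (lines) shapefiles"

# Recreate the database, then do nothing except fulfill the prerequisites.
db: newdb blocks_db lines_db

Keeping newdb out of blocks_db and lines_db is the point of the comment in
the first hunk below: either import can then be rerun against an existing
database without wiping the other's data.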

makefile

index 7d765c6cdaab32bf2a280020097a7455e5e91183..74729735303716524f63a12029e89bf658c6064a 100644 (file)
--- a/makefile
+++ b/makefile
@@ -45,14 +45,15 @@ data:
        bin/download_data
 
 
-# This imports the Tiger data using shp2pgsql. The shapefiles
-# should exist, since this task depends on the "data" task, which
-# downloads said shapefiles.
+# There is a small issue here with the blocks_db and lines_db
+# targets. Each of these requires that the database exists, and might
+# therefore depend on the newdb target. However, if /each/ of them
+# depends on newdb, the database will be dropped twice and the data
+# from one of {blocks, lines} would be lost.
 #
-# After the TIGER import is done, we use the sf1blocks2sql script to
-# parse and import the geographic header record information.
-#
-db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
+# We therefore assume that the database already exists when blocks_db
+# or lines_db are initiated.
+blocks_db: data blocks_table
 # All Blocks
 #
 # The table already exists, so we can append to it, and we don't have
@@ -67,6 +68,25 @@ db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
                        | psql -U $(DB_USER) -d $(DB_NAME); \
        done;
 
+# Summary File 1
+#
+# Run all of the geo (uf1) files through the import script. This has
+# to happen after the blocks import since we impose a foreign key
+# restriction.
+       for state in data/census2000/*; do                          \
+               bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks      \
+                          | psql -U $(DB_USER) -d $(DB_NAME)        \
+                          > /dev/null;                             \
+       done;
+
+# Run the query to combine the two blocks tables, and drop the
+# constituents.
+       psql -U $(DB_USER) \
+            -d $(DB_NAME) \
+            -f sql/combine-block-tables.sql
+
+
+lines_db: data tiger_lines_table
 # All Lines
 #
 # Since the table and index already exist, we can utilize -a,
@@ -85,16 +105,17 @@ db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
                done;                                               \
        done;
 
-# Summary File 1
+
+
+# This imports the Tiger data using shp2pgsql. The shapefiles
+# should exist, since this task depends on the "data" task, which
+# downloads said shapefiles.
 #
-# Run all of the geo (uf1) files through the import script. This has
-# to happen after the blocks import since we impose a foreign key
-# restriction.
-       for state in data/census2000/*; do                          \
-               bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks      \
-                          | psql -U postgres -d $(DB_NAME)          \
-                          > /dev/null;                             \
-       done;
+# After the TIGER import is done, we use the sf1blocks2sql script to
+# parse and import the geographic header record information.
+#
+db: newdb blocks_db lines_db
+       # Do nothing except fulfill our prerequisites.
 
 
 
@@ -141,9 +162,16 @@ tiger_blocks_table:
                -s $(TIGER_SRID)                                          \
                data/census2000/maryland/blocks/tl_2009_24_tabblock00.shp \
                tiger_blocks                                              \
-               | psql -U postgres -d $(DB_NAME)                          \
+               | psql -U $(DB_USER) -d $(DB_NAME)                        \
                 > /dev/null
 
+# Create the "blocks" table, which is the result of joining
+# the tiger_blocks and sf1_blocks tables.
+blocks_table: tiger_blocks_table sf1_blocks_table
+       psql -U $(DB_USER) \
+            -d $(DB_NAME) \
+            -f sql/create-blocks-table.sql
+
 
 # Prepare the tiger_lines table, and create the GiST index on its
 # geometry column. Any lines shapefile will do here.
@@ -154,10 +182,10 @@ tiger_lines_table:
                -s $(TIGER_SRID)                                       \
                data/census2000/maryland/lines/tl_2009_24510_edges.shp \
                tiger_lines                                            \
-               | psql -U postgres -d $(DB_NAME)                       \
+               | psql -U $(DB_USER) -d $(DB_NAME)                     \
                 > /dev/null
 
 # Add a unique index on the "tlid" column.
-       psql -U postgres \
-             -d census   \
+       psql -U $(DB_USER) \
+             -d $(DB_NAME) \
              -f sql/create_tlid_unique_index.sql
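
With the split in place, a typical invocation (assuming DB_USER and DB_NAME
are defined near the top of the makefile, which this diff does not show)
might look like:

	make newdb       # drop and recreate the database
	make blocks_db   # TIGER blocks + SF1 geographic header records, combined
	make lines_db    # TIGER edges

or, to rebuild everything from scratch in one step:

	make db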