# Connection settings for the target PostGIS database.
DB_NAME=census
DB_USER=postgres

# SRID used by all TIGER/Line shapefiles (NAD83).
TIGER_SRID=4269

# Source tarball for the Shapely geometry library (fetched by `make lib`).
SHAPELY_URL=http://pypi.python.org/packages/source/S/Shapely/Shapely-1.0.14.tar.gz
# Starting with PostGIS 1.4.0, these paths are calculated at install
# time using the pg_config utility. Rather than try to guess where
# NOTE(review): this comment appears truncated mid-sentence — recover the
# missing lines from version control.
# Necessary to run test/data without prerequisites.
#
# None of these targets name real files, so mark them all phony
# (including `all` and `clean`, which were previously undeclared).
.PHONY : all clean test data lib
# The default task, since it comes first in the list.
#
all: clean lib test

# Run the project's test suite.
test:
	./bin/run_tests
# Download or check out any third-party libraries.
#
# Fetches the Shapely source tarball and unpacks it under lib/Shapely;
# skipped entirely if that directory already exists.
lib:
	if [ ! -d lib/Shapely ]; then \
	  mkdir -p lib; \
	  wget -O shapely.tar.gz $(SHAPELY_URL); \
	  tar -xvzf shapely.tar.gz -C lib/ ; \
	  rm shapely.tar.gz; \
	  mv lib/Shapely* lib/Shapely; \
	fi;

# Remove byte-compiled python code.
#
clean:
	find ./ -name '*.pyc' -exec rm -f '{}' +
# Fetch any source data sets that are not already present locally.
data:
bin/download_data
-a \
-s $(TIGER_SRID) \
-D \
- $$state/block/*.shp \
+ $$state/blocks/*.shp \
tiger_blocks \
| psql -U $(DB_USER) -d $(DB_NAME); \
done;
# and leave -I out.
# Append every state's line shapefiles into tiger_lines. shp2pgsql
# output is filtered through bin/filter-transactions before psql, and
# psql's stdout is discarded to keep the build log readable.
	for state in data/census2000/*; do \
	  for shapefile in $$state/lines/*.shp; do \
	    echo "Importing $$shapefile."; \
	    $(PG_BINDIR)/shp2pgsql \
	      -a \
	      -s $(TIGER_SRID) \
	      $$shapefile \
	      tiger_lines \
	      | bin/filter-transactions \
	      | psql -U $(DB_USER) -d $(DB_NAME) \
	      > /dev/null; \
	  done; \
	done;
# Summary File 1
#
# Run all of the geo (uf1) files through the import script. This has
# to happen after the blocks import since we impose a foreign key
# restriction.
	for state in data/census2000/*; do \
	  bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks \
	    | psql -U $(DB_USER) -d $(DB_NAME) \
	    > /dev/null; \
	done;
# NOTE(review): fragment — the shp2pgsql command that these flags belong
# to is missing from this chunk; recover it from version control.
# -p creates the table only; -I builds a spatial (GiST) index.
	-p \
	-I \
	-s $(TIGER_SRID) \
	data/census2000/maryland/blocks/tl_2009_24_tabblock00.shp \
	tiger_blocks \
	| psql -U $(DB_USER) -d $(DB_NAME) \
	> /dev/null
tiger_lines \
| psql -U postgres -d $(DB_NAME) \
> /dev/null
+
+# Add a unique index on the "tlid" column.
+ psql -U postgres \
+ -d census \
+ -f sql/create_tlid_unique_index.sql