DB_NAME=census
DB_USER=postgres
TIGER_SRID=4269
+# Shapely source tarball. Fetched over HTTPS; pypi.python.org is retired,
+# so use PyPI's file host, which still serves legacy /packages/source paths.
+SHAPELY_URL=https://files.pythonhosted.org/packages/source/S/Shapely/Shapely-1.0.14.tar.gz
# Root folder for the shapefiles.
TIGER_ROOT=http://www2.census.gov/geo/tiger/TIGER2009
# Necessary to run test/data without prerequisites.
#
-.PHONY : test data
+# "all" and "clean" never produce files of those names, so they must be
+# phony too, or a stray file named "clean"/"all" would break them.
+.PHONY : all test data lib clean
# The default task, since it comes first in the list.
#
-all: clean test
+all: clean lib test
+# Run the full test suite via the project's test-runner script; the
+# target fails when the script exits non-zero.
test:
	./bin/run_tests
+# Download or check out any third-party libraries.
+#
+# Fetches the Shapely tarball into lib/Shapely unless it is already
+# present. "mkdir -p lib" is required first: tar's -C fails if the
+# destination directory does not exist (e.g. on a fresh checkout).
+lib:
+	if [ ! -d lib/Shapely ]; then \
+		mkdir -p lib; \
+		wget -O shapely.tar.gz $(SHAPELY_URL); \
+		tar -xvzf shapely.tar.gz -C lib/ ; \
+		rm shapely.tar.gz; \
+		mv lib/Shapely* lib/Shapely; \
+	fi;
+
+
# Remove byte-compiled python code.
#
clean:
# Download the shapefiles from Tiger if they don't already exist.
-# For now, we're only dealing with the Census 2000 Maryland Block
-# data, so the filenames are hard-coded. Easy enough to change.
-#
+# Each state's block data is fetched by its own sub-target below.
+#
data: tiger_blocks tiger_lines
tiger_blocks: dc_blocks md_blocks va_blocks pa_blocks ny_blocks
+# Download the DC Census-2000 block shapefile (state FIPS code 11,
+# per the tl_2009_11_* filename) unless it is already on disk.
+# Directories now use full state names instead of abbreviations.
dc_blocks:
-	mkdir -p data/census2000/dc/block
-	if [ ! -f data/census2000/dc/block/tl_2009_11_tabblock00.shp ]; \
+	mkdir -p data/census2000/district_of_columbia/block
+	if [ ! -f data/census2000/district_of_columbia/block/tl_2009_11_tabblock00.shp ]; \
	then \
		wget -O dcblocks.zip $(DC_BLOCKS_URL); \
-		unzip dcblocks.zip -d ./data/census2000/dc/block; \
+		unzip dcblocks.zip -d ./data/census2000/district_of_columbia/block; \
		rm dcblocks.zip; \
	fi;
| psql -U $(DB_USER) -d $(DB_NAME); \
done;
-# MD Lines
+# All Lines
+#
+# Import every state's line shapefiles under data/census2000/*/lines
+# into the tiger_lines table, one shapefile at a time.
#
# Since the table and index already exist, we can utilize -a,
# and leave -I out.
- for x in data/census2000/maryland/lines/*.shp; do \
- $(PG_BINDIR)/shp2pgsql \
- -a \
- -s $(TIGER_SRID) \
- -D \
- $$x \
- tiger_lines \
- | psql -U $(DB_USER) -d $(DB_NAME); \
+ for state in data/census2000/*; do \
+ for shapefile in $$state/lines/*.shp; do \
+ echo "Importing $$shapefile."; \
+ $(PG_BINDIR)/shp2pgsql \
+ -a \
+ -s $(TIGER_SRID) \
+ $$shapefile \
+ tiger_lines \
+ | bin/filter-transactions \
+ | psql -U $(DB_USER) -d $(DB_NAME) \
+ > /dev/null; \
+ done; \
done;
bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
tiger_lines \
| psql -U postgres -d $(DB_NAME) \
> /dev/null
+
+# Add a unique index on the "tlid" column.
+# Use the configured connection variables (DB_USER/DB_NAME, defined at
+# the top of the file) instead of hard-coding "postgres"/"census",
+# matching how the other psql invocations are written.
+	psql -U $(DB_USER) \
+	-d $(DB_NAME) \
+	-f sql/create_tlid_unique_index.sql