# Upstream Subversion trunk for the Shapely Python library.
SHAPELY_URL := http://svn.gispython.org/svn/gispy/Shapely/trunk

# Root folder for the shapefiles.
TIGER_ROOT := http://www2.census.gov/geo/tiger/TIGER2009

# State-specific folders.
DC_ROOT := $(TIGER_ROOT)/11_DISTRICT_OF_COLUMBIA
MD_ROOT := $(TIGER_ROOT)/24_MARYLAND
VA_ROOT := $(TIGER_ROOT)/51_VIRGINIA
PA_ROOT := $(TIGER_ROOT)/42_PENNSYLVANIA
NY_ROOT := $(TIGER_ROOT)/36_NEW_YORK

# URLs for the TIGER/Line block-level shapefiles.
DC_BLOCKS_URL := $(DC_ROOT)/tl_2009_11_tabblock00.zip
MD_BLOCKS_URL := $(MD_ROOT)/tl_2009_24_tabblock00.zip
VA_BLOCKS_URL := $(VA_ROOT)/tl_2009_51_tabblock00.zip
PA_BLOCKS_URL := $(PA_ROOT)/tl_2009_42_tabblock00.zip
NY_BLOCKS_URL := $(NY_ROOT)/tl_2009_36_tabblock00.zip
# Starting with PostGIS 1.4.0, these paths are calculated at install
# time using the pg_config utility. Rather than try to guess where
# PostGIS will wind up installed, we can just check the output of
# pg_config ourselves.
#
# Evaluate pg_config once, at parse time, via $(shell ...) and a
# simply-expanded assignment. The previous form stored literal
# backticks in a recursively-expanded variable, which only works when
# the variable happens to be expanded inside a shell recipe line, and
# re-runs pg_config in every recipe line that uses it.
PG_BINDIR := $(shell pg_config --bindir)
PG_SHAREDIR := $(shell pg_config --sharedir)
# Necessary to run test/data without prerequisites.
#
# Declare every command-style target as phony so a stray file or
# directory with the same name can never mask it. The names listed
# are the targets and prerequisites visible in this Makefile
# (see the data:, tiger_blocks:, and db: rules).
.PHONY : test data lib db newdb tiger_blocks tiger_lines dc_blocks md_blocks va_blocks pa_blocks ny_blocks tiger_blocks_table tiger_lines_table sf1_blocks_table
# The default task, since it comes first in the list.
# Download or check out any third-party libraries.
# NOTE(review): this chunk is truncated — the rule header for this
# checkout recipe and the closing branches of the if-statement
# (then/else/fi) are not visible here; presumably the else branch
# updates an existing checkout while this branch does the initial
# "svn co". Verify against the complete Makefile before editing.
if [ -d lib/Shapely ]; then \
svn co $(SHAPELY_URL) lib/Shapely; \
# Remove byte-compiled python code.
find ./ -name '*.pyc' -print0 | xargs -0 rm -f
# Download the shapefiles from Tiger if they don't already exist.
data: tiger_blocks tiger_lines
# One sub-target per state's Census-2000 block-level shapefile.
tiger_blocks: dc_blocks md_blocks va_blocks pa_blocks ny_blocks
# Each per-state recipe below follows the same pattern: create the
# destination directory, then fetch and unzip the TIGER archive only
# if the extracted .shp file is not already present.
# NOTE(review): the per-state rule headers (dc_blocks:, md_blocks:,
# ...) and the then/fi lines of each if-statement are missing from
# this chunk of the file; verify against the complete Makefile
# before editing these recipes.
mkdir -p data/census2000/district_of_columbia/block
if [ ! -f data/census2000/district_of_columbia/block/tl_2009_11_tabblock00.shp ]; \
wget -O dcblocks.zip $(DC_BLOCKS_URL); \
unzip dcblocks.zip -d ./data/census2000/district_of_columbia/block; \
mkdir -p data/census2000/maryland/block
if [ ! -f data/census2000/maryland/block/tl_2009_24_tabblock00.shp ]; \
wget -O mdblocks.zip $(MD_BLOCKS_URL); \
unzip mdblocks.zip -d ./data/census2000/maryland/block; \
mkdir -p data/census2000/virginia/block
if [ ! -f data/census2000/virginia/block/tl_2009_51_tabblock00.shp ]; \
wget -O vablocks.zip $(VA_BLOCKS_URL); \
unzip vablocks.zip -d ./data/census2000/virginia/block; \
mkdir -p data/census2000/pennsylvania/block
if [ ! -f data/census2000/pennsylvania/block/tl_2009_42_tabblock00.shp ]; \
wget -O pablocks.zip $(PA_BLOCKS_URL); \
unzip pablocks.zip -d ./data/census2000/pennsylvania/block; \
mkdir -p data/census2000/new_york/block
if [ ! -f data/census2000/new_york/block/tl_2009_36_tabblock00.shp ]; \
wget -O nyblocks.zip $(NY_BLOCKS_URL); \
unzip nyblocks.zip -d ./data/census2000/new_york/block; \
# This imports the Tiger data using shp2pgsql. The shapefiles
# should exist, since this task depends on the "data" task, which
# downloads said shapefiles.
# After the TIGER import is done, we use the sf1blocks2sql script to
# parse and import the geographic header record information.
db: data newdb tiger_blocks_table tiger_lines_table sf1_blocks_table
# The table already exists, so we can append to it, and we don't have
# to create the GiST index.
for state in data/census2000/*; do \
$(PG_BINDIR)/shp2pgsql \
$$state/block/*.shp \
| psql -U $(DB_USER) -d $(DB_NAME); \
# Since the table and index already exist, we can utilize -a,
# NOTE(review): both import loops appear truncated in this chunk
# (shp2pgsql flags and the shell "done" terminators are not visible).
# Also note the block/lines imports connect as -U $(DB_USER) while
# the sf1blocks2sql import below connects as -U postgres — confirm
# whether that inconsistency is intentional.
for state in data/census2000/*; do \
for shapefile in $$state/lines/*.shp; do \
echo "Importing $$shapefile."; \
$(PG_BINDIR)/shp2pgsql \
| bin/filter-transactions \
| psql -U $(DB_USER) -d $(DB_NAME) \
bin/sf1blocks2sql src/Tests/Fixtures/SummaryFile1/mdgeo.uf1 sf1_blocks \
| psql -U postgres -d $(DB_NAME) \
# First, we drop and re-create the DB_NAME database (or schema,
# whatever). Then, we add PL/pgSQL support to the database.
# At that point, we import the two PostGIS files, postgis.sql and
# spatial_ref_sys.sql. The postgis.sql file contains the geometry
# functions, while spatial_ref_sys.sql contains a table of SRIDs, and
# their associated properties. PostGIS requires both.
# Ignore the result of dropdb when it fails.
# (|| true keeps the recipe going when the database does not exist
# yet, e.g. on the very first run.)
# NOTE(review): createlang was removed in PostgreSQL 10, and plpgsql
# is installed by default since 9.0 — confirm which server versions
# this Makefile still needs to support.
dropdb -U $(DB_USER) $(DB_NAME) || true
createdb -U $(DB_USER) $(DB_NAME)
createlang -U $(DB_USER) plpgsql $(DB_NAME)
# NOTE(review): the psql invocations that these -f fragments belong
# to are not visible in this chunk of the file; verify against the
# complete Makefile before editing.
-f $(PG_SHAREDIR)/contrib/postgis.sql \
-f $(PG_SHAREDIR)/contrib/spatial_ref_sys.sql \
# This just runs the SQL script to create the sf1_blocks table.
-f sql/create-sf1_blocks-table.sql \
# Create the tiger_blocks table, and create its GiST index. Having the
# table already exist makes importing via shp2pgsql much easier.
# Any blocks file will work as an argument.
# NOTE(review): rule header and shp2pgsql flags for this recipe are
# missing from this chunk.
$(PG_BINDIR)/shp2pgsql \
data/census2000/maryland/block/tl_2009_24_tabblock00.shp \
| psql -U postgres -d $(DB_NAME) \
# Prepare the tiger_lines table, and create the GiST index on its
# geometry column. Any lines shapefile will do here.
$(PG_BINDIR)/shp2pgsql \
data/census2000/maryland/lines/tl_2009_24510_edges.shp \
| psql -U postgres -d $(DB_NAME) \
# Add a unique index on the "tlid" column.
-f sql/create_tlid_unique_index.sql