# Where to fetch the pinned Shapely source tarball from.
# Use ':=' (simple expansion) so the value is fixed at parse time, and
# https so the download is not fetched over cleartext HTTP.
# NOTE(review): the legacy pypi.python.org/packages/source/ path has been
# retired upstream; if the download starts failing, point this at the
# equivalent files.pythonhosted.org URL for Shapely 1.0.14.
SHAPELY_URL := https://pypi.python.org/packages/source/S/Shapely/Shapely-1.0.14.tar.gz
7 # Dark magic. We set these makefile variables to be the result of the
8 # 'shell' function. The shell function, in turn, executes a Python
9 # script which determines the locations of these files.
# Path to the shp2pgsql binary (the PostGIS shapefile loader).
# ':=' ensures the (expensive) shell probe runs once, at parse time.
# NOTE(review): '--single' presumably makes the script return exactly one
# match -- confirm against bin/find_file_paths.
SHP2PGSQL := $(shell bin/find_file_paths --root /usr --single shp2pgsql)
# Two candidate filenames are searched; lwpostgis.sql appears to be the
# older PostGIS (pre-1.3) name for what is now postgis.sql -- TODO confirm.
POSTGIS_SQL := $(shell bin/find_file_paths --root /usr lwpostgis.sql postgis.sql)
# SQL file defining the spatial_ref_sys table (SRID definitions) that
# PostGIS requires alongside postgis.sql.
SPATIAL_REF_SYS_SQL := $(shell bin/find_file_paths --root /usr spatial_ref_sys.sql)
# Declare these as phony: they are command names, not files, so make must
# run them even though real 'test', 'data', and 'lib' paths may exist.
16 .PHONY : test data lib
# The default target: make builds the first target in the file when none
# is named on the command line.
28 # Download or check out any third-party libraries.
30 if [ ! -d lib/Shapely ]; then \
31 wget -O shapely.tar.gz $(SHAPELY_URL); \
32 tar -xvzf shapely.tar.gz -C lib/ ; \
34 mv lib/Shapely* lib/Shapely; \
38 # Remove byte-compiled python code.
41 find ./ -name '*.pyc' -print0 | xargs -0 rm -f
48 # There is a small issue here with the blocks_db and lines_db
49 # targets. Each of these requires that the database exists, and might
50 # therefore depend on the newdb target. However, if /each/ of them
51 # depends on newdb, the database will be dropped twice and the data
52 # from one of {blocks, lines} would be lost.
54 # We therefore assume that the database already exists when blocks_db
# or lines_db is invoked.
56 blocks_db: data blocks_table
59 # The table already exists, so we can append to it, and we don't have
60 # to create the GiST index.
61 for state in data/census2000/*; do \
66 $$state/blocks/*.shp \
68 | psql -U $(DB_USER) -d $(DB_NAME); \
73 # Run all of the geo (uf1) files through the import script. This has
74 # to happen after the blocks import since we impose a foreign key
76 for state in data/census2000/*; do \
77 bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks \
78 | psql -U $(DB_USER) -d $(DB_NAME) \
82 # Run the query to combine the two blocks tables, and drop the
86 -f sql/combine-block-tables.sql
89 lines_db: data tiger_lines_table
92 # Since the table and index already exist, we can utilize -a,
94 for state in data/census2000/*; do \
95 for shapefile in $$state/lines/*.shp; do \
96 echo "Importing $$shapefile."; \
102 | bin/filter-transactions \
103 | psql -U $(DB_USER) -d $(DB_NAME) \
110 # This imports the Tiger data using shp2pgsql. The shapefiles
111 # should exist, since this task depends on the "data" task, which
112 # downloads said shapefiles.
114 # After the TIGER import is done, we use the sf1blocks2sql script to
115 # parse and import the geographic header record information.
117 db: newdb blocks_data lines_data
118 # Do nothing except fulfill our prerequisites.
# First, we drop and re-create the DB_NAME database (or schema, depending
# on your RDBMS's terminology). Then, we add PL/pgSQL support to the
# database.
125 # At that point, we import the two PostGIS files, postgis.sql and
126 # spatial_ref_sys.sql. The postgis.sql file contains the geometry
127 # functions, while spatial_ref_sys.sql contains a table of SRIDs, and
128 # their associated properties. PostGIS requires both.
# Ignore the result of dropdb when it fails (e.g. the database does not
# exist yet on a fresh machine).
# NOTE(review): createlang was removed in PostgreSQL 10; on modern servers
# PL/pgSQL is installed by default, or use CREATE EXTENSION plpgsql.
132 dropdb -U $(DB_USER) $(DB_NAME) || true
133 createdb -U $(DB_USER) $(DB_NAME)
134 createlang -U $(DB_USER) plpgsql $(DB_NAME)
143 -f $(SPATIAL_REF_SYS_SQL) \
147 # This just runs the SQL script to create the sf1_blocks table.
151 -f sql/create-sf1_blocks-table.sql \
155 # Create the tiger_blocks table, and create its GiST index. Having the
156 # table already exist makes importing via shp2pgsql much easier.
157 # Any blocks file will work as an argument.
163 data/census2000/maryland/blocks/tl_2009_24_tabblock00.shp \
165 | psql -U $(DB_USER) -d $(DB_NAME) \
168 # Create the "blocks" table, which is the result of joining
169 # the tiger_blocks and sf1_blocks tables.
170 blocks_table: tiger_blocks_table sf1_blocks_table
173 -f sql/create-blocks-table.sql
176 # Prepare the tiger_lines table, and create the GiST index on its
177 # geometry column. Any lines shapefile will do here.
183 data/census2000/maryland/lines/tl_2009_24510_edges.shp \
185 | psql -U $(DB_USER) -d $(DB_NAME) \
188 # Add a unique index on the "tlid" column.
191 -f sql/create_tlid_unique_index.sql