DB_NAME=census
DB_USER=postgres
TIGER_SRID=4269
SHAPELY_URL=http://pypi.python.org/packages/source/S/Shapely/Shapely-1.0.14.tar.gz


# Dark magic. We set these makefile variables to be the result of the
# 'shell' function. The shell function, in turn, executes a Python
# script which determines the locations of these files.
SHP2PGSQL := $(shell bin/find_file_paths --root /usr --single shp2pgsql)
POSTGIS_SQL := $(shell bin/find_file_paths --root /usr lwpostgis.sql postgis.sql)
SPATIAL_REF_SYS_SQL := $(shell bin/find_file_paths --root /usr spatial_ref_sys.sql)
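
# For reference, on a typical PostGIS installation these should expand
# to something like the following (the exact paths are an assumption
# and vary by distribution and PostGIS version):
#
#   SHP2PGSQL           = /usr/bin/shp2pgsql
#   POSTGIS_SQL         = /usr/share/postgresql/contrib/postgis.sql
#   SPATIAL_REF_SYS_SQL = /usr/share/postgresql/contrib/spatial_ref_sys.sql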

# These targets don't produce files named after themselves, so they
# are marked phony to ensure their recipes always run.
#
.PHONY : test data lib


# The default task, since it comes first in the list.
#
all: clean lib test


test:
	./bin/run_tests


# Download or check out any third-party libraries.
lib:
	if [ ! -d lib/Shapely ]; then \
	  wget -O shapely.tar.gz $(SHAPELY_URL); \
	  tar -xvzf shapely.tar.gz -C lib/; \
	  rm shapely.tar.gz; \
	  mv lib/Shapely* lib/Shapely; \
	fi;


# Remove byte-compiled python code.
#
clean:
	find ./ -name '*.pyc' -print0 | xargs -0 rm -f


data:
	bin/download_data


# There is a small issue here with the blocks_db and lines_db
# targets. Each of these requires that the database exists, and might
# therefore depend on the newdb target. However, if /each/ of them
# depends on newdb, the database will be dropped twice and the data
# from one of {blocks, lines} will be lost.
#
# We therefore assume that the database already exists when blocks_db
# or lines_db is run.
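#
# In practice that means running something along the lines of
#
#   make newdb && make blocks_db lines_db
#
# which is roughly what the "db" target below does via its
# prerequisite list.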
blocks_db: data blocks_table
# All Blocks
#
# The table already exists, so we can append to it, and we don't have
# to create the GiST index.
	for state in data/census2000/*; do \
	  $(SHP2PGSQL) \
	    -a \
	    -s $(TIGER_SRID) \
	    -D \
	    $$state/blocks/*.shp \
	    tiger_blocks \
	  | psql -U $(DB_USER) -d $(DB_NAME); \
	done;

# Summary File 1
#
# Run all of the geo (uf1) files through the import script. This has
# to happen after the blocks import since we impose a foreign key
# restriction.
	for state in data/census2000/*; do \
	  bin/sf1blocks2sql $$state/sf1/*.uf1 sf1_blocks \
	  | psql -U $(DB_USER) -d $(DB_NAME) \
	  > /dev/null; \
	done;

# Run the query to combine the two blocks tables, and drop the
# constituents.
	psql -U $(DB_USER) \
	     -d $(DB_NAME) \
	     -f sql/combine-block-tables.sql
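
# For debugging, one iteration of the blocks loop above can be run by
# hand; something like the following (the "maryland" directory is just
# an example state) should be equivalent:
#
#   shp2pgsql -a -s 4269 -D data/census2000/maryland/blocks/*.shp tiger_blocks \
#     | psql -U postgres -d census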


lines_db: data tiger_lines_table
# All Lines
#
# Since the table and index already exist, we can utilize -a,
# and leave -I out.
	for state in data/census2000/*; do \
	  for shapefile in $$state/lines/*.shp; do \
	    echo "Importing $$shapefile."; \
	    $(SHP2PGSQL) \
	      -a \
	      -s $(TIGER_SRID) \
	      $$shapefile \
	      tiger_lines \
	    | bin/filter-transactions \
	    | psql -U $(DB_USER) -d $(DB_NAME) \
	    > /dev/null; \
	  done; \
	done;



# This imports the Tiger data using shp2pgsql. The shapefiles
# should exist, since this task depends on the "data" task, which
# downloads said shapefiles.
#
# After the TIGER import is done, we use the sf1blocks2sql script to
# parse and import the geographic header record information.
#
db: newdb blocks_db lines_db
# Do nothing except fulfill our prerequisites.
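#
# A full rebuild of the database should therefore be a single "make db"
# (possibly preceded by "make lib", assuming the import scripts need
# the Shapely library).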



# First, we drop and re-create the DB_NAME database (or schema,
# whatever). Then, we add PL/pgSQL support to the database.
#
# At that point, we import the two PostGIS files, postgis.sql and
# spatial_ref_sys.sql. The postgis.sql file contains the geometry
# functions, while spatial_ref_sys.sql contains a table of SRIDs, and
# their associated properties. PostGIS requires both.
#
newdb:
# Ignore the result of dropdb when it fails.
	dropdb -U $(DB_USER) $(DB_NAME) || true
	createdb -U $(DB_USER) $(DB_NAME)
	createlang -U $(DB_USER) plpgsql $(DB_NAME)

	psql -d $(DB_NAME) \
	     -U $(DB_USER) \
	     -f $(POSTGIS_SQL) \
	     > /dev/null

	psql -d $(DB_NAME) \
	     -U $(DB_USER) \
	     -f $(SPATIAL_REF_SYS_SQL) \
	     > /dev/null
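
# A quick sanity check after newdb (assuming PostGIS loaded cleanly)
# is something like:
#
#   psql -U postgres -d census -c 'SELECT postgis_full_version();'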


# This just runs the SQL script to create the sf1_blocks table.
sf1_blocks_table:
	psql -d $(DB_NAME) \
	     -U $(DB_USER) \
	     -f sql/create-sf1_blocks-table.sql \
	     > /dev/null


# Create the tiger_blocks table, and create its GiST index. Having the
# table already exist makes importing via shp2pgsql much easier.
# Any blocks file will work as an argument.
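#
# (With -p, shp2pgsql runs in "prepare" mode: it should emit only the
# table definition, plus the GiST index when -I is given, and insert
# no rows.)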
tiger_blocks_table:
	$(SHP2PGSQL) \
	  -p \
	  -I \
	  -s $(TIGER_SRID) \
	  data/census2000/maryland/blocks/tl_2009_24_tabblock00.shp \
	  tiger_blocks \
	| psql -U $(DB_USER) -d $(DB_NAME) \
	> /dev/null

# Create the "blocks" table, which is the result of joining
# the tiger_blocks and sf1_blocks tables.
blocks_table: tiger_blocks_table sf1_blocks_table
	psql -U $(DB_USER) \
	     -d $(DB_NAME) \
	     -f sql/create-blocks-table.sql


# Prepare the tiger_lines table, and create the GiST index on its
# geometry column. Any lines shapefile will do here.
tiger_lines_table:
	$(SHP2PGSQL) \
	  -p \
	  -I \
	  -s $(TIGER_SRID) \
	  data/census2000/maryland/lines/tl_2009_24510_edges.shp \
	  tiger_lines \
	| psql -U $(DB_USER) -d $(DB_NAME) \
	> /dev/null

# Add a unique index on the "tlid" column.
	psql -U $(DB_USER) \
	     -d $(DB_NAME) \
	     -f sql/create_tlid_unique_index.sql