Browse Source

Refactor Python script to generate heightmaps and metadata

Phyks (Lucas Verney) 4 years ago
parent
commit
bc3eb2e5ea

+ 0
- 242
scripts/bd_alti.py View File

@@ -1,242 +0,0 @@
1
-#!/usr/bin/env python3
2
-"""
3
-This script filters the BD ALTI from IGN (ESRI grid) to keep only the relevant
4
-files for the specified area.
5
-
6
-Input is an ESRI grid (ASCII) file from the IGN BD ALTI database.
7
-(https://en.wikipedia.org/wiki/Esri_grid#ASCII)
8
-
9
-Output is an Azavea Raster Grid format (ARG) file.
10
-http://geotrellis.io/documentation/0.9.0/geotrellis/io/arg/
11
-
12
-Metadata are going to the standard output, and should be appended to
13
-config.json file in ressources folder.
14
-
15
-Requires the pyproj module.
16
-"""
17
-
18
-import json
19
-import math
20
-import os
21
-import pyproj
22
-import struct
23
-import sys
24
-
25
-
26
-def has_overlap(a, b):
27
-    """
28
-    a and b are two intervals (list of bounds)
29
-
30
-    Returns
31
-    * False if no overlap between the intervals represented by a and b,
32
-    * True otherwise
33
-    """
34
-    return (max(0, min(a[1], b[1]) - max(a[0], b[0])) > 0)
35
-
36
-
37
-def parse_header(fh):
38
-    """
39
-    Parses the header of an ESRI (ASCII) altitude file.
40
-    """
41
-    ncols = int(fh.readline().strip().split(" ")[-1])
42
-    nrows = int(fh.readline().strip().split(" ")[-1])
43
-    xllcorner = float(fh.readline().strip().split(" ")[-1])
44
-    yllcorner = float(fh.readline().strip().split(" ")[-1])
45
-    cellsize = float(fh.readline().strip().split(" ")[-1])
46
-    nodata = float(fh.readline().strip().split(" ")[-1])
47
-
48
-    return ncols, nrows, xllcorner, yllcorner, cellsize, nodata
49
-
50
-
51
-def latlng_to_lambert93(lat, lng):
52
-    """
53
-    Converts a latitude and longitude to Lambert93 X, Y representation.
54
-
55
-    Returns x, y
56
-    """
57
-    wgs84 = pyproj.Proj("+init=EPSG:4326")
58
-    lambert = pyproj.Proj("+init=EPSG:2154")
59
-    x, y = pyproj.transform(wgs84, lambert, lng, lat)
60
-
61
-    return x, y
62
-
63
-
64
-def find_relevant_files(x_min, y_min, x_max, y_max, data_folder):
65
-    """
66
-    Finds the files with relevant points from the data folder.
67
-
68
-    Returns a list of filenames.
69
-    """
70
-    founds = []
71
-    for f in os.listdir(data_folder):
72
-        if not(f.endswith(".asc")):
73
-            continue
74
-        file = os.path.join(data_folder, f)
75
-        with open(file, 'r') as fh:
76
-            ncols, nrows, xllcorner, yllcorner, cellsize, nodata = parse_header(fh)
77
-
78
-            if has_overlap([x_min, x_max],
79
-                           [xllcorner, xllcorner + cellsize * ncols]):
80
-                if has_overlap([y_min, y_max],
81
-                               [yllcorner, yllcorner + cellsize * nrows]):
82
-                    founds.append(file)
83
-    return founds
84
-
85
-
86
-def pack(fmt, nodata, value):
87
-    """
88
-    given fmt and nodata, encodes a value as bytes
89
-    """
90
-    if value is None:
91
-        value = nodata
92
-    return struct.pack(fmt, value)
93
-
94
-
95
-# packs the given values together as bytes
96
-def encode(fmt, nodata, values):
97
-    """
98
-    Packs the given values together as bytes.
99
-    """
100
-    chunks = [pack(fmt, nodata, v) for v in values]
101
-    return b''.join(chunks)
102
-
103
-
104
-def format_arg_out(points, no_data=-99999):
105
-    """
106
-    Format the list of points in arg format. no_data is the value associated to
107
-    no data.
108
-
109
-    Returns out, metadata where out is a binary content ready to be written to
110
-    a file and metadata is a JSON file containing arg metadata.
111
-
112
-    We expect cellheight and cellwidth to be equal and constant across the
113
-    dataset.
114
-
115
-    We assume there is no holes in the data.
116
-    """
117
-    # Sort points list by decreasing Y and then by increasing X
118
-    # (from NW to SE), as it will be output in the ARG file
119
-    points.sort(key=lambda e: (-e[1], e[0]))
120
-
121
-    # Set metadata
122
-    x_min = points[0][0]
123
-    y_min = points[-1][1]
124
-    x_max = points[-1][0]
125
-    y_max = points[0][1]
126
-    cellwidth = points[1][0] - points[0][0]  # X difference between the first two points
127
-    cellheight = cellwidth  # Assume square cells
128
-    metadata = {
129
-        "datatype": "int16",
130
-        "x_min": x_min,
131
-        "y_min": y_min,
132
-        "x_max": x_max,
133
-        "y_max": y_max,
134
-        "cellwidth": cellwidth,
135
-        "cellheight": cellheight,
136
-        "cols": (x_max - x_min) // cellwidth,
137
-        "rows": (y_max - y_min) // cellheight
138
-    }
139
-
140
-    # Get only the z values, replacing the NO_DATA values by None
141
-    z_values = [i[2] if i[2] != no_data else None for i in points]
142
-    max_z = max(z_values)
143
-    min_z = min(z_values)
144
-    format = ">h"
145
-    nodata_out = -(1 << 15)  # -32 768
146
-    min_value = -(1 << 15) + 1  # -32 767
147
-    max_value = (1 >> 15) - 1  # 32 767
148
-    # Convert floating z_values to int levels
149
-    z_values = [int((i - min_z) / (max_z - min_z) * (max_value - min_value) +
150
-        min_value)
151
-        for i in z_values]
152
-    # Get the output as a binary string
153
-    out = encode(format, nodata_out, z_values)
154
-
155
-    return out, metadata
156
-
157
-
158
-if __name__ == "__main__":
159
-    if len(sys.argv) < 7:
160
-        print("Usage: " +
161
-              sys.argv[0] +
162
-              " LAT_MIN LNG_MIN LAT_MAX LNG_MAX DATA_FOLDER OUT_FILE")
163
-        sys.exit()
164
-
165
-    # Parse parameters
166
-    lat_min = float(sys.argv[1])
167
-    lng_min = float(sys.argv[2])
168
-    lat_max = float(sys.argv[3])
169
-    lng_max = float(sys.argv[4])
170
-    data_folder = os.path.expanduser(sys.argv[5])
171
-    out_file = sys.argv[6]
172
-
173
-    print(("Looking for data between latitudes and longitudes " +
174
-           "({0}, {1}) and ({2}, {3})").format(lat_min,
175
-                                               lng_min,
176
-                                               lat_max,
177
-                                               lng_max))
178
-
179
-    # Convert latitude / longitude coordinates to Lambert93 projection
180
-    x_min, y_min = latlng_to_lambert93(lat_min, lng_min)
181
-    x_max, y_max = latlng_to_lambert93(lat_max, lng_max)
182
-
183
-    if x_min > x_max:
184
-        x_min, x_max = x_max, x_min
185
-    if y_min > y_max:
186
-        y_min, y_max = y_max, y_min
187
-
188
-    print(("Looking for data between map coordinates " +
189
-           "({0}, {1}) and ({2}, {3})").format(x_min, y_min, x_max, y_max))
190
-
191
-    # Loop over all the available data files and find those with relevant points
192
-    founds = find_relevant_files(x_min, y_min, x_max, y_max, data_folder)
193
-
194
-    if len(founds) == 0:
195
-        print("\nNo matching dataset found =(.")
196
-        sys.exit(1)
197
-    else:
198
-        print("\nMatching datasets:")
199
-        print("\n".join(founds))
200
-
201
-    # Extract relevant parts from the datasets and append them to output
202
-    points = []  # Store relevant points as (X, Y, Z) tuples
203
-    for f in founds:
204
-        with open(f, 'r') as fh:
205
-            # We expect all of the files to have same cellsize parameter
206
-            ncols, nrows, xllcorner, yllcorner, cellsize, nodata = parse_header(fh)
207
-
208
-            # Fetch bounds in this file
209
-            col_min = math.floor((x_min - xllcorner) / cellsize)
210
-            col_max = math.ceil((x_max - xllcorner) / cellsize)
211
-            # The (0, 0) point is the lower left one, that is on the last line.
212
-            row_max = nrows - math.floor((y_min - yllcorner) / cellsize)
213
-            row_min = nrows - math.ceil((y_max - yllcorner) / cellsize)
214
-
215
-            i = 0
216
-            for line in fh.readlines():
217
-                if i >= row_min and i <= row_max:
218
-                    row = [float(j) for j in line.strip().split(" ")]
219
-                    for j in range(col_min, col_max):
220
-                        points.append((xllcorner + j * cellsize,
221
-                                       yllcorner + (nrows - i) * cellsize,
222
-                                       row[j]))
223
-                i += 1
224
-
225
-    # Get ARG formatted data
226
-    out, metadata = format_arg_out(points)
227
-
228
-    # Filter metadata according to our needs for config
229
-    metadata = {k.upper(): v for k, v in metadata.items() if k != "datatype"}
230
-    metadata["N_ROWS"] = metadata.pop("ROWS")
231
-    metadata["N_COLS"] = metadata.pop("COLS")
232
-
233
-    # Write raw file to out_file
234
-    with open(out_file, 'wb') as fh:
235
-        fh.write(out)
236
-
237
-    # Print metadata to stdout, as JSON
238
-    print("\n\nConfig file:")
239
-    print(json.dumps(metadata,
240
-                     sort_keys=True,
241
-                     indent=4,
242
-                     separators=(',', ': ')))

+ 71
- 0
scripts/libskimap/esri.py View File

@@ -0,0 +1,71 @@
1
+"""
2
+    This module contains ESRI grid (ASCII) specific functions.
3
+    https://en.wikipedia.org/wiki/Esri_grid#ASCII
4
+"""
5
+
6
+import math
7
+
8
+import tools
9
+
10
+
11
def parse_header(fh):
    """
    Parse the six-line header of an opened ESRI (ASCII) altitude file.

    Returns (ncols, nrows, xllcorner, yllcorner, cellsize, nodata).
    """
    def read_field(cast):
        # Each header line is "<name> <value>"; keep only the value.
        return cast(fh.readline().strip().split(" ")[-1])

    ncols = read_field(int)
    nrows = read_field(int)
    xllcorner = read_field(float)
    yllcorner = read_field(float)
    cellsize = read_field(float)
    nodata = read_field(float)

    return ncols, nrows, xllcorner, yllcorner, cellsize, nodata
23
+
24
+
25
def find_relevant_points(x_min, y_min, x_max, y_max, in_files,
                         verbose=True):
    """
    Find the points of the target geographic region in the given ESRI files.

    x_min, y_min, x_max, y_max specify the target geographic region, in
    Lambert93 coordinates. in_files is a list of ESRI (ASCII) file paths.
    If verbose is True, the name of each matching dataset is printed.

    Returns a list of relevant (X, Y, Z) points.
    """
    points = []
    for file in in_files:
        with open(file, 'r') as fh:
            # Parse file header
            ncols, nrows, xllcorner, yllcorner, cellsize, nodata = \
                parse_header(fh)

            # Check overlap between the area covered by the file and the
            # target area; if there is none, go on with the next file.
            if((not tools.has_overlap([x_min, x_max],
                                      [xllcorner,
                                       xllcorner + cellsize * ncols])) or
               (not tools.has_overlap([y_min, y_max],
                                      [yllcorner,
                                       yllcorner + cellsize * nrows]))):
                continue

            if verbose:
                print("Relevant dataset: " + file)

            # Compute which part of the file to keep (rows / cols)
            col_min = math.floor((x_min - xllcorner) / cellsize)
            col_max = math.ceil((x_max - xllcorner) / cellsize)
            # The (0, 0) point is the lower left one, that is on the last line.
            row_max = nrows - math.floor((y_min - yllcorner) / cellsize)
            row_min = nrows - math.ceil((y_max - yllcorner) / cellsize)

            # BUGFIX: the row counter must advance once per data line; the
            # previous version incremented it outside the loop, so every
            # line was treated as row 0.
            for i, line in enumerate(fh):
                if row_min <= i <= row_max:
                    row = [float(j) for j in line.strip().split(" ")]
                    for j in range(col_min, col_max):
                        points.append((xllcorner + j * cellsize,
                                       yllcorner + (nrows - i) * cellsize,
                                       row[j]))
    return points

+ 15
- 0
scripts/libskimap/geodata.py View File

@@ -0,0 +1,15 @@
1
+"""
2
+    This module contains useful functions for geographic data conversions.
3
+"""
4
+import pyproj
5
+
6
+
7
def latlng_to_lambert93(lat, lng):
    """
    Convert a (latitude, longitude) pair to Lambert93 (X, Y) coordinates.
    """
    source = pyproj.Proj("+init=EPSG:4326")  # WGS84 latitude / longitude
    target = pyproj.Proj("+init=EPSG:2154")  # Lambert93
    # pyproj expects (lng, lat) ordering for EPSG:4326.
    return pyproj.transform(source, target, lng, lat)

+ 104
- 0
scripts/libskimap/osm.py View File

@@ -0,0 +1,104 @@
1
+"""
2
+    This module contains functions to fetch OSM metadata.
3
+
4
+    It filters the OpenStreetMap XML data file and exports only the
5
+    geographically relevant information in a JSON file.
6
+"""
7
+import xml.etree.ElementTree as ET
8
+
9
+import geodata
10
+
11
+
12
def fetch_metadata(lat_min, lng_min, lat_max, lng_max, data_file=""):
    """
    Extract the relevant information from the OSM metadata (slopes,
    aerialways, and their associated metadata).

    lat_min, lng_min, lat_max, lng_max specify the target geographic region.
    data_file is the path to a downloaded OSM XML file. If data_file is not
    specified, the script will query the OSM API (not implemented yet; an
    empty list is returned in that case).

    Returns a list of dicts, one per matching way.
    """
    # ways is the returned list of matching items
    ways = []
    # BUGFIX: compare with a falsiness test, not "is" (identity of string
    # literals is implementation-defined), and do not fall through to
    # parsing an empty path.
    if not data_file:
        # TODO: Get the metadata from the OSM API
        return ways

    # Parse the input XML file
    root = ET.parse(data_file).getroot()

    # Find geographically relevant nodes, by iterating over all of them.
    # Nodes are points at the ends of ways, so we will look for ways using
    # these nodes and only these nodes.
    nodes = {}
    for node in root.iter("node"):
        lat = float(node.attrib["lat"])
        lng = float(node.attrib["lon"])
        if lat_min <= lat <= lat_max and lng_min <= lng <= lng_max:
            # If the node is within the target area, store it and convert its
            # coordinates to Lambert93
            x, y = geodata.latlng_to_lambert93(lat, lng)
            id = node.attrib["id"]
            nodes[id] = {"id": id,
                         "lat": lat,
                         "lng": lng,
                         "x": x,
                         "y": y}

    # Maps the relevant OSM tag keys to the corresponding way dict keys.
    tag_keys = {
        "name": "name",
        "piste:difficulty": "difficulty",
        "piste:type": "type",
        "aerialway": "type",
        "aerialway:bubble": "bubble",
        "aerialway:capacity": "capacity",
        "aerialway:duration": "duration",
        "aerialway:heating": "heating",
        "aerialway:occupancy": "occupancy",
    }

    # Now, iterate over all the ways
    for way in root.iter("way"):
        # Way element to append to the way list if within the given area.
        # BUGFIX: this is a dict, so all accesses below use subscripting;
        # the previous version used attribute access, which raises
        # AttributeError.
        way_to_append = {
            "name": "",
            "type": "",
            "nodes": [],
            # Slopes-specific attributes
            "difficulty": "",
            # Aerial ways specific attributes
            "bubble": "",
            "capacity": "",
            "duration": "",
            "heating": "",
            "occupancy": ""
        }
        # Flag indicating that the way is not within the given area
        ignore_way = False
        for nd in way.iter("nd"):
            # Fetch all the nodes associated to the current way
            ref = nd.attrib["ref"]
            if ref in nodes:
                way_to_append["nodes"].append(nodes[ref])
            else:
                # If one of them is not within the given area, ignore the way
                ignore_way = True
        if ignore_way or not way_to_append["nodes"]:
            continue
        # Parse the tag elements for the current way, which contain the
        # metadata; store each relevant tag value in the matching dict key.
        for tag in way.iter("tag"):
            key = tag_keys.get(tag.attrib["k"])
            if key is not None:
                way_to_append[key] = tag.attrib["v"]
        # Finally, if we got there, the way is ok and the way dict can be
        # appended to the ways list
        ways.append(way_to_append)
    # Return a list of relevant ways with associated metadata
    return ways


# Backward-compatible alias for the original (misspelled) public name.
fetch_metadta = fetch_metadata

+ 80
- 0
scripts/libskimap/output.py View File

@@ -0,0 +1,80 @@
1
+"""
2
+    This module contains functions to output data in an Azavea Raster Grid
3
+    (ARG) file (slightly modified implementation to fit with our needs).
4
+    http://geotrellis.io/documentation/0.9.0/geotrellis/io/arg/
5
+"""
6
+import tools
7
+
8
+
9
def format_arg(points):
    """
    Given a list of (X, Y, Z) points, format it as a byte string in ARG
    format, ready to be written in a file. Returns out, metadata where out is
    the binary content of the main ARG file and metadata is a dict of the
    associated metadata.

    In this function, we assume cellheight and cellwidth are the same and are
    constant across the dataset. We also assume there is no missing data
    between two abscissas or ordinates, and that there are at least two
    points.

    Absence of data is represented by None as the Z value of a point.
    """
    # Sort points list by decreasing Y and then by increasing X
    # (from NW to SE), as it will be outputted in the ARG file
    points.sort(key=lambda e: (-e[1], e[0]))

    # Set metadata; the corners come from the sorted extremities.
    x_min = points[0][0]
    y_min = points[-1][1]
    x_max = points[-1][0]
    y_max = points[0][1]
    # cellwidth is the X difference between the first two points
    cellwidth = points[1][0] - points[0][0]
    # Assume square cells
    cellheight = cellwidth
    metadata = {
        # ARG metadata
        "datatype": "int16",
        "x_min": x_min,
        "y_min": y_min,
        "x_max": x_max,
        "y_max": y_max,
        "cellwidth": cellwidth,
        "cellheight": cellheight,
        "cols": (x_max - x_min) // cellwidth,
        "rows": (y_max - y_min) // cellheight
    }

    # BUGFIX: compare altitudes (Z values) and not whole (X, Y, Z) tuples,
    # and ignore missing data (None) when computing the bounds.
    z_values = [p[2] for p in points]
    known_altitudes = [z for z in z_values if z is not None]
    max_altitude = max(known_altitudes)
    min_altitude = min(known_altitudes)

    # Set output binary format parameters
    fmt = ">h"
    nodata_out = -(1 << 15)  # -32 768
    min_value = -(1 << 15) + 1  # -32 767
    # BUGFIX: the previous version used a right shift (1 >> 15 == 0),
    # yielding -1 instead of 32 767.
    max_value = (1 << 15) - 1  # 32 767

    # Convert floating altitudes to ints in [min_value, max_value], keeping
    # None so that tools.encode substitutes nodata_out for it.
    altitude_span = max_altitude - min_altitude
    if altitude_span == 0:
        # All known altitudes are equal: map everything to min_value to
        # avoid a division by zero.
        z_values = [None if z is None else min_value for z in z_values]
    else:
        z_values = [None if z is None else
                    int((z - min_altitude) / altitude_span *
                        (max_value - min_value) + min_value)
                    for z in z_values]
    # Get the output as a binary string
    out = tools.encode(fmt, nodata_out, z_values)

    # Extra (custom) metadata.
    # BUGFIX: the previous version initialized a "mean_height" key but then
    # updated "mean_altitude", leaving a stale zero in the metadata.
    metadata["mean_altitude"] = (max_altitude + min_altitude) / 2

    return out, metadata
69
+
70
+
71
def arg_metadata_to_JSON_config(original_metadata):
    """
    Convert the metadata dict associated to an ARG file to the JSON string
    expected for the config.json file in the app.
    """
    config = {}
    for key, value in original_metadata.items():
        # The datatype entry is ARG-specific and not part of the app config.
        if key == "datatype":
            continue
        config[key.upper()] = value
    config["N_ROWS"] = config.pop("ROWS")
    config["N_COLS"] = config.pop("COLS")
    return tools.pretty_json(config)

+ 55
- 0
scripts/libskimap/srtm.py View File

@@ -0,0 +1,55 @@
1
+"""
2
+    This module contains HGT-specific functions
3
+    https://gis.stackexchange.com/questions/43743/how-to-extract-elevation-from-hgt-file
4
+
5
+    These functions are useful for the SRTM dataset which is provided as HGT
6
+    files.
7
+"""
8
+
9
+import struct
10
+
11
+
12
def lat_lng_to_row_col(lat, lng):
    """
    Convert a (latitude, longitude) point to a (row, col) point in the HGT
    dataset.
    """
    # 3 arc-second sampling: 3600 / 3 = 1200 cells per degree. The first row
    # in the file is very likely the northernmost one, hence the inversion.
    cells_per_degree = 3600 / 3
    row = 1201 - round((lng % 1) * cells_per_degree)
    col = round((lat % 1) * cells_per_degree)
    return row, col
20
+
21
+
22
def find_relevant_points(lat_min, lng_min, lat_max, lng_max, in_file,
                         verbose=True):
    """
    Find the points of the target geographic region which are in in_file
    (an HGT file from the SRTM dataset).

    lat_min, lng_min, lat_max, lng_max specify the target geographic region.
    verbose is currently unused and kept for interface consistency with the
    other find_relevant_points functions.

    Returns a list of relevant (X, Y, Z) points, where X and Y are pixel
    coordinates in the dataset.

    TODO: This function may not be fully working.
    """
    # Returned list of points
    points = []

    # Find the corresponding rectangle in the dataset
    row_min, col_min = lat_lng_to_row_col(lat_min, lng_min)
    row_max, col_max = lat_lng_to_row_col(lat_max, lng_max)
    if col_min > col_max:
        col_min, col_max = col_max, col_min
    if row_min > row_max:
        row_min, row_max = row_max, row_min

    with open(in_file, 'rb') as fh:
        for i in range(row_max, row_min - 1, -1):
            for j in range(col_min, col_max + 1):
                # Seek to the two bytes of cell (i, j): rows hold 1201 cells
                # of 2 bytes each.
                fh.seek(((i - 1) * 1201 + (j - 1)) * 2)
                buf = fh.read(2)  # read two bytes and convert them
                # ">h" is a big-endian signed two-byte integer
                val = struct.unpack('>h', buf)[0]
                # BUGFIX: append a single (X, Y, Z) tuple; the previous
                # version called list.append with three arguments, which
                # raises TypeError.
                points.append((j, i, val))
    return points

+ 39
- 0
scripts/libskimap/tools.py View File

@@ -0,0 +1,39 @@
1
+"""
2
+    This module contains miscellaneous utility functions.
3
+"""
4
+import json
5
+import struct
6
+
7
+
8
def has_overlap(a, b):
    """
    Return True when the two intervals a and b (given as pairs of bounds)
    share a region of non-zero length, False otherwise.
    """
    lower = max(a[0], b[0])
    upper = min(a[1], b[1])
    return upper - lower > 0
13
+
14
+
15
def pack(fmt, nodata, value):
    """
    Encode value as bytes with struct format fmt, substituting the nodata
    value when value is None.
    """
    return struct.pack(fmt, nodata if value is None else value)
22
+
23
+
24
def encode(fmt, nodata, values):
    """
    Pack all the given values together as a single byte string, mapping
    None entries to nodata.
    """
    return b''.join(pack(fmt, nodata, value) for value in values)
30
+
31
+
32
def pretty_json(data):
    """
    Return data serialized as human-readable JSON, with sorted keys and
    4-space indentation.
    """
    return json.dumps(
        data, sort_keys=True, indent=4, separators=(',', ': '))

+ 0
- 118
scripts/osm_filter.py View File

@@ -1,118 +0,0 @@
1
-#!/usr/bin/env python3
2
-import json
3
-import pyproj
4
-import os
5
-import sys
6
-import xml.etree.ElementTree as ET
7
-
8
-"""
9
-This script filters the OpenStreetMap XML data file. It exports only the
10
-geographically relevant informations in a JSON file.
11
-"""
12
-
13
-wgs84 = pyproj.Proj("+init=EPSG:4326")
14
-lambert = pyproj.Proj("+init=EPSG:2154")
15
-
16
-
17
-if len(sys.argv) < 6:
18
-    print("Usage: "+sys.argv[0]+" LAT_MIN LNG_MIN LAT_MAX LNG_MAX DATA_FILE")
19
-    sys.exit()
20
-
21
-lat_min = float(sys.argv[1])
22
-lng_min = float(sys.argv[2])
23
-lat_max = float(sys.argv[3])
24
-lng_max = float(sys.argv[4])
25
-data_file = os.path.expanduser(sys.argv[5])
26
-
27
-if len(sys.argv) > 6:
28
-    out_file = sys.argv[6]
29
-else:
30
-    out_file = data_file.replace(".xml", ".json")
31
-
32
-print(("Looking for data between latitudes and longitudes " +
33
-       "({0}, {1}) and ({2}, {3})").format(lat_min, lng_min, lat_max, lng_max))
34
-
35
-if lat_min > lat_max:
36
-    lat_min, lat_max = lat_max, lat_min
37
-if lng_min > lng_max:
38
-    lng_min, lng_max = lng_max, lng_min
39
-
40
-# Extract relevant parts from the dataset
41
-ways = []
42
-xml = ET.parse(data_file)
43
-root = xml.getroot()
44
-
45
-nodes = {}
46
-for node in root.iter("node"):
47
-    lat = float(node.attrib["lat"])
48
-    lng = float(node.attrib["lon"])
49
-    if lat_min <= lat and lat <= lat_max and lng_min <= lng and lng <= lng_max:
50
-        x, y = pyproj.transform(wgs84, lambert, lng, lat)
51
-        id = node.attrib["id"]
52
-        nodes[id] = {"id": id,
53
-                     "lat": lat,
54
-                     "lng": lng,
55
-                     "x": x,
56
-                     "y": y}
57
-
58
-for way in root.iter("way"):
59
-    way_nodes = []
60
-    name = ""
61
-    type = ""
62
-    # Slopes
63
-    difficulty = ""
64
-    # Aerial ways
65
-    bubble = ""
66
-    capacity = ""
67
-    duration = ""
68
-    heating = ""
69
-    occupancy = ""
70
-    for nd in way.iter("nd"):
71
-        if nd.attrib["ref"] in nodes:
72
-            way_nodes.append(nodes[nd.attrib["ref"]])
73
-    for tag in way.iter("tag"):
74
-        if tag.attrib["k"] == "name":
75
-            name = tag.attrib["v"]
76
-        elif tag.attrib["k"] == "piste:difficulty":
77
-            difficulty = tag.attrib["v"]
78
-        elif tag.attrib["k"] == "piste:type":
79
-            type = tag.attrib["v"]
80
-        elif tag.attrib["k"] == "aerialway":
81
-            type = tag.attrib["v"]
82
-        elif tag.attrib["k"] == "aerialway:bubble":
83
-            bubble = tag.attrib["v"]
84
-        elif tag.attrib["k"] == "aerialway:capacity":
85
-            capacity = tag.attrib["v"]
86
-        elif tag.attrib["k"] == "aerialway:duration":
87
-            duration = tag.attrib["v"]
88
-        elif tag.attrib["k"] == "aerialway:heating":
89
-            heating = tag.attrib["v"]
90
-        elif tag.attrib["k"] == "aerialway:occupancy":
91
-            occupancy = tag.attrib["v"]
92
-    if len(way_nodes) > 0:
93
-        append = {"name": name,
94
-                  "type": type,
95
-                  "nodes": way_nodes}
96
-        if difficulty != "":
97
-            append["difficulty"] = difficulty
98
-        if(bubble != "" or capacity != "" or duration != "" or
99
-           heating != "" or occupancy != ""):
100
-            append["bubble"] = bubble
101
-            append["capacity"] = capacity
102
-            append["duration"] = duration
103
-            append["heating"] = heating
104
-            append["occupancy"] = occupancy
105
-        ways.append(append)
106
-
107
-
108
-# Write it to output file
109
-with open(out_file, 'w') as fh:
110
-    fh.write(json.dumps(ways, sort_keys=True,
111
-             indent=4, separators=(',', ': ')))
112
-
113
-if len(sys.argv) < 7:
114
-    print("Found data (also exported to "+out_file+"):")
115
-    print(json.dumps(ways, sort_keys=True,
116
-          indent=4, separators=(',', ': ')))
117
-else:
118
-    print("Found data exported to "+out_file+".")

+ 122
- 0
scripts/skimap.py View File

@@ -0,0 +1,122 @@
1
+#!/usr/bin/env python3
2
+"""
3
+This script filters the BD ALTI from IGN (ESRI grid) to keep only the relevant
4
+files for the specified area.
5
+
6
+Input is an ESRI grid (ASCII) file from the IGN BD ALTI database.
7
+(https://en.wikipedia.org/wiki/Esri_grid#ASCII)
8
+
9
+Output is an Azavea Raster Grid format (ARG) file.
10
+http://geotrellis.io/documentation/0.9.0/geotrellis/io/arg/
11
+
12
+Metadata are going to the standard output, and should be appended to
13
+config.json file in ressources folder.
14
+"""
15
+import argparse
16
+import os
17
+
18
+from libskimap import esri, geodata, hgt, osm, output, tools
19
+
20
+
21
def main(lat_min, lng_min, lat_max, lng_max,
         in_files, osm_file,
         out, out_geometadata):
    """
    Build the heightmap (ARG) file and the geographic metadata file.

    lat_min, lng_min, lat_max, lng_max bound the target area. in_files is a
    list of input dataset paths (.asc or .hgt). osm_file is an optional
    downloaded OSM XML file. out is the output ARG file name and
    out_geometadata the output geographic metadata file name.
    """
    # Coerce to floats in case the caller passed strings (e.g. raw argv).
    lat_min, lng_min = float(lat_min), float(lng_min)
    lat_max, lng_max = float(lat_max), float(lng_max)

    print(("Looking for data between latitudes and longitudes " +
           "({0}, {1}) and ({2}, {3})").format(lat_min,
                                               lng_min,
                                               lat_max,
                                               lng_max))

    # Convert latitude / longitude coordinates to Lambert93 projection
    x_min, y_min = geodata.latlng_to_lambert93(lat_min, lng_min)
    x_max, y_max = geodata.latlng_to_lambert93(lat_max, lng_max)

    # Switch bounds if necessary so that min <= max
    if x_min > x_max:
        x_min, x_max = x_max, x_min
    if y_min > y_max:
        y_min, y_max = y_max, y_min

    print(("Looking for data between map coordinates " +
           "({0}, {1}) and ({2}, {3})").format(x_min, y_min, x_max, y_max))

    # Loop over all the available data files and find those with relevant
    # points. ESRI datasets are addressed in Lambert93 coordinates.
    in_files_esri = [i for i in in_files if i.endswith(".asc")]
    points = esri.find_relevant_points(x_min, y_min, x_max, y_max,
                                       in_files_esri)
    # HGT (SRTM) datasets are addressed in latitude / longitude, one file at
    # a time — BUGFIX: the previous version passed Lambert93 coordinates and
    # a whole list of files.
    # NOTE(review): the module shipped in libskimap is named srtm, not hgt —
    # confirm the `hgt` import at the top of this file resolves.
    for hgt_file in (i for i in in_files if i.endswith(".hgt")):
        points.extend(hgt.find_relevant_points(lat_min, lng_min,
                                               lat_max, lng_max,
                                               hgt_file))

    # Get ARG formatted data. BUGFIX: use a dedicated name so the `out`
    # file-name parameter is not clobbered before being passed to open().
    arg_data, arg_metadata = output.format_arg(points)

    # Filter ARG metadata according to our needs for config
    config = output.arg_metadata_to_JSON_config(arg_metadata)

    # Fetch OSM metadata for the target geographic region
    osm_metadata = osm.fetch_metadata(lat_min, lng_min, lat_max, lng_max,
                                      osm_file)
    # Output the OSM metadata to the relevant file
    with open(out_geometadata, "w") as fh:
        fh.write(tools.pretty_json(osm_metadata))

    # Write the raw ARG data to out
    with open(out, "wb") as fh:
        fh.write(arg_data)

    # Print metadata to stdout, as JSON
    print("\n\nConfig file:")
    print(config)
76
+
77
+
78
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Fetch height dataset from geodata.")
    # BUGFIX: parse latitudes / longitudes as floats instead of strings.
    parser.add_argument("--lat-min", dest="lat_min", action="store",
                        type=float, required=True,
                        help="Minimum latitude in the covered area.")
    parser.add_argument("--lat-max", dest="lat_max", action="store",
                        type=float, required=True,
                        help="Maximum latitude in the covered area.")
    parser.add_argument("--lng-min", dest="lng_min", action="store",
                        type=float, required=True,
                        help="Minimum longitude in the covered area.")
    parser.add_argument("--lng-max", dest="lng_max", action="store",
                        type=float, required=True,
                        help="Maximum longitude in the covered area.")
    parser.add_argument("--out", dest="out", action="store",
                        required=True,
                        help="Output heightmap metadata file name.")
    parser.add_argument("--osm-file", dest="osm_file", action="store",
                        required=False, default="",
                        help="Path to a downloaded OSM metadata XML file.")
    parser.add_argument("--out-geometadata", dest="out_geometadata",
                        action="store", required=False, default="",
                        help="Output geographic metadata file name " +
                        "(defaults to osm_(--out arg) file).")
    # BUGFIX: argparse forbids dest= on positional arguments (it raises
    # ValueError); name the argument directly and keep "in" as the metavar.
    parser.add_argument("in_files", metavar="in", action="store", nargs="+",
                        help="Input dataset files or folder containing " +
                        "input files.")
    # Parse parameters
    args = parser.parse_args()

    in_files = args.in_files
    # If in_files has only one element which is a directory
    if len(in_files) == 1 and os.path.isdir(in_files[0]):
        # in_files is the list of all the files in this directory
        in_files = [os.path.join(in_files[0], f)
                    for f in os.listdir(in_files[0])]

    out_geometadata = args.out_geometadata
    # BUGFIX: test for emptiness instead of `is ""` — identity of string
    # literals is implementation-defined.
    if not out_geometadata:
        out_geometadata = "osm_" + args.out

    # Call the main function with arguments from the command line
    main(args.lat_min, args.lng_min, args.lat_max, args.lng_max,
         in_files, args.osm_file, args.out, out_geometadata)

+ 0
- 70
scripts/srtm_alti.py View File

@@ -1,70 +0,0 @@
1
-#!/usr/bin/env python3
2
-import struct
3
-import sys
4
-
5
-"""
6
-This script filters the SRTM altimetry dataset from NASA to keep only the
7
-relevant files for the specified area.
8
-
9
-Input is an HGT file from the SRTM dataset.
10
-Output is an ASCII XYZ gridded file, sorted by increasing X and decreasing Y.
11
-"""
12
-
13
-
14
def lat_lng_to_row_col(lat, lng):
    """Map a (lat, lng) pair to (row, col) indices inside a 1201x1201 tile.

    Only the fractional degree part of each coordinate is used; SRTM3 tiles
    are sampled every 3 arc-seconds (3600 / 3 = 1200 cells per degree).

    NOTE(review): the row is derived from the longitude and the column from
    the latitude — presumably intentional for this pipeline's orientation,
    but confirm against the tile layout before reusing elsewhere.
    """
    row_fraction = (lng % 1) * 3600 / 3
    col_fraction = (lat % 1) * 3600 / 3
    return 1201 - round(row_fraction), round(col_fraction)
16
-
17
-
18
# --- Command-line handling -------------------------------------------------
# Usage: srtm_alti.py LAT_MIN LNG_MIN LAT_MAX LNG_MAX HGT_FILE [OUT_FILE]
if len(sys.argv) < 6:
    print("Usage: "+sys.argv[0]+" LAT_MIN LNG_MIN LAT_MAX LNG_MAX HGT_FILE")
    sys.exit(1)  # exit non-zero so shell callers can detect the usage error

lat_min = float(sys.argv[1])
lng_min = float(sys.argv[2])
lat_max = float(sys.argv[3])
lng_max = float(sys.argv[4])
hgt_file = sys.argv[5]

# Optional sixth argument: output file name (defaults to out.xyz).
if len(sys.argv) > 6:
    out_file = sys.argv[6]
else:
    out_file = "out.xyz"

print(("Looking for data between latitudes and longitudes " +
       "({0}, {1}) and ({2}, {3})").format(lat_min, lng_min, lat_max, lng_max))

# Extract relevant parts from the dataset and output it as xyz values.
# X and Y are pixel coordinates in this case.
# Collect the rows in a list and join once at the end: repeated "out +="
# inside the loop is quadratic in the number of samples.
lines = ["X\tY\tZ"]
# ">h" is a big-endian signed two-byte integer; pre-compile the format so it
# is not re-parsed for every sample.
sample = struct.Struct('>h')
with open(hgt_file, "rb") as fh:
    # The first row in the file is very likely the northernmost one and there
    # are 1200 rows. 3 arc-seconds sampling.
    row_min, col_min = lat_lng_to_row_col(lat_min, lng_min)
    row_max, col_max = lat_lng_to_row_col(lat_max, lng_max)
    print("Corresponding rectangle in the image is " +
          "({0}, {1}), ({2}, {3})".format(row_min, col_min, row_max, col_max))

    # Normalise the bounds so the ranges below always iterate.
    if col_min > col_max:
        col_min, col_max = col_max, col_min
    if row_min > row_max:
        row_min, row_max = row_max, row_min

    # Walk rows by decreasing index, columns by increasing index, seeking to
    # each 2-byte sample inside the 1201x1201 tile.
    for i in range(row_max, row_min - 1, -1):
        for j in range(col_min, col_max + 1):
            fh.seek(((i - 1) * 1201 + (j - 1)) * 2)  # find the right spot
            buf = fh.read(2)  # read two bytes and convert them
            val = sample.unpack(buf)
            lines.append("{0}\t{1}\t{2}".format(j, i, val[0]))
# Joining with "\n" yields exactly the old stripped string (no trailing
# newline), including the empty-selection case.
out = "\n".join(lines)

# Write it to the output file.
with open(out_file, 'w') as fh:
    fh.write(out)

if len(sys.argv) < 7:
    print("Found data (also exported to "+out_file+"):")
    print(out)
else:
    print("Found data exported to "+out_file+".")