Browse Source

Reformat with black (max-line-length=120)

Gautier P 6 months ago
parent
commit
a92db5e8ee

+ 10
- 0
.editorconfig View File

@@ -0,0 +1,10 @@
1
+root = true
2
+
3
+[*]
4
+indent_style = space
5
+indent_size = 4
6
+end_of_line = lf
7
+insert_final_newline = true
8
+
9
+[*.py]
10
+max_line_length=120

+ 12
- 38
flatisfy/__main__.py View File

@@ -28,15 +28,11 @@ def parse_args(argv=None):
28 28
     """
29 29
     Create parser and parse arguments.
30 30
     """
31
-    parser = argparse.ArgumentParser(
32
-        prog="Flatisfy", description="Find the perfect flat."
33
-    )
31
+    parser = argparse.ArgumentParser(prog="Flatisfy", description="Find the perfect flat.")
34 32
 
35 33
     # Parent parser containing arguments common to any subcommand
36 34
     parent_parser = argparse.ArgumentParser(add_help=False)
37
-    parent_parser.add_argument(
38
-        "--data-dir", help="Location of Flatisfy data directory."
39
-    )
35
+    parent_parser.add_argument("--data-dir", help="Location of Flatisfy data directory.")
40 36
     parent_parser.add_argument("--config", help="Configuration file to use.")
41 37
     parent_parser.add_argument(
42 38
         "--passes",
@@ -44,12 +40,8 @@ def parse_args(argv=None):
44 40
         type=int,
45 41
         help="Number of passes to do on the filtered data.",
46 42
     )
47
-    parent_parser.add_argument(
48
-        "--max-entries", type=int, help="Maximum number of entries to fetch."
49
-    )
50
-    parent_parser.add_argument(
51
-        "-v", "--verbose", action="store_true", help="Verbose logging output."
52
-    )
43
+    parent_parser.add_argument("--max-entries", type=int, help="Maximum number of entries to fetch.")
44
+    parent_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose logging output.")
53 45
     parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
54 46
     parent_parser.add_argument(
55 47
         "--constraints",
@@ -61,17 +53,13 @@ def parse_args(argv=None):
61 53
     subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
62 54
 
63 55
     # Build data subcommand
64
-    subparsers.add_parser(
65
-        "build-data", parents=[parent_parser], help="Build necessary data"
66
-    )
56
+    subparsers.add_parser("build-data", parents=[parent_parser], help="Build necessary data")
67 57
 
68 58
     # Init config subcommand
69 59
     parser_init_config = subparsers.add_parser(
70 60
         "init-config", parents=[parent_parser], help="Initialize empty configuration."
71 61
     )
72
-    parser_init_config.add_argument(
73
-        "output", nargs="?", help="Output config file. Use '-' for stdout."
74
-    )
62
+    parser_init_config.add_argument("output", nargs="?", help="Output config file. Use '-' for stdout.")
75 63
 
76 64
     # Fetch subcommand parser
77 65
     subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
@@ -93,9 +81,7 @@ def parse_args(argv=None):
93 81
     )
94 82
 
95 83
     # Import subcommand parser
96
-    import_filter = subparsers.add_parser(
97
-        "import", parents=[parent_parser], help="Import housing posts in database."
98
-    )
84
+    import_filter = subparsers.add_parser("import", parents=[parent_parser], help="Import housing posts in database.")
99 85
     import_filter.add_argument(
100 86
         "--new-only",
101 87
         action="store_true",
@@ -106,9 +92,7 @@ def parse_args(argv=None):
106 92
     subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
107 93
 
108 94
     # Serve subcommand parser
109
-    parser_serve = subparsers.add_parser(
110
-        "serve", parents=[parent_parser], help="Serve the web app."
111
-    )
95
+    parser_serve = subparsers.add_parser("serve", parents=[parent_parser], help="Serve the web app.")
112 96
     parser_serve.add_argument("--port", type=int, help="Port to bind to.")
113 97
     parser_serve.add_argument("--host", help="Host to listen on.")
114 98
 
@@ -170,14 +154,9 @@ def main():
170 154
     if args.cmd == "fetch":
171 155
         # Fetch and filter flats list
172 156
         fetched_flats = fetch.fetch_flats(config)
173
-        fetched_flats = cmds.filter_fetched_flats(
174
-            config, fetched_flats=fetched_flats, fetch_details=True
175
-        )
157
+        fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=True)
176 158
         # Sort by cost
177
-        fetched_flats = {
178
-            k: tools.sort_list_of_dicts_by(v["new"], "cost")
179
-            for k, v in fetched_flats.items()
180
-        }
159
+        fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
181 160
 
182 161
         print(tools.pretty_json(fetched_flats))
183 162
         return
@@ -187,15 +166,10 @@ def main():
187 166
         if args.input:
188 167
             fetched_flats = fetch.load_flats_from_file(args.input, config)
189 168
 
190
-            fetched_flats = cmds.filter_fetched_flats(
191
-                config, fetched_flats=fetched_flats, fetch_details=False
192
-            )
169
+            fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=False)
193 170
 
194 171
             # Sort by cost
195
-            fetched_flats = {
196
-                k: tools.sort_list_of_dicts_by(v["new"], "cost")
197
-                for k, v in fetched_flats.items()
198
-            }
172
+            fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
199 173
 
200 174
             # Output to stdout
201 175
             print(tools.pretty_json(fetched_flats))

+ 21
- 65
flatisfy/config.py View File

@@ -123,9 +123,7 @@ def validate_config(config, check_with_data):
123 123
         """
124 124
         assert isinstance(bounds, list)
125 125
         assert len(bounds) == 2
126
-        assert all(
127
-            x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds
128
-        )
126
+        assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
129 127
         if bounds[0] is not None and bounds[1] is not None:
130 128
             assert bounds[1] > bounds[0]
131 129
 
@@ -141,45 +139,25 @@ def validate_config(config, check_with_data):
141 139
             isinstance(config["max_entries"], int) and config["max_entries"] > 0
142 140
         )  # noqa: E501
143 141
 
144
-        assert config["data_directory"] is None or isinstance(
145
-            config["data_directory"], str
146
-        )  # noqa: E501
142
+        assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
147 143
         assert os.path.isdir(config["data_directory"])
148 144
         assert isinstance(config["search_index"], str)
149
-        assert config["modules_path"] is None or isinstance(
150
-            config["modules_path"], str
151
-        )  # noqa: E501
145
+        assert config["modules_path"] is None or isinstance(config["modules_path"], str)  # noqa: E501
152 146
 
153
-        assert config["database"] is None or isinstance(
154
-            config["database"], str
155
-        )  # noqa: E501
147
+        assert config["database"] is None or isinstance(config["database"], str)  # noqa: E501
156 148
 
157 149
         assert isinstance(config["port"], int)
158 150
         assert isinstance(config["host"], str)
159
-        assert config["webserver"] is None or isinstance(
160
-            config["webserver"], str
161
-        )  # noqa: E501
162
-        assert config["backends"] is None or isinstance(
163
-            config["backends"], list
164
-        )  # noqa: E501
151
+        assert config["webserver"] is None or isinstance(config["webserver"], str)  # noqa: E501
152
+        assert config["backends"] is None or isinstance(config["backends"], list)  # noqa: E501
165 153
 
166 154
         assert isinstance(config["send_email"], bool)
167
-        assert config["smtp_server"] is None or isinstance(
168
-            config["smtp_server"], str
169
-        )  # noqa: E501
170
-        assert config["smtp_port"] is None or isinstance(
171
-            config["smtp_port"], int
172
-        )  # noqa: E501
173
-        assert config["smtp_username"] is None or isinstance(
174
-            config["smtp_username"], str
175
-        )  # noqa: E501
176
-        assert config["smtp_password"] is None or isinstance(
177
-            config["smtp_password"], str
178
-        )  # noqa: E501
155
+        assert config["smtp_server"] is None or isinstance(config["smtp_server"], str)  # noqa: E501
156
+        assert config["smtp_port"] is None or isinstance(config["smtp_port"], int)  # noqa: E501
157
+        assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
158
+        assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
179 159
         assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
180
-        assert config["notification_lang"] is None or isinstance(
181
-            config["notification_lang"], str
182
-        )
160
+        assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)
183 161
 
184 162
         assert isinstance(config["store_personal_data"], bool)
185 163
         assert isinstance(config["max_distance_housing_station"], (int, float))
@@ -187,16 +165,10 @@ def validate_config(config, check_with_data):
187 165
         assert isinstance(config["duplicate_image_hash_threshold"], int)
188 166
 
189 167
         # API keys
190
-        assert config["navitia_api_key"] is None or isinstance(
191
-            config["navitia_api_key"], str
192
-        )  # noqa: E501
193
-        assert config["mapbox_api_key"] is None or isinstance(
194
-            config["mapbox_api_key"], str
195
-        )  # noqa: E501
168
+        assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
169
+        assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501
196 170
 
197
-        assert config["ignore_station"] is None or isinstance(
198
-            config["ignore_station"], bool
199
-        )  # noqa: E501
171
+        assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501
200 172
 
201 173
         # Ensure constraints are ok
202 174
         assert config["constraints"]
@@ -234,10 +206,7 @@ def validate_config(config, check_with_data):
234 206
                 # Ensure data is built into db
235 207
                 data.preprocess_data(config, force=False)
236 208
                 # Check postal codes
237
-                opendata_postal_codes = [
238
-                    x.postal_code
239
-                    for x in data.load_data(PostalCode, constraint, config)
240
-                ]
209
+                opendata_postal_codes = [x.postal_code for x in data.load_data(PostalCode, constraint, config)]
241 210
                 for postal_code in constraint["postal_codes"]:
242 211
                     assert postal_code in opendata_postal_codes  # noqa: E501
243 212
 
@@ -292,16 +261,13 @@ def load_config(args=None, check_with_data=True):
292 261
                 config_data.update(json.load(fh))
293 262
         except (IOError, ValueError) as exc:
294 263
             LOGGER.error(
295
-                "Unable to load configuration from file, "
296
-                "using default configuration: %s.",
264
+                "Unable to load configuration from file, " "using default configuration: %s.",
297 265
                 exc,
298 266
             )
299 267
 
300 268
     # Overload config with arguments
301 269
     if args and getattr(args, "passes", None) is not None:
302
-        LOGGER.debug(
303
-            "Overloading number of passes from CLI arguments: %d.", args.passes
304
-        )
270
+        LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
305 271
         config_data["passes"] = args.passes
306 272
     if args and getattr(args, "max_entries", None) is not None:
307 273
         LOGGER.debug(
@@ -322,9 +288,7 @@ def load_config(args=None, check_with_data=True):
322 288
         config_data["data_directory"] = args.data_dir
323 289
     elif config_data["data_directory"] is None:
324 290
         config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
325
-        LOGGER.debug(
326
-            "Using default XDG data directory: %s.", config_data["data_directory"]
327
-        )
291
+        LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])
328 292
 
329 293
     if not os.path.isdir(config_data["data_directory"]):
330 294
         LOGGER.info(
@@ -335,14 +299,10 @@ def load_config(args=None, check_with_data=True):
335 299
         os.makedirs(os.path.join(config_data["data_directory"], "images"))
336 300
 
337 301
     if config_data["database"] is None:
338
-        config_data["database"] = "sqlite:///" + os.path.join(
339
-            config_data["data_directory"], "flatisfy.db"
340
-        )
302
+        config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")
341 303
 
342 304
     if config_data["search_index"] is None:
343
-        config_data["search_index"] = os.path.join(
344
-            config_data["data_directory"], "search_index"
345
-        )
305
+        config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")
346 306
 
347 307
     # Handle constraints filtering
348 308
     if args and getattr(args, "constraints", None) is not None:
@@ -354,11 +314,7 @@ def load_config(args=None, check_with_data=True):
354 314
             args.constraints.replace(",", ", "),
355 315
         )
356 316
         constraints_filter = args.constraints.split(",")
357
-        config_data["constraints"] = {
358
-            k: v
359
-            for k, v in config_data["constraints"].items()
360
-            if k in constraints_filter
361
-        }
317
+        config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}
362 318
 
363 319
     # Sanitize website url
364 320
     if config_data["website_url"] is not None:

+ 2
- 7
flatisfy/data.py View File

@@ -50,10 +50,7 @@ def preprocess_data(config, force=False):
50 50
     # Check if a build is required
51 51
     get_session = database.init_db(config["database"], config["search_index"])
52 52
     with get_session() as session:
53
-        is_built = (
54
-            session.query(PublicTransport).count() > 0
55
-            and session.query(PostalCode).count() > 0
56
-        )
53
+        is_built = session.query(PublicTransport).count() > 0 and session.query(PostalCode).count() > 0
57 54
         if is_built and not force:
58 55
             # No need to rebuild the database, skip
59 56
             return False
@@ -66,9 +63,7 @@ def preprocess_data(config, force=False):
66 63
     for preprocess in data_files.PREPROCESSING_FUNCTIONS:
67 64
         data_objects = preprocess()
68 65
         if not data_objects:
69
-            raise flatisfy.exceptions.DataBuildError(
70
-                "Error with %s." % preprocess.__name__
71
-            )
66
+            raise flatisfy.exceptions.DataBuildError("Error with %s." % preprocess.__name__)
72 67
         with get_session() as session:
73 68
             session.add_all(data_objects)
74 69
     LOGGER.info("Done building data!")

+ 6
- 22
flatisfy/data_files/__init__.py View File

@@ -114,19 +114,11 @@ def french_postal_codes_to_quarter(postal_code):
114 114
     }
115 115
 
116 116
     subdivision = next(
117
-        (
118
-            i
119
-            for i, departments in department_to_subdivision.items()
120
-            if departement in departments
121
-        ),
117
+        (i for i, departments in department_to_subdivision.items() if departement in departments),
122 118
         None,
123 119
     )
124 120
     return next(
125
-        (
126
-            i
127
-            for i, subdivisions in subdivision_to_quarters.items()
128
-            if subdivision in subdivisions
129
-        ),
121
+        (i for i, subdivisions in subdivision_to_quarters.items() if subdivision in subdivisions),
130 122
         None,
131 123
     )
132 124
 
@@ -165,9 +157,7 @@ def _preprocess_laposte():
165 157
                 )
166 158
                 continue
167 159
 
168
-            name = normalize_string(
169
-                titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False
170
-            )
160
+            name = normalize_string(titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False)
171 161
 
172 162
             if (fields["code_postal"], name) in seen_postal_codes:
173 163
                 continue
@@ -183,9 +173,7 @@ def _preprocess_laposte():
183 173
                 )
184 174
             )
185 175
         except KeyError:
186
-            LOGGER.info(
187
-                "Missing data for postal code %s, skipping it.", fields["code_postal"]
188
-            )
176
+            LOGGER.info("Missing data for postal code %s, skipping it.", fields["code_postal"])
189 177
 
190 178
     return postal_codes_data
191 179
 
@@ -201,15 +189,11 @@ def _preprocess_public_transport():
201 189
     for area, data_file in TRANSPORT_DATA_FILES.items():
202 190
         LOGGER.info("Building from public transport data %s.", data_file)
203 191
         try:
204
-            with io.open(
205
-                os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8"
206
-            ) as fh:
192
+            with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
207 193
                 filereader = csv.reader(fh)
208 194
                 next(filereader, None)  # Skip first row (headers)
209 195
                 for row in filereader:
210
-                    public_transport_data.append(
211
-                        PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4])
212
-                    )
196
+                    public_transport_data.append(PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4]))
213 197
         except (IOError, IndexError):
214 198
             LOGGER.error("Invalid raw opendata file: %s.", data_file)
215 199
             return []

+ 5
- 16
flatisfy/database/whooshalchemy.py View File

@@ -92,23 +92,17 @@ class IndexService(object):
92 92
         for model in session.new:
93 93
             model_class = model.__class__
94 94
             if hasattr(model_class, "__searchable__"):
95
-                self.to_update.setdefault(model_class.__name__, []).append(
96
-                    ("new", model)
97
-                )
95
+                self.to_update.setdefault(model_class.__name__, []).append(("new", model))
98 96
 
99 97
         for model in session.deleted:
100 98
             model_class = model.__class__
101 99
             if hasattr(model_class, "__searchable__"):
102
-                self.to_update.setdefault(model_class.__name__, []).append(
103
-                    ("deleted", model)
104
-                )
100
+                self.to_update.setdefault(model_class.__name__, []).append(("deleted", model))
105 101
 
106 102
         for model in session.dirty:
107 103
             model_class = model.__class__
108 104
             if hasattr(model_class, "__searchable__"):
109
-                self.to_update.setdefault(model_class.__name__, []).append(
110
-                    ("changed", model)
111
-                )
105
+                self.to_update.setdefault(model_class.__name__, []).append(("changed", model))
112 106
 
113 107
     def after_commit(self, session):
114 108
         """
@@ -129,16 +123,11 @@ class IndexService(object):
129 123
                     # added as a new doc. Could probably replace this with a whoosh
130 124
                     # update.
131 125
 
132
-                    writer.delete_by_term(
133
-                        primary_field, text_type(getattr(model, primary_field))
134
-                    )
126
+                    writer.delete_by_term(primary_field, text_type(getattr(model, primary_field)))
135 127
 
136 128
                     if change_type in ("new", "changed"):
137 129
                         attrs = dict((key, getattr(model, key)) for key in searchable)
138
-                        attrs = {
139
-                            attr: text_type(getattr(model, attr))
140
-                            for attr in attrs.keys()
141
-                        }
130
+                        attrs = {attr: text_type(getattr(model, attr)) for attr in attrs.keys()}
142 131
                         attrs[primary_field] = text_type(getattr(model, primary_field))
143 132
                         writer.add_document(**attrs)
144 133
 

+ 1
- 3
flatisfy/email.py View File

@@ -16,9 +16,7 @@ from email.utils import formatdate, make_msgid
16 16
 LOGGER = logging.getLogger(__name__)
17 17
 
18 18
 
19
-def send_email(
20
-    server, port, subject, _from, _to, txt, html, username=None, password=None
21
-):
19
+def send_email(server, port, subject, _from, _to, txt, html, username=None, password=None):
22 20
     """
23 21
     Send an email
24 22
 

+ 10
- 31
flatisfy/fetch.py View File

@@ -24,9 +24,7 @@ try:
24 24
     from weboob.core.ouiboube import WebNip
25 25
     from weboob.tools.json import WeboobEncoder
26 26
 except ImportError:
27
-    LOGGER.error(
28
-        "Weboob is not available on your system. Make sure you " "installed it."
29
-    )
27
+    LOGGER.error("Weboob is not available on your system. Make sure you " "installed it.")
30 28
     raise
31 29
 
32 30
 
@@ -79,9 +77,7 @@ class WebOOBProxy(object):
79 77
         self.webnip = WebNip(modules_path=config["modules_path"])
80 78
 
81 79
         # Create backends
82
-        self.backends = [
83
-            self.webnip.load_backend(module, module, params={}) for module in backends
84
-        ]
80
+        self.backends = [self.webnip.load_backend(module, module, params={}) for module in backends]
85 81
 
86 82
     def __enter__(self):
87 83
         return self
@@ -118,18 +114,14 @@ class WebOOBProxy(object):
118 114
 
119 115
                 if not matching_cities:
120 116
                     # If postal code gave no match, warn the user
121
-                    LOGGER.warn(
122
-                        "Postal code %s could not be matched with a city.", postal_code
123
-                    )
117
+                    LOGGER.warn("Postal code %s could not be matched with a city.", postal_code)
124 118
 
125 119
         # Remove "TOUTES COMMUNES" entry which are duplicates of the individual
126 120
         # cities entries in Logicimmo module.
127 121
         matching_cities = [
128 122
             city
129 123
             for city in matching_cities
130
-            if not (
131
-                city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES")
132
-            )
124
+            if not (city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES"))
133 125
         ]
134 126
 
135 127
         # Then, build queries by grouping cities by at most 3
@@ -139,8 +131,7 @@ class WebOOBProxy(object):
139 131
 
140 132
             try:
141 133
                 query.house_types = [
142
-                    getattr(HOUSE_TYPES, house_type.upper())
143
-                    for house_type in constraints_dict["house_types"]
134
+                    getattr(HOUSE_TYPES, house_type.upper()) for house_type in constraints_dict["house_types"]
144 135
                 ]
145 136
             except AttributeError:
146 137
                 LOGGER.error("Invalid house types constraint.")
@@ -193,9 +184,7 @@ class WebOOBProxy(object):
193 184
                 housings.append(json.dumps(housing, cls=WeboobEncoder))
194 185
         except CallErrors as exc:
195 186
             # If an error occured, just log it
196
-            LOGGER.error(
197
-                "An error occured while fetching the housing posts: %s", str(exc)
198
-            )
187
+            LOGGER.error("An error occured while fetching the housing posts: %s", str(exc))
199 188
         return housings
200 189
 
201 190
     def info(self, full_flat_id, store_personal_data=False):
@@ -210,9 +199,7 @@ class WebOOBProxy(object):
210 199
         """
211 200
         flat_id, backend_name = full_flat_id.rsplit("@", 1)
212 201
         try:
213
-            backend = next(
214
-                backend for backend in self.backends if backend.name == backend_name
215
-            )
202
+            backend = next(backend for backend in self.backends if backend.name == backend_name)
216 203
         except StopIteration:
217 204
             LOGGER.error("Backend %s is not available.", backend_name)
218 205
             return "{}"
@@ -231,9 +218,7 @@ class WebOOBProxy(object):
231 218
             return json.dumps(housing, cls=WeboobEncoder)
232 219
         except Exception as exc:  # pylint: disable=broad-except
233 220
             # If an error occured, just log it
234
-            LOGGER.error(
235
-                "An error occured while fetching housing %s: %s", full_flat_id, str(exc)
236
-            )
221
+            LOGGER.error("An error occured while fetching housing %s: %s", full_flat_id, str(exc))
237 222
             return "{}"
238 223
 
239 224
 
@@ -253,18 +238,12 @@ def fetch_flats(config):
253 238
             queries = webOOB_proxy.build_queries(constraint)
254 239
             housing_posts = []
255 240
             for query in queries:
256
-                housing_posts.extend(
257
-                    webOOB_proxy.query(
258
-                        query, config["max_entries"], config["store_personal_data"]
259
-                    )
260
-                )
241
+                housing_posts.extend(webOOB_proxy.query(query, config["max_entries"], config["store_personal_data"]))
261 242
         housing_posts = housing_posts[: config["max_entries"]]
262 243
         LOGGER.info("Fetched %d flats.", len(housing_posts))
263 244
 
264 245
         constraint_flats_list = [json.loads(flat) for flat in housing_posts]
265
-        constraint_flats_list = [
266
-            WebOOBProxy.restore_decimal_fields(flat) for flat in constraint_flats_list
267
-        ]
246
+        constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat) for flat in constraint_flats_list]
268 247
         fetched_flats[constraint_name] = constraint_flats_list
269 248
     return fetched_flats
270 249
 

+ 7
- 22
flatisfy/filters/duplicates.py View File

@@ -169,9 +169,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
169 169
             # Sort matching flats by backend precedence
170 170
             matching_flats.sort(
171 171
                 key=lambda flat: next(
172
-                    i
173
-                    for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
174
-                    if flat["id"].endswith(backend)
172
+                    i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
175 173
                 ),
176 174
                 reverse=True,
177 175
             )
@@ -199,9 +197,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
199 197
     if should_intersect:
200 198
         # We added some flats twice with the above method, let's deduplicate on
201 199
         # id.
202
-        unique_flats_list, _ = detect(
203
-            unique_flats_list, key="id", merge=True, should_intersect=False
204
-        )
200
+        unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, should_intersect=False)
205 201
 
206 202
     return unique_flats_list, duplicate_flats
207 203
 
@@ -274,18 +270,14 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
274 270
         # If the two flats are from the same website and have a
275 271
         # different float part, consider they cannot be duplicates. See
276 272
         # https://framagit.org/phyks/Flatisfy/issues/100.
277
-        both_are_from_same_backend = (
278
-            flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
279
-        )
273
+        both_are_from_same_backend = flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
280 274
         both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
281 275
         both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
282 276
         if both_have_float_part and both_are_from_same_backend:
283 277
             assert both_have_equal_float_part
284 278
 
285 279
         if flat1.get("photos", []) and flat2.get("photos", []):
286
-            n_common_photos = find_number_common_photos(
287
-                flat1["photos"], flat2["photos"], photo_cache, hash_threshold
288
-            )
280
+            n_common_photos = find_number_common_photos(flat1["photos"], flat2["photos"], photo_cache, hash_threshold)
289 281
 
290 282
             min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
291 283
 
@@ -332,18 +324,13 @@ def deep_detect(flats_list, config):
332 324
             if flat2["id"] in matching_flats[flat1["id"]]:
333 325
                 continue
334 326
 
335
-            n_common_items = get_duplicate_score(
336
-                flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"]
337
-            )
327
+            n_common_items = get_duplicate_score(flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"])
338 328
 
339 329
             # Minimal score to consider they are duplicates
340 330
             if n_common_items >= config["duplicate_threshold"]:
341 331
                 # Mark flats as duplicates
342 332
                 LOGGER.info(
343
-                    (
344
-                        "Found duplicates using deep detection: (%s, %s). "
345
-                        "Score is %d."
346
-                    ),
333
+                    ("Found duplicates using deep detection: (%s, %s). " "Score is %d."),
347 334
                     flat1["id"],
348 335
                     flat2["id"],
349 336
                     n_common_items,
@@ -369,9 +356,7 @@ def deep_detect(flats_list, config):
369 356
         to_merge = sorted(
370 357
             [flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
371 358
             key=lambda flat: next(
372
-                i
373
-                for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
374
-                if flat["id"].endswith(backend)
359
+                i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
375 360
             ),
376 361
             reverse=True,
377 362
         )

+ 1
- 3
flatisfy/filters/images.py View File

@@ -22,9 +22,7 @@ def download_images(flats_list, config):
22 22
     :param flats_list: A list of flats dicts.
23 23
     :param config: A config dict.
24 24
     """
25
-    photo_cache = ImageCache(
26
-        storage_dir=os.path.join(config["data_directory"], "images")
27
-    )
25
+    photo_cache = ImageCache(storage_dir=os.path.join(config["data_directory"], "images"))
28 26
     for flat in flats_list:
29 27
         for photo in flat["photos"]:
30 28
             # Download photo

+ 18
- 64
flatisfy/filters/metadata.py View File

@@ -97,11 +97,7 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
97 97
     # Get the matches (normalized strings)
98 98
     # Keep only ``limit`` matches.
99 99
     matches = sorted(
100
-        [
101
-            (choice, len(choice))
102
-            for choice in tools.uniqify(unique_normalized_choices)
103
-            if choice in normalized_query
104
-        ],
100
+        [(choice, len(choice)) for choice in tools.uniqify(unique_normalized_choices) if choice in normalized_query],
105 101
         key=lambda x: x[1],
106 102
         reverse=True,
107 103
     )
@@ -115,11 +111,7 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
115 111
 
116 112
     # Convert back matches to original strings
117 113
     # Also filter out matches below threshold
118
-    matches = [
119
-        (choices[normalized_choices.index(x[0])], x[1])
120
-        for x in matches
121
-        if x[1] >= threshold
122
-    ]
114
+    matches = [(choices[normalized_choices.index(x[0])], x[1]) for x in matches if x[1] >= threshold]
123 115
 
124 116
     return matches
125 117
 
@@ -135,16 +127,10 @@ def guess_location_position(location, cities, constraint):
135 127
         # Find associated postal codes
136 128
         matched_postal_codes = []
137 129
         for matched_city_name, _ in matched_cities:
138
-            postal_code_objects_for_city = [
139
-                x for x in cities if x.name == matched_city_name
140
-            ]
141
-            matched_postal_codes.extend(
142
-                pc.postal_code for pc in postal_code_objects_for_city
143
-            )
130
+            postal_code_objects_for_city = [x for x in cities if x.name == matched_city_name]
131
+            matched_postal_codes.extend(pc.postal_code for pc in postal_code_objects_for_city)
144 132
         # Try to match them with postal codes in config constraint
145
-        matched_postal_codes_in_config = set(matched_postal_codes) & set(
146
-            constraint["postal_codes"]
147
-        )
133
+        matched_postal_codes_in_config = set(matched_postal_codes) & set(constraint["postal_codes"])
148 134
         if matched_postal_codes_in_config:
149 135
             # If there are some matched postal codes which are also in
150 136
             # config, use them preferentially. This avoids ignoring
@@ -158,18 +144,14 @@ def guess_location_position(location, cities, constraint):
158 144
         # take the city position
159 145
         for matched_city_name, _ in matched_cities:
160 146
             postal_code_objects_for_city = [
161
-                x
162
-                for x in cities
163
-                if x.name == matched_city_name and x.postal_code == postal_code
147
+                x for x in cities if x.name == matched_city_name and x.postal_code == postal_code
164 148
             ]
165 149
             if len(postal_code_objects_for_city):
166 150
                 position = {
167 151
                     "lat": postal_code_objects_for_city[0].lat,
168 152
                     "lng": postal_code_objects_for_city[0].lng,
169 153
                 }
170
-                LOGGER.debug(
171
-                    ("Found position %s using city %s."), position, matched_city_name
172
-                )
154
+                LOGGER.debug(("Found position %s using city %s."), position, matched_city_name)
173 155
                 break
174 156
 
175 157
     return (postal_code, position)
@@ -228,30 +210,18 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
228 210
 
229 211
         # Then fetch position (and postal_code if it couldn't be found earlier)
230 212
         if postal_code:
231
-            cities = [
232
-                x for x in opendata["postal_codes"] if x.postal_code == postal_code
233
-            ]
213
+            cities = [x for x in opendata["postal_codes"] if x.postal_code == postal_code]
234 214
             (_, position) = guess_location_position(location, cities, constraint)
235 215
         else:
236
-            (postal_code, position) = guess_location_position(
237
-                location, opendata["postal_codes"], constraint
238
-            )
216
+            (postal_code, position) = guess_location_position(location, opendata["postal_codes"], constraint)
239 217
 
240 218
         # Check that postal code is not too far from the ones listed in config,
241 219
         # limit bad fuzzy matching
242 220
         if postal_code and distance_threshold:
243 221
             distance = min(
244 222
                 tools.distance(
245
-                    next(
246
-                        (x.lat, x.lng)
247
-                        for x in opendata["postal_codes"]
248
-                        if x.postal_code == postal_code
249
-                    ),
250
-                    next(
251
-                        (x.lat, x.lng)
252
-                        for x in opendata["postal_codes"]
253
-                        if x.postal_code == constraint_postal_code
254
-                    ),
223
+                    next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code),
224
+                    next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == constraint_postal_code),
255 225
                 )
256 226
                 for constraint_postal_code in constraint["postal_codes"]
257 227
             )
@@ -314,9 +284,7 @@ def guess_stations(flats_list, constraint, config):
314 284
 
315 285
         if not flat_station:
316 286
             # Skip everything if empty station
317
-            LOGGER.info(
318
-                "No stations field for flat %s, skipping stations lookup.", flat["id"]
319
-            )
287
+            LOGGER.info("No stations field for flat %s, skipping stations lookup.", flat["id"])
320 288
             continue
321 289
 
322 290
         # Weboob modules can return several stations in a comma-separated list.
@@ -345,22 +313,14 @@ def guess_stations(flats_list, constraint, config):
345 313
         if postal_code:
346 314
             # If there is a postal code, check that the matched station is
347 315
             # close to it
348
-            postal_code_gps = next(
349
-                (x.lat, x.lng)
350
-                for x in opendata["postal_codes"]
351
-                if x.postal_code == postal_code
352
-            )
316
+            postal_code_gps = next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code)
353 317
             for station in matched_stations:
354 318
                 # Note that multiple stations with the same name exist in a
355 319
                 # city, hence the list of stations objects for a given matching
356 320
                 # station name.
357
-                stations_objects = [
358
-                    x for x in opendata["stations"] if x.name == station[0]
359
-                ]
321
+                stations_objects = [x for x in opendata["stations"] if x.name == station[0]]
360 322
                 for station_data in stations_objects:
361
-                    distance = tools.distance(
362
-                        (station_data.lat, station_data.lng), postal_code_gps
363
-                    )
323
+                    distance = tools.distance((station_data.lat, station_data.lng), postal_code_gps)
364 324
                     if distance < distance_threshold:
365 325
                         # If at least one of the coordinates for a given
366 326
                         # station is close enough, that's ok and we can add
@@ -375,19 +335,14 @@ def guess_stations(flats_list, constraint, config):
375 335
                         )
376 336
                         break
377 337
                     LOGGER.info(
378
-                        (
379
-                            "Station %s is too far from flat %s (%dm > %dm), "
380
-                            "discarding this station."
381
-                        ),
338
+                        ("Station %s is too far from flat %s (%dm > %dm), " "discarding this station."),
382 339
                         station[0],
383 340
                         flat["id"],
384 341
                         int(distance),
385 342
                         int(distance_threshold),
386 343
                     )
387 344
         else:
388
-            LOGGER.info(
389
-                "No postal code for flat %s, skipping stations detection.", flat["id"]
390
-            )
345
+            LOGGER.info("No postal code for flat %s, skipping stations detection.", flat["id"])
391 346
 
392 347
         if not good_matched_stations:
393 348
         # No stations found, log it and continue with next housing
@@ -460,8 +415,7 @@ def compute_travel_times(flats_list, constraint, config):
460 415
                     station["gps"], place["gps"], TimeToModes[mode], config
461 416
                 )
462 417
                 if time_from_station_dict and (
463
-                    time_from_station_dict["time"] < time_to_place_dict
464
-                    or time_to_place_dict is None
418
+                    time_from_station_dict["time"] < time_to_place_dict or time_to_place_dict is None
465 419
                 ):
466 420
                     # If starting from this station makes the route to the
467 421
                     # specified place shorter, update

+ 4
- 12
flatisfy/models/flat.py View File

@@ -182,22 +182,14 @@ class Flat(BASE):
182 182
         # Handle flatisfy metadata
183 183
         flat_dict = flat_dict.copy()
184 184
         if "flatisfy" in flat_dict:
185
-            flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get(
186
-                "matched_stations", []
187
-            )
188
-            flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get(
189
-                "postal_code", None
190
-            )
185
+            flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get("matched_stations", [])
186
+            flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get("postal_code", None)
191 187
             flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
192 188
             flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
193
-            flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get(
194
-                "constraint", "default"
195
-            )
189
+            flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get("constraint", "default")
196 190
             del flat_dict["flatisfy"]
197 191
 
198
-        flat_dict = {
199
-            k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()
200
-        }
192
+        flat_dict = {k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()}
201 193
         return Flat(**flat_dict)
202 194
 
203 195
     def __repr__(self):

+ 14
- 44
flatisfy/tests.py View File

@@ -65,9 +65,7 @@ class TestTexts(unittest.TestCase):
65 65
             tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
66 66
         )
67 67
 
68
-        self.assertEqual(
69
-            "XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr.")
70
-        )
68
+        self.assertEqual("XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr."))
71 69
 
72 70
         self.assertEqual(
73 71
             "A AIX EN PROVENCE",
@@ -121,25 +119,19 @@ class TestPhoneNumbers(unittest.TestCase):
121 119
         """
122 120
         Checks phone numbers with international prefixes.
123 121
         """
124
-        self.assertEqual(
125
-            "0605040302", duplicates.homogeneize_phone_number("+33605040302")
126
-        )
122
+        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("+33605040302"))
127 123
 
128 124
     def test_dots_separators(self):
129 125
         """
130 126
         Checks phone numbers with dots.
131 127
         """
132
-        self.assertEqual(
133
-            "0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02")
134
-        )
128
+        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02"))
135 129
 
136 130
     def test_spaces_separators(self):
137 131
         """
138 132
         Checks phone numbers with spaces.
139 133
         """
140
-        self.assertEqual(
141
-            "0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02")
142
-        )
134
+        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02"))
143 135
 
144 136
 
145 137
 class TestPhotos(unittest.TestCase):
@@ -157,11 +149,7 @@ class TestPhotos(unittest.TestCase):
157 149
         """
158 150
         photo = {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}
159 151
 
160
-        self.assertTrue(
161
-            duplicates.compare_photos(
162
-                photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD
163
-            )
164
-        )
152
+        self.assertTrue(duplicates.compare_photos(photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD))
165 153
 
166 154
     def test_different_photos(self):
167 155
         """
@@ -256,9 +244,7 @@ class TestImageCache(unittest.TestCase):
256 244
     """
257 245
 
258 246
     def __init__(self, *args, **kwargs):
259
-        self.IMAGE_CACHE = ImageCache(  # pylint: disable=invalid-name
260
-            storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
261
-        )
247
+        self.IMAGE_CACHE = ImageCache(storage_dir=tempfile.mkdtemp(prefix="flatisfy-"))  # pylint: disable=invalid-name
262 248
         super(TestImageCache, self).__init__(*args, **kwargs)
263 249
 
264 250
     def test_invalid_url(self):
@@ -297,9 +283,7 @@ class TestDuplicates(unittest.TestCase):
297 283
         """
298 284
         Generates a fake flat post.
299 285
         """
300
-        backend = BACKENDS_BY_PRECEDENCE[
301
-            random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)
302
-        ]
286
+        backend = BACKENDS_BY_PRECEDENCE[random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)]
303 287
         return {
304 288
             "id": str(random.randint(100000, 199999)) + "@" + backend,
305 289
             "phone": "0607080910",
@@ -331,9 +315,7 @@ class TestDuplicates(unittest.TestCase):
331 315
         """
332 316
         flat1 = self.generate_fake_flat()
333 317
         flat2 = copy.deepcopy(flat1)
334
-        score = duplicates.get_duplicate_score(
335
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
336
-        )
318
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
337 319
         self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
338 320
 
339 321
     def test_different_prices(self):
@@ -344,9 +326,7 @@ class TestDuplicates(unittest.TestCase):
344 326
         flat2 = copy.deepcopy(flat1)
345 327
         flat2["cost"] += 1000
346 328
 
347
-        score = duplicates.get_duplicate_score(
348
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
349
-        )
329
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
350 330
         self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
351 331
 
352 332
     def test_different_rooms(self):
@@ -358,9 +338,7 @@ class TestDuplicates(unittest.TestCase):
358 338
         flat2 = copy.deepcopy(flat1)
359 339
         flat2["rooms"] += 1
360 340
 
361
-        score = duplicates.get_duplicate_score(
362
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
363
-        )
341
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
364 342
         self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
365 343
 
366 344
     def test_different_areas(self):
@@ -371,9 +349,7 @@ class TestDuplicates(unittest.TestCase):
371 349
         flat2 = copy.deepcopy(flat1)
372 350
         flat2["area"] += 10
373 351
 
374
-        score = duplicates.get_duplicate_score(
375
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
376
-        )
352
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
377 353
         self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
378 354
 
379 355
     def test_different_areas_decimals(self):
@@ -386,9 +362,7 @@ class TestDuplicates(unittest.TestCase):
386 362
         flat1["area"] = 50.65
387 363
         flat2["area"] = 50.37
388 364
 
389
-        score = duplicates.get_duplicate_score(
390
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
391
-        )
365
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
392 366
         self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
393 367
 
394 368
     def test_different_phones(self):
@@ -400,9 +374,7 @@ class TestDuplicates(unittest.TestCase):
400 374
         flat2 = copy.deepcopy(flat1)
401 375
         flat2["phone"] = "0708091011"
402 376
 
403
-        score = duplicates.get_duplicate_score(
404
-            flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD
405
-        )
377
+        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
406 378
         self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
407 379
 
408 380
     def test_real_duplicates(self):
@@ -412,9 +384,7 @@ class TestDuplicates(unittest.TestCase):
412 384
         """
413 385
         flats = self.load_files("127028739@seloger", "14428129@explorimmo")
414 386
 
415
-        score = duplicates.get_duplicate_score(
416
-            flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD
417
-        )
387
+        score = duplicates.get_duplicate_score(flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD)
418 388
         self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
419 389
 
420 390
         # TODO: fixme, find new testing examples

+ 7
- 21
flatisfy/tools.py View File

@@ -90,9 +90,7 @@ def convert_arabic_to_roman_in_text(text):
90 90
     :returns: The corresponding text with roman literals converted to
91 91
         arabic.
92 92
     """
93
-    return re.sub(
94
-        r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text
95
-    )
93
+    return re.sub(r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text)
96 94
 
97 95
 
98 96
 def hash_dict(func):
@@ -155,9 +153,7 @@ def pretty_json(data):
155 153
             "toto": "ok"
156 154
         }
157 155
     """
158
-    return json.dumps(
159
-        data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True
160
-    )
156
+    return json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True)
161 157
 
162 158
 
163 159
 def batch(iterable, size):
@@ -296,10 +292,7 @@ def distance(gps1, gps2):
296 292
     long2 = math.radians(gps2[1])
297 293
 
298 294
     # pylint: disable=locally-disabled,invalid-name
299
-    a = (
300
-        math.sin((lat2 - lat1) / 2.0) ** 2
301
-        + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
302
-    )
295
+    a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
303 296
     c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
304 297
     earth_radius = 6371000
305 298
 
@@ -329,9 +322,7 @@ def merge_dicts(*args):
329 322
     if len(args) == 1:
330 323
         return args[0]
331 324
 
332
-    flat1, flat2 = args[
333
-        :2
334
-    ]  # pylint: disable=locally-disabled,unbalanced-tuple-unpacking,line-too-long
325
+    flat1, flat2 = args[:2]  # pylint: disable=locally-disabled,unbalanced-tuple-unpacking,line-too-long
335 326
     merged_flat = {}
336 327
     for k, value2 in flat2.items():
337 328
         value1 = flat1.get(k, None)
@@ -408,9 +399,7 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
408 399
                         sections.append(
409 400
                             {
410 401
                                 "geojson": section["geojson"],
411
-                                "color": (
412
-                                    section["display_informations"].get("color", None)
413
-                                ),
402
+                                "color": (section["display_informations"].get("color", None)),
414 403
                             }
415 404
                         )
416 405
                     elif section["type"] == "street_network":
@@ -427,8 +416,7 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
427 416
             ) as exc:
428 417
                 # Ignore any possible exception
429 418
                 LOGGER.warning(
430
-                    "An exception occurred during travel time lookup on "
431
-                    "Navitia: %s.",
419
+                    "An exception occurred during travel time lookup on " "Navitia: %s.",
432 420
                     str(exc),
433 421
                 )
434 422
         else:
@@ -467,9 +455,7 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
467 455
                 route = response.geojson()["features"][0]
468 456
                 # Fix longitude/latitude inversion in geojson output
469 457
                 geometry = route["geometry"]
470
-                geometry["coordinates"] = [
471
-                    (x[1], x[0]) for x in geometry["coordinates"]
472
-                ]
458
+                geometry["coordinates"] = [(x[1], x[0]) for x in geometry["coordinates"]]
473 459
                 sections = [{"geojson": geometry, "color": "000"}]
474 460
                 travel_time = route["properties"]["duration"]
475 461
             except (requests.exceptions.RequestException, IndexError, KeyError) as exc:

+ 5
- 17
flatisfy/web/app.py View File

@@ -28,9 +28,7 @@ class QuietWSGIRefServer(bottle.WSGIRefServer):
28 28
     quiet = True
29 29
 
30 30
     def run(self, app):
31
-        app.log.info(
32
-            "Server is now up and ready! Listening on %s:%s." % (self.host, self.port)
33
-        )
31
+        app.log.info("Server is now up and ready! Listening on %s:%s." % (self.host, self.port))
34 32
         super(QuietWSGIRefServer, self).run(app)
35 33
 
36 34
 
@@ -61,11 +59,7 @@ def get_app(config):
61 59
     app.install(canister.Canister())
62 60
     # Use DateAwareJSONEncoder to dump JSON strings
63 61
     # From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666.  pylint: disable=locally-disabled,line-too-long
64
-    app.install(
65
-        bottle.JSONPlugin(
66
-            json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)
67
-        )
68
-    )
62
+    app.install(bottle.JSONPlugin(json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)))
69 63
 
70 64
     # Enable CORS
71 65
     @app.hook("after_request")
@@ -76,9 +70,7 @@ def get_app(config):
76 70
         # The str() call is required as we import unicode_literal and WSGI
77 71
         # headers list should have plain str type.
78 72
         bottle.response.headers[str("Access-Control-Allow-Origin")] = str("*")
79
-        bottle.response.headers[str("Access-Control-Allow-Methods")] = str(
80
-            "PUT, GET, POST, DELETE, OPTIONS, PATCH"
81
-        )
73
+        bottle.response.headers[str("Access-Control-Allow-Methods")] = str("PUT, GET, POST, DELETE, OPTIONS, PATCH")
82 74
         bottle.response.headers[str("Access-Control-Allow-Headers")] = str(
83 75
             "Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token"
84 76
         )
@@ -86,9 +78,7 @@ def get_app(config):
86 78
     # API v1 routes
87 79
     app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)
88 80
 
89
-    app.route(
90
-        "/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1
91
-    )
81
+    app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1)
92 82
 
93 83
     app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
94 84
     app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
@@ -130,9 +120,7 @@ def get_app(config):
130 120
     app.route(
131 121
         "/data/img/<filename:path>",
132 122
         "GET",
133
-        lambda filename: bottle.static_file(
134
-            filename, root=os.path.join(config["data_directory"], "images")
135
-        ),
123
+        lambda filename: bottle.static_file(filename, root=os.path.join(config["data_directory"], "images")),
136 124
     )
137 125
 
138 126
     return app

+ 5
- 17
flatisfy/web/routes/api.py View File

@@ -83,9 +83,7 @@ def _JSONApiSpec(query, model, default_sorting=None):
83 83
         try:
84 84
             sorting.append(getattr(model, default_sorting))
85 85
         except AttributeError:
86
-            raise ValueError(
87
-                "Invalid default sorting key provided: {}.".format(default_sorting)
88
-            )
86
+            raise ValueError("Invalid default sorting key provided: {}.".format(default_sorting))
89 87
 
90 88
     return filters, page_number, page_size, sorting
91 89
 
@@ -104,9 +102,7 @@ def _serialize_flat(flat, config):
104 102
 
105 103
     postal_codes = {}
106 104
     for constraint_name, constraint in config["constraints"].items():
107
-        postal_codes[constraint_name] = flatisfy.data.load_data(
108
-            PostalCode, constraint, config
109
-        )
105
+        postal_codes[constraint_name] = flatisfy.data.load_data(PostalCode, constraint, config)
110 106
 
111 107
     try:
112 108
         assert flat["flatisfy_postal_code"]
@@ -287,9 +283,7 @@ def time_to_places_v1(config):
287 283
     try:
288 284
         places = {}
289 285
         for constraint_name, constraint in config["constraints"].items():
290
-            places[constraint_name] = {
291
-                k: v["gps"] for k, v in constraint["time_to"].items()
292
-            }
286
+            places[constraint_name] = {k: v["gps"] for k, v in constraint["time_to"].items()}
293 287
         return {"data": places}
294 288
     except Exception as exc:  # pylint: disable= broad-except
295 289
         return JSONError(500, str(exc))
@@ -342,11 +336,7 @@ def search_v1(db, config):
342 336
         except ValueError as exc:
343 337
             return JSONError(400, str(exc))
344 338
 
345
-        flats_db_query = (
346
-            flat_model.Flat.search_query(db, query)
347
-            .filter_by(**filters)
348
-            .order_by(*sorting)
349
-        )
339
+        flats_db_query = flat_model.Flat.search_query(db, query).filter_by(**filters).order_by(*sorting)
350 340
         flats = [
351 341
             _serialize_flat(flat, config)
352 342
             for flat in itertools.islice(
@@ -381,9 +371,7 @@ def ics_feed_v1(config, db):
381 371
 
382 372
     cal = vobject.iCalendar()
383 373
     try:
384
-        flats_with_visits = db.query(flat_model.Flat).filter(
385
-            flat_model.Flat.visit_date.isnot(None)
386
-        )
374
+        flats_with_visits = db.query(flat_model.Flat).filter(flat_model.Flat.visit_date.isnot(None))
387 375
 
388 376
         for flat in flats_with_visits:
389 377
             vevent = cal.add("vevent")

+ 2
- 8
wsgi.py View File

@@ -13,9 +13,7 @@ from flatisfy.web import app as web_app
13 13
 
14 14
 
15 15
 class Args:
16
-    config = os.path.join(
17
-        os.path.dirname(os.path.realpath(__file__)), "config/config.json"
18
-    )
16
+    config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config/config.json")
19 17
 
20 18
 
21 19
 LOGGER = logging.getLogger("flatisfy")
@@ -23,11 +21,7 @@ LOGGER = logging.getLogger("flatisfy")
23 21
 
24 22
 CONFIG = flatisfy.config.load_config(Args())
25 23
 if CONFIG is None:
26
-    LOGGER.error(
27
-        "Invalid configuration. Exiting. "
28
-        "Run init-config before if this is the first time "
29
-        "you run Flatisfy."
30
-    )
24
+    LOGGER.error("Invalid configuration. Exiting. Run init-config before if this is the first time you run Flatisfy.")
31 25
     sys.exit(1)
32 26
 
33 27