
reformat with black

Gautier P 11 months ago
commit 42909bd46f
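This commit runs the documentation config and the flatisfy package through the Black autoformatter. The exact Black version and invocation are not recorded in the commit; the hunks are consistent with Black's defaults (88-character lines, double-quoted strings), i.e. something along the lines of: black doc/ flatisfy/. All of the changes below are behavior-preserving: quote styles are normalized, long calls are wrapped against the line-length limit, data-structure literals that overflow are exploded one element per line with trailing commas, and a redundant trailing semicolon in flatisfy/cmds.py is dropped.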

doc/conf.py (+47, -41)

@@ -18,7 +18,8 @@
 
 import os
 import sys
-sys.path.insert(0, os.path.abspath('..'))
+
+sys.path.insert(0, os.path.abspath(".."))
 
 # -- General configuration ------------------------------------------------
 
@@ -30,19 +31,19 @@ sys.path.insert(0, os.path.abspath('..'))
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.viewcode',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.viewcode",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
-source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
 source_parsers = {
-   '.md': 'recommonmark.parser.CommonMarkParser',
+    ".md": "recommonmark.parser.CommonMarkParser",
 }
 
 # The encoding of source files.
@@ -50,21 +51,21 @@ source_parsers = {
 # source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'Flatisfy'
-copyright = u'2017, Phyks (Lucas Verney)'
-author = u'Phyks (Lucas Verney)'
+project = u"Flatisfy"
+copyright = u"2017, Phyks (Lucas Verney)"
+author = u"Phyks (Lucas Verney)"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u'0.1'
+version = u"0.1"
 # The full version, including alpha/beta/rc tags.
-release = u'0.1'
+release = u"0.1"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -85,7 +86,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -107,7 +108,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -124,7 +125,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'classic'
+html_theme = "classic"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
@@ -158,7 +159,7 @@ html_theme = 'classic'
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
@@ -238,34 +239,36 @@ html_static_path = ['_static']
 # html_search_scorer = 'scorer.js'
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Flatisfydoc'
+htmlhelp_basename = "Flatisfydoc"
 
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
-     # The paper size ('letterpaper' or 'a4paper').
-     #
-     # 'papersize': 'letterpaper',
-
-     # The font size ('10pt', '11pt' or '12pt').
-     #
-     # 'pointsize': '10pt',
-
-     # Additional stuff for the LaTeX preamble.
-     #
-     # 'preamble': '',
-
-     # Latex figure (float) alignment
-     #
-     # 'figure_align': 'htbp',
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'Flatisfy.tex', u'Flatisfy Documentation',
-     u'Phyks (Lucas Verney)', 'manual'),
+    (
+        master_doc,
+        "Flatisfy.tex",
+        u"Flatisfy Documentation",
+        u"Phyks (Lucas Verney)",
+        "manual",
+    ),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -305,10 +308,7 @@ latex_documents = [
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'flatisfy', u'Flatisfy Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "flatisfy", u"Flatisfy Documentation", [author], 1)]
 
 # If true, show URL addresses after external links.
 #
@@ -321,9 +321,15 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Flatisfy', u'Flatisfy Documentation',
-     author, 'Flatisfy', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "Flatisfy",
+        u"Flatisfy Documentation",
+        author,
+        "Flatisfy",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.
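The doc/conf.py hunks are mostly string-quote normalization, plus a few literals reflowed against the 88-column limit. Black can reproduce any of these hunks programmatically through its library API; a minimal sketch, assuming a reasonably recent black is installed (Mode was still named FileMode in pre-20.8 releases):

    import black

    # Two lines from the hunks above, as they looked before this commit.
    src = "master_doc = 'index'\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']\n"

    # Black rewrites single-quoted strings to double quotes; short lines are
    # otherwise left alone, which is most of what happened in this file.
    print(black.format_str(src, mode=black.Mode()), end="")
    # master_doc = "index"
    # exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]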

flatisfy/__main__.py (+49, -64)

@@ -17,6 +17,7 @@ from flatisfy import data
 from flatisfy import fetch
 from flatisfy import tools
 from flatisfy import tests
+
 # pylint: enable=locally-disabled,wrong-import-position
 
 
@@ -27,68 +28,59 @@ def parse_args(argv=None):
     """
     Create parser and parse arguments.
     """
-    parser = argparse.ArgumentParser(prog="Flatisfy",
-                                     description="Find the perfect flat.")
+    parser = argparse.ArgumentParser(
+        prog="Flatisfy", description="Find the perfect flat."
+    )
 
     # Parent parser containing arguments common to any subcommand
     parent_parser = argparse.ArgumentParser(add_help=False)
     parent_parser.add_argument(
-        "--data-dir",
-        help="Location of Flatisfy data directory."
-    )
-    parent_parser.add_argument(
-        "--config",
-        help="Configuration file to use."
+        "--data-dir", help="Location of Flatisfy data directory."
     )
+    parent_parser.add_argument("--config", help="Configuration file to use.")
     parent_parser.add_argument(
-        "--passes", choices=[0, 1, 2, 3], type=int,
-        help="Number of passes to do on the filtered data."
+        "--passes",
+        choices=[0, 1, 2, 3],
+        type=int,
+        help="Number of passes to do on the filtered data.",
     )
     parent_parser.add_argument(
-        "--max-entries", type=int,
-        help="Maximum number of entries to fetch."
+        "--max-entries", type=int, help="Maximum number of entries to fetch."
     )
     parent_parser.add_argument(
-        "-v", "--verbose", action="store_true",
-        help="Verbose logging output."
+        "-v", "--verbose", action="store_true", help="Verbose logging output."
     )
+    parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
     parent_parser.add_argument(
-        "-vv", action="store_true",
-        help="Debug logging output."
-    )
-    parent_parser.add_argument(
-        "--constraints", type=str,
-        help="Comma-separated list of constraints to consider."
+        "--constraints",
+        type=str,
+        help="Comma-separated list of constraints to consider.",
     )
 
     # Subcommands
-    subparsers = parser.add_subparsers(
-        dest="cmd", help="Available subcommands"
-    )
+    subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
 
     # Build data subcommand
     subparsers.add_parser(
-        "build-data", parents=[parent_parser],
-        help="Build necessary data"
+        "build-data", parents=[parent_parser], help="Build necessary data"
     )
 
     # Init config subcommand
     parser_init_config = subparsers.add_parser(
-        "init-config", parents=[parent_parser],
-        help="Initialize empty configuration."
+        "init-config", parents=[parent_parser], help="Initialize empty configuration."
     )
     parser_init_config.add_argument(
         "output", nargs="?", help="Output config file. Use '-' for stdout."
     )
 
     # Fetch subcommand parser
-    subparsers.add_parser("fetch", parents=[parent_parser],
-                          help="Fetch housings posts")
+    subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
 
     # Filter subcommand parser
     parser_filter = subparsers.add_parser(
-        "filter", parents=[parent_parser],
-        help="Filter housings posts according to constraints in config."
+        "filter",
+        parents=[parent_parser],
+        help="Filter housings posts according to constraints in config.",
    )
     parser_filter.add_argument(
         "--input",
@@ -97,34 +89,31 @@ def parse_args(argv=None):
             "no additional fetching of infos is done, and the script outputs "
             "a filtered JSON dump on stdout. If not provided, update status "
             "of the flats in the database."
-        )
+        ),
     )
 
     # Import subcommand parser
     import_filter = subparsers.add_parser(
-                        "import", parents=[parent_parser],
-                        help="Import housing posts in database.")
+        "import", parents=[parent_parser], help="Import housing posts in database."
+    )
     import_filter.add_argument(
         "--new-only",
         action="store_true",
-        help=(
-            "Download new housing posts only but do not refresh existing ones"
-        )
+        help=("Download new housing posts only but do not refresh existing ones"),
     )
 
     # Purge subcommand parser
-    subparsers.add_parser("purge", parents=[parent_parser],
-                          help="Purge database.")
+    subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
 
     # Serve subcommand parser
-    parser_serve = subparsers.add_parser("serve", parents=[parent_parser],
-                                         help="Serve the web app.")
+    parser_serve = subparsers.add_parser(
+        "serve", parents=[parent_parser], help="Serve the web app."
+    )
     parser_serve.add_argument("--port", type=int, help="Port to bind to.")
     parser_serve.add_argument("--host", help="Host to listen on.")
 
     # Test subcommand parser
-    subparsers.add_parser("test", parents=[parent_parser],
-                          help="Unit testing.")
+    subparsers.add_parser("test", parents=[parent_parser], help="Unit testing.")
 
     return parser.parse_args(argv)
 
@@ -139,15 +128,15 @@ def main():
 
     # Set logger
     if args.vv:
-        logging.getLogger('').setLevel(logging.DEBUG)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
+        logging.getLogger("").setLevel(logging.DEBUG)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.DEBUG)
     elif args.verbose:
-        logging.getLogger('').setLevel(logging.INFO)
+        logging.getLogger("").setLevel(logging.INFO)
         # sqlalchemy INFO level is way too loud, just stick with WARNING
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
     else:
-        logging.getLogger('').setLevel(logging.WARNING)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("").setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
 
     # Init-config command
     if args.cmd == "init-config":
@@ -161,9 +150,11 @@ def main():
         else:
             config = flatisfy.config.load_config(args, check_with_data=True)
         if config is None:
-            LOGGER.error("Invalid configuration. Exiting. "
-                         "Run init-config before if this is the first time "
-                         "you run Flatisfy.")
+            LOGGER.error(
+                "Invalid configuration. Exiting. "
+                "Run init-config before if this is the first time "
+                "you run Flatisfy."
+            )
             sys.exit(1)
 
     # Purge command
@@ -179,18 +170,16 @@ def main():
     if args.cmd == "fetch":
         # Fetch and filter flats list
         fetched_flats = fetch.fetch_flats(config)
-        fetched_flats = cmds.filter_fetched_flats(config,
-                                                  fetched_flats=fetched_flats,
-                                                  fetch_details=True)
+        fetched_flats = cmds.filter_fetched_flats(
+            config, fetched_flats=fetched_flats, fetch_details=True
+        )
         # Sort by cost
         fetched_flats = {
             k: tools.sort_list_of_dicts_by(v["new"], "cost")
             for k, v in fetched_flats.items()
         }
 
-        print(
-            tools.pretty_json(fetched_flats)
-        )
+        print(tools.pretty_json(fetched_flats))
         return
     # Filter command
     elif args.cmd == "filter":
@@ -199,9 +188,7 @@ def main():
             fetched_flats = fetch.load_flats_from_file(args.input, config)
 
             fetched_flats = cmds.filter_fetched_flats(
-                config,
-                fetched_flats=fetched_flats,
-                fetch_details=False
+                config, fetched_flats=fetched_flats, fetch_details=False
             )
 
             # Sort by cost
@@ -211,9 +198,7 @@ def main():
             }
 
             # Output to stdout
-            print(
-                tools.pretty_json(fetched_flats)
-            )
+            print(tools.pretty_json(fetched_flats))
         else:
             cmds.import_and_filter(config, load_from_db=True)
         return

flatisfy/cmds.py (+29, -33)

@@ -23,7 +23,9 @@ import time
 LOGGER = logging.getLogger(__name__)
 
 
-def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
+def filter_flats_list(
+    config, constraint_name, flats_list, fetch_details=True, past_flats=None
+):
     """
     Filter the available flats list. Then, filter it according to criteria.
 
@@ -45,13 +47,9 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, p
     except KeyError:
         LOGGER.error(
             "Missing constraint %s. Skipping filtering for these posts.",
-            constraint_name
+            constraint_name,
         )
-        return {
-            "new": [],
-            "duplicate": [],
-            "ignored": []
-        }
+        return {"new": [], "duplicate": [], "ignored": []}
 
     first_pass_result = collections.defaultdict(list)
     second_pass_result = collections.defaultdict(list)
@@ -59,9 +57,7 @@
     # Do a first pass with the available infos to try to remove as much
     # unwanted postings as possible
     if config["passes"] > 0:
-        first_pass_result = flatisfy.filters.first_pass(flats_list,
-                                                        constraint,
-                                                        config)
+        first_pass_result = flatisfy.filters.first_pass(flats_list, constraint, config)
     else:
         first_pass_result["new"] = flats_list
 
@@ -95,8 +91,7 @@
     # Do a third pass to deduplicate better
     if config["passes"] > 2:
         third_pass_result = flatisfy.filters.third_pass(
-            second_pass_result["new"],
-            config
+            second_pass_result["new"], config
         )
     else:
         third_pass_result["new"] = second_pass_result["new"]
@@ -104,15 +99,15 @@
     return {
         "new": third_pass_result["new"],
         "duplicate": (
-            first_pass_result["duplicate"] +
-            second_pass_result["duplicate"] +
-            third_pass_result["duplicate"]
+            first_pass_result["duplicate"]
+            + second_pass_result["duplicate"]
+            + third_pass_result["duplicate"]
         ),
         "ignored": (
-            first_pass_result["ignored"] +
-            second_pass_result["ignored"] +
-            third_pass_result["ignored"]
-        )
+            first_pass_result["ignored"]
+            + second_pass_result["ignored"]
+            + third_pass_result["ignored"]
+        ),
     }
 
 
@@ -134,7 +129,7 @@ def filter_fetched_flats(config, fetched_flats, fetch_details=True, past_flats={
             constraint_name,
             flats_list,
             fetch_details,
-            past_flats.get(constraint_name, None)
+            past_flats.get(constraint_name, None),
         )
     return fetched_flats
 
@@ -156,9 +151,12 @@ def import_and_filter(config, load_from_db=False, new_only=False):
     else:
         fetched_flats = fetch.fetch_flats(config)
     # Do not fetch additional details if we loaded data from the db.
-    flats_by_status = filter_fetched_flats(config, fetched_flats=fetched_flats,
-                                           fetch_details=(not load_from_db),
-                                           past_flats=past_flats if new_only else {})
+    flats_by_status = filter_fetched_flats(
+        config,
+        fetched_flats=fetched_flats,
+        fetch_details=(not load_from_db),
+        past_flats=past_flats if new_only else {},
+    )
     # Create database connection
     get_session = database.init_db(config["database"], config["search_index"])
 
@@ -175,7 +173,7 @@
         # Set is_expired to true for all existing flats.
         # This will be set back to false if we find them during importing.
         for flat in session.query(flat_model.Flat).all():
-            flat.is_expired = True;
+            flat.is_expired = True
 
         for status, flats_list in flatten_flats_by_status.items():
             # Build SQLAlchemy Flat model objects for every available flat
@@ -195,9 +193,7 @@
                     # status if the user defined it
                     flat_object = flats_objects[each.id]
                     if each.status in flat_model.AUTOMATED_STATUSES:
-                        flat_object.status = getattr(
-                            flat_model.FlatStatus, status
-                        )
+                        flat_object.status = getattr(flat_model.FlatStatus, status)
                     else:
                         flat_object.status = each.status
 
@@ -223,11 +219,8 @@
     LOGGER.info(f"Found {len(new_flats)} new flats.")
 
     # Touch a file to indicate last update timestamp
-    ts_file = os.path.join(
-        config["data_directory"],
-        "timestamp"
-    )
-    with open(ts_file, 'w'):
+    ts_file = os.path.join(config["data_directory"], "timestamp")
+    with open(ts_file, "w"):
         os.utime(ts_file, None)
 
     LOGGER.info("Done!")
@@ -270,5 +263,8 @@ def serve(config):
         # standard logging
         server = web_app.QuietWSGIRefServer
 
-    print("Launching web viewer running on http://%s:%s" % (config["host"], config["port"]))
+    print(
+        "Launching web viewer running on http://%s:%s"
+        % (config["host"], config["port"])
+    )
     app.run(host=config["host"], port=config["port"], server=server)

flatisfy/config.py (+67, -47)

@@ -30,7 +30,7 @@ DEFAULT_CONFIG = {
         "default": {
             "type": None,  # RENT, SALE, SHARING
             "house_types": [],  # List of house types, must be in APART, HOUSE,
-                                # PARKING, LAND, OTHER or UNKNOWN
+            # PARKING, LAND, OTHER or UNKNOWN
             "postal_codes": [],  # List of postal codes
             "area": (None, None),  # (min, max) in m^2
             "cost": (None, None),  # (min, max) in currency unit
@@ -42,12 +42,12 @@ DEFAULT_CONFIG = {
                 "vendu",
                 "Vendu",
                 "VENDU",
-                "recherche"
+                "recherche",
             ],
             "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
-                           #                        "time": (min, max),
-                           #                        "mode": Valid mode }
-                           # Time is in seconds
+            #                        "time": (min, max),
+            #                        "mode": Valid mode }
+            # Time is in seconds
         }
     },
     # Whether or not to store personal data from housing posts (phone number
@@ -91,7 +91,7 @@ DEFAULT_CONFIG = {
     "backends": None,
     # Should email notifications be sent?
     "send_email": False,
-    "smtp_server": 'localhost',
+    "smtp_server": "localhost",
     "smtp_port": 25,
     "smtp_username": None,
     "smtp_password": None,
@@ -115,6 +115,7 @@ def validate_config(config, check_with_data):
        check the config values.
     :return: ``True`` if the configuration is valid, ``False`` otherwise.
     """
+
     def _check_constraints_bounds(bounds):
         """
         Check the bounds for numeric constraints.
@@ -122,12 +123,7 @@
         assert isinstance(bounds, list)
         assert len(bounds) == 2
         assert all(
-            x is None or
-            (
-                isinstance(x, (float, int)) and
-                x >= 0
-            )
-            for x in bounds
+            x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds
         )
         if bounds[0] is not None and bounds[1] is not None:
             assert bounds[1] > bounds[0]
@@ -140,25 +136,45 @@
         # pylint: disable=locally-disabled,line-too-long
 
         assert config["passes"] in [0, 1, 2, 3]
-        assert config["max_entries"] is None or (isinstance(config["max_entries"], int) and config["max_entries"] > 0)  # noqa: E501
+        assert config["max_entries"] is None or (
+            isinstance(config["max_entries"], int) and config["max_entries"] > 0
+        )  # noqa: E501
 
-        assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
+        assert config["data_directory"] is None or isinstance(
+            config["data_directory"], str
+        )  # noqa: E501
         assert os.path.isdir(config["data_directory"])
         assert isinstance(config["search_index"], str)
-        assert config["modules_path"] is None or isinstance(config["modules_path"], str)  # noqa: E501
+        assert config["modules_path"] is None or isinstance(
+            config["modules_path"], str
+        )  # noqa: E501
 
-        assert config["database"] is None or isinstance(config["database"], str)  # noqa: E501
+        assert config["database"] is None or isinstance(
+            config["database"], str
+        )  # noqa: E501
 
         assert isinstance(config["port"], int)
         assert isinstance(config["host"], str)
-        assert config["webserver"] is None or isinstance(config["webserver"], str)  # noqa: E501
-        assert config["backends"] is None or isinstance(config["backends"], list)  # noqa: E501
+        assert config["webserver"] is None or isinstance(
+            config["webserver"], str
+        )  # noqa: E501
+        assert config["backends"] is None or isinstance(
+            config["backends"], list
+        )  # noqa: E501
 
         assert isinstance(config["send_email"], bool)
-        assert config["smtp_server"] is None or isinstance(config["smtp_server"], str)  # noqa: E501
-        assert config["smtp_port"] is None or isinstance(config["smtp_port"], int)  # noqa: E501
-        assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
-        assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
+        assert config["smtp_server"] is None or isinstance(
+            config["smtp_server"], str
+        )  # noqa: E501
+        assert config["smtp_port"] is None or isinstance(
+            config["smtp_port"], int
+        )  # noqa: E501
+        assert config["smtp_username"] is None or isinstance(
+            config["smtp_username"], str
+        )  # noqa: E501
+        assert config["smtp_password"] is None or isinstance(
+            config["smtp_password"], str
+        )  # noqa: E501
         assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
 
         assert isinstance(config["store_personal_data"], bool)
@@ -167,10 +183,16 @@ def validate_config(config, check_with_data):
         assert isinstance(config["duplicate_image_hash_threshold"], int)
 
         # API keys
-        assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
-        assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501
+        assert config["navitia_api_key"] is None or isinstance(
+            config["navitia_api_key"], str
+        )  # noqa: E501
+        assert config["mapbox_api_key"] is None or isinstance(
+            config["mapbox_api_key"], str
+        )  # noqa: E501
 
-        assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501
+        assert config["ignore_station"] is None or isinstance(
+            config["ignore_station"], bool
+        )  # noqa: E501
 
         # Ensure constraints are ok
         assert config["constraints"]
@@ -191,8 +213,7 @@ def validate_config(config, check_with_data):
                     assert isinstance(term, str)
 
             assert "description_should_not_contain" in constraint
-            assert isinstance(constraint["description_should_not_contain"],
-                              list)
+            assert isinstance(constraint["description_should_not_contain"], list)
             if constraint["description_should_not_contain"]:
                 for term in constraint["description_should_not_contain"]:
                     assert isinstance(term, str)
@@ -269,20 +290,19 @@ def load_config(args=None, check_with_data=True):
             LOGGER.error(
                 "Unable to load configuration from file, "
                 "using default configuration: %s.",
-                exc
+                exc,
             )
 
     # Overload config with arguments
     if args and getattr(args, "passes", None) is not None:
         LOGGER.debug(
-            "Overloading number of passes from CLI arguments: %d.",
-            args.passes
+            "Overloading number of passes from CLI arguments: %d.", args.passes
         )
         config_data["passes"] = args.passes
     if args and getattr(args, "max_entries", None) is not None:
         LOGGER.debug(
             "Overloading maximum number of entries from CLI arguments: %d.",
-            args.max_entries
+            args.max_entries,
         )
         config_data["max_entries"] = args.max_entries
     if args and getattr(args, "port", None) is not None:
@@ -297,37 +317,37 @@ def load_config(args=None, check_with_data=True):
         LOGGER.debug("Overloading data directory from CLI arguments.")
         config_data["data_directory"] = args.data_dir
     elif config_data["data_directory"] is None:
-        config_data["data_directory"] = appdirs.user_data_dir(
-            "flatisfy",
-            "flatisfy"
+        config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
+        LOGGER.debug(
+            "Using default XDG data directory: %s.", config_data["data_directory"]
        )
-        LOGGER.debug("Using default XDG data directory: %s.",
-                     config_data["data_directory"])
 
     if not os.path.isdir(config_data["data_directory"]):
-        LOGGER.info("Creating data directory according to config: %s",
-                    config_data["data_directory"])
+        LOGGER.info(
+            "Creating data directory according to config: %s",
+            config_data["data_directory"],
+        )
         os.makedirs(config_data["data_directory"])
         os.makedirs(os.path.join(config_data["data_directory"], "images"))
 
     if config_data["database"] is None:
         config_data["database"] = "sqlite:///" + os.path.join(
-            config_data["data_directory"],
-            "flatisfy.db"
+            config_data["data_directory"], "flatisfy.db"
        )
 
     if config_data["search_index"] is None:
         config_data["search_index"] = os.path.join(
-            config_data["data_directory"],
-            "search_index"
+            config_data["data_directory"], "search_index"
        )
 
     # Handle constraints filtering
     if args and getattr(args, "constraints", None) is not None:
         LOGGER.info(
-            ("Filtering constraints from config according to CLI argument. "
-             "Using only the following constraints: %s."),
-            args.constraints.replace(",", ", ")
+            (
+                "Filtering constraints from config according to CLI argument. "
+                "Using only the following constraints: %s."
+            ),
+            args.constraints.replace(",", ", "),
        )
         constraints_filter = args.constraints.split(",")
         config_data["constraints"] = {
@@ -338,8 +358,8 @@
 
     # Sanitize website url
     if config_data["website_url"] is not None:
-        if config_data["website_url"][-1] != '/':
-            config_data["website_url"] += '/'
+        if config_data["website_url"][-1] != "/":
+            config_data["website_url"] += "/"
 
     config_validation = validate_config(config_data, check_with_data)
     if config_validation is True:
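A side effect visible throughout flatisfy/config.py: the noqa: E501 markers existed to silence flake8's line-length warning on the old one-line asserts. Black keeps each trailing comment attached to the line it ends, so after wrapping, the markers land on the short closing-parenthesis lines, where they are now vestigial. A self-contained illustration of the before/after pattern (the stand-in config value is hypothetical):

    config = {"database": None}  # hypothetical stand-in so the snippet runs

    # Before this commit: one long line, with flake8's E501 check suppressed.
    assert config["database"] is None or isinstance(config["database"], str)  # noqa: E501

    # After black: wrapped under 88 columns; the noqa marker survives on the
    # closing line but no longer suppresses anything.
    assert config["database"] is None or isinstance(
        config["database"], str
    )  # noqa: E501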

flatisfy/constants.py (+1, -1)

@@ -16,7 +16,7 @@ BACKENDS_BY_PRECEDENCE = [
     "pap",
     "leboncoin",
     "explorimmo",
-    "logicimmo"
+    "logicimmo",
 ]
 
 

flatisfy/data.py (+5, -6)

@@ -24,11 +24,13 @@ except ImportError:
     try:
         from functools32 import lru_cache
     except ImportError:
+
         def lru_cache(maxsize=None):  # pylint: disable=unused-argument
             """
             Identity implementation of ``lru_cache`` for fallback.
             """
             return lambda func: func
+
         LOGGER.warning(
             "`functools.lru_cache` is not available on your system. Consider "
             "installing `functools32` Python module if using Python2 for "
@@ -49,8 +51,8 @@ def preprocess_data(config, force=False):
     get_session = database.init_db(config["database"], config["search_index"])
     with get_session() as session:
         is_built = (
-            session.query(PublicTransport).count() > 0 and
-            session.query(PostalCode).count() > 0
+            session.query(PublicTransport).count() > 0
+            and session.query(PostalCode).count() > 0
         )
         if is_built and not force:
             # No need to rebuild the database, skip
@@ -96,10 +98,7 @@ def load_data(model, constraint, config):
         # Load data for each area
         areas = list(set(areas))
         for area in areas:
-            results.extend(
-                session.query(model)
-                .filter(model.area == area).all()
-            )
+            results.extend(session.query(model).filter(model.area == area).all())
         # Expunge loaded data from the session to be able to use them
         # afterwards
         session.expunge_all()

flatisfy/data_files/__init__.py (+76, -44)

@@ -24,8 +24,8 @@ MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
 
 titlecase.set_small_word_list(
     # Add French small words
-    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|" +
-    titlecase.SMALL
+    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|"
+    + titlecase.SMALL
 )
 
 TRANSPORT_DATA_FILES = {
@@ -33,7 +33,7 @@ TRANSPORT_DATA_FILES = {
     "FR-NW": "stops_fr-nw.txt",
     "FR-NE": "stops_fr-ne.txt",
     "FR-SW": "stops_fr-sw.txt",
-    "FR-SE": "stops_fr-se.txt"
+    "FR-SE": "stops_fr-se.txt",
 }
 
 
@@ -51,8 +51,20 @@ def french_postal_codes_to_quarter(postal_code):
     # French departements
     # Taken from Wikipedia data.
     department_to_subdivision = {
-        "FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43", "63", "69",
-                   "73", "74"],
+        "FR-ARA": [
+            "01",
+            "03",
+            "07",
+            "15",
+            "26",
+            "38",
+            "42",
+            "43",
+            "63",
+            "69",
+            "73",
+            "74",
+        ],
         "FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
         "FR-BRE": ["22", "29", "35", "44", "56"],
         "FR-CVL": ["18", "28", "36", "37", "41", "45"],
@@ -61,19 +73,44 @@
         "FR-HDF": ["02", "59", "60", "62", "80"],
         "FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
         "FR-NOR": ["14", "27", "50", "61", "76"],
-        "FR-NAQ": ["16", "17", "19", "23", "24", "33", "40", "47", "64", "79",
-                   "86", "87"],
-        "FR-OCC": ["09", "11", "12", "30", "31", "32", "34", "46", "48", "65",
-                   "66", "81", "82"],
+        "FR-NAQ": [
+            "16",
+            "17",
+            "19",
+            "23",
+            "24",
+            "33",
+            "40",
+            "47",
+            "64",
+            "79",
+            "86",
+            "87",
+        ],
+        "FR-OCC": [
+            "09",
+            "11",
+            "12",
+            "30",
+            "31",
+            "32",
+            "34",
+            "46",
+            "48",
+            "65",
+            "66",
+            "81",
+            "82",
+        ],
         "FR-PDL": ["44", "49", "53", "72", "85"],
-        "FR-PAC": ["04", "05", "06", "13", "83", "84"]
+        "FR-PAC": ["04", "05", "06", "13", "83", "84"],
     }
     subdivision_to_quarters = {
-        'FR-IDF': ['FR-IDF'],
-        'FR-NW': ['FR-BRE', 'FR-CVL', 'FR-NOR', 'FR-PDL'],
-        'FR-NE': ['FR-BFC', 'FR-GES', 'FR-HDF'],
-        'FR-SE': ['FR-ARA', 'FR-COR', 'FR-PAC', 'FR-OCC'],
-        'FR-SW': ['FR-NAQ']
+        "FR-IDF": ["FR-IDF"],
+        "FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
+        "FR-NE": ["FR-BFC", "FR-GES", "FR-HDF"],
+        "FR-SE": ["FR-ARA", "FR-COR", "FR-PAC", "FR-OCC"],
+        "FR-SW": ["FR-NAQ"],
     }
 
     subdivision = next(
@@ -82,7 +119,7 @@ def french_postal_codes_to_quarter(postal_code):
             for i, departments in department_to_subdivision.items()
             if departement in departments
         ),
-        None
+        None,
     )
     return next(
         (
@@ -90,7 +127,7 @@ def french_postal_codes_to_quarter(postal_code):
             for i, subdivisions in subdivision_to_quarters.items()
             if subdivision in subdivisions
         ),
-        None
+        None,
     )
 
 
@@ -106,9 +143,7 @@ def _preprocess_laposte():
     raw_laposte_data = []
     # Load opendata file
     try:
-        with io.open(
-            os.path.join(MODULE_DIR, data_file), "r", encoding='utf-8'
-        ) as fh:
+        with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
             raw_laposte_data = json.load(fh)
     except (IOError, ValueError):
         LOGGER.error("Invalid raw LaPoste opendata file.")
@@ -126,29 +161,31 @@ def _preprocess_laposte():
             if area is None:
                 LOGGER.info(
                     "No matching area found for postal code %s, skipping it.",
-                    fields["code_postal"]
+                    fields["code_postal"],
                 )
                 continue
 
             name = normalize_string(
-                titlecase.titlecase(fields["nom_de_la_commune"]),
-                lowercase=False
+                titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False
             )
 
             if (fields["code_postal"], name) in seen_postal_codes:
                 continue
 
             seen_postal_codes.append((fields["code_postal"], name))
-            postal_codes_data.append(PostalCode(
-                area=area,
-                postal_code=fields["code_postal"],
-                name=name,
-                lat=fields["coordonnees_gps"][0],
-                lng=fields["coordonnees_gps"][1]
-            ))
+            postal_codes_data.append(
+                PostalCode(
+                    area=area,
+                    postal_code=fields["code_postal"],
+                    name=name,
+                    lat=fields["coordonnees_gps"][0],
+                    lng=fields["coordonnees_gps"][1],
+                )
+            )
         except KeyError:
-            LOGGER.info("Missing data for postal code %s, skipping it.",
-                        fields["code_postal"])
+            LOGGER.info(
+                "Missing data for postal code %s, skipping it.", fields["code_postal"]
+            )
 
     return postal_codes_data
 
@@ -164,17 +201,15 @@ def _preprocess_public_transport():
     for area, data_file in TRANSPORT_DATA_FILES.items():
         LOGGER.info("Building from public transport data %s.", data_file)
         try:
-            with io.open(os.path.join(MODULE_DIR, data_file), "r",
-                         encoding='utf-8') as fh:
+            with io.open(
+                os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8"
+            ) as fh:
                 filereader = csv.reader(fh)
                 next(filereader, None)  # Skip first row (headers)
                 for row in filereader:
-                    public_transport_data.append(PublicTransport(
-                        name=row[2],
-                        area=area,
-                        lat=row[3],
-                        lng=row[4]
-                    ))
+                    public_transport_data.append(
+                        PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4])
+                    )
         except (IOError, IndexError):
             LOGGER.error("Invalid raw opendata file: %s.", data_file)
             return []
@@ -183,7 +218,4 @@ def _preprocess_public_transport():
 
 
 # List of all the available preprocessing functions. Order can be important.
-PREPROCESSING_FUNCTIONS = [
-    _preprocess_laposte,
-    _preprocess_public_transport
-]
+PREPROCESSING_FUNCTIONS = [_preprocess_laposte, _preprocess_public_transport]
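The exploded department tables above show Black's handling of data-structure literals: a literal that does not fit within the line length (88 columns by default; nothing in this commit suggests a custom setting) is split one element per line with a trailing comma, while entries that fit, such as "FR-BFC", stay inline. A runnable sketch of both outcomes:

    import black

    # At this indentation the "FR-ARA" line is 91 characters, past the default
    # 88-column limit, so black explodes the list one element per line; the
    # shorter "FR-BFC" entry fits and is left as-is.
    src = (
        "def quarters():\n"
        "    department_to_subdivision = {\n"
        '        "FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],\n'
        '        "FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43",'
        ' "63", "69", "73", "74"],\n'
        "    }\n"
        "    return department_to_subdivision\n"
    )
    print(black.format_str(src, mode=black.Mode()), end="")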

flatisfy/database/__init__.py (+1, -3)

@@ -47,9 +47,7 @@ def init_db(database_uri=None, search_db_uri=None):
     Session = sessionmaker(bind=engine)  # pylint: disable=locally-disabled,invalid-name
 
     if search_db_uri:
-        index_service = IndexService(
-            whoosh_base=search_db_uri
-        )
+        index_service = IndexService(whoosh_base=search_db_uri)
         index_service.register_class(flatisfy.models.flat.Flat)
 
     @contextmanager

flatisfy/database/types.py (+1, -1)

@@ -50,4 +50,4 @@ class StringyJSON(types.TypeDecorator):
 # TypeEngine.with_variant says "use StringyJSON instead when
 # connecting to 'sqlite'"
 # pylint: disable=locally-disabled,invalid-name
-MagicJSON = types.JSON().with_variant(StringyJSON, 'sqlite')
+MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")

flatisfy/database/whooshalchemy.py (+14, -14)

@@ -30,7 +30,6 @@ from whoosh.qparser import MultifieldParser
 
 
 class IndexService(object):
-
     def __init__(self, config=None, whoosh_base=None):
         if not whoosh_base and config:
             whoosh_base = config.get("WHOOSH_BASE")
@@ -84,8 +83,7 @@
                 primary = field.name
                 continue
             if field.name in model_class.__searchable__:
-                schema[field.name] = whoosh.fields.TEXT(
-                    analyzer=StemmingAnalyzer())
+                schema[field.name] = whoosh.fields.TEXT(analyzer=StemmingAnalyzer())
         return Schema(**schema), primary
 
     def before_commit(self, session):
@@ -93,21 +91,24 @@
 
         for model in session.new:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
+            if hasattr(model_class, "__searchable__"):
                 self.to_update.setdefault(model_class.__name__, []).append(
-                    ("new", model))
+                    ("new", model)
+                )
 
         for model in session.deleted:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
+            if hasattr(model_class, "__searchable__"):
                 self.to_update.setdefault(model_class.__name__, []).append(
-                    ("deleted", model))
+                    ("deleted", model)
+                )
 
         for model in session.dirty:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
+            if hasattr(model_class, "__searchable__"):
                 self.to_update.setdefault(model_class.__name__, []).append(
-                    ("changed", model))
+                    ("changed", model)
+                )
 
     def after_commit(self, session):
         """
@@ -129,11 +130,11 @@
                     # update.
 
                     writer.delete_by_term(
-                        primary_field, text_type(getattr(model, primary_field)))
+                        primary_field, text_type(getattr(model, primary_field))
+                    )
 
                     if change_type in ("new", "changed"):
-                        attrs = dict((key, getattr(model, key))
-                                     for key in searchable)
+                        attrs = dict((key, getattr(model, key)) for key in searchable)
                         attrs = {
                             attr: text_type(getattr(model, attr))
                             for attr in attrs.keys()
@@ -158,8 +159,7 @@ class Searcher(object):
         self.parser = MultifieldParser(list(fields), index.schema)
 
     def __call__(self, session, query, limit=None):
-        results = self.index.searcher().search(
-            self.parser.parse(query), limit=limit)
+        results = self.index.searcher().search(self.parser.parse(query), limit=limit)
 
         keys = [x[self.primary] for x in results]
         primary_column = getattr(self.model_class, self.primary)

flatisfy/email.py (+30, -28)

@@ -16,7 +16,9 @@ from email.utils import formatdate, make_msgid
 LOGGER = logging.getLogger(__name__)
 
 
-def send_email(server, port, subject, _from, _to, txt, html, username=None, password=None):
+def send_email(
+    server, port, subject, _from, _to, txt, html, username=None, password=None
+):
     """
     Send an email
 
@@ -36,15 +38,15 @@ def send_email(server, port, subject, _from, _to, txt, html, username=None, pass
     if username or password:
         server.login(username or "", password or "")
 
-    msg = MIMEMultipart('alternative')
-    msg['Subject'] = subject
-    msg['From'] = _from
-    msg['To'] = ', '.join(_to)
-    msg['Date'] = formatdate()
-    msg['Message-ID'] = make_msgid()
+    msg = MIMEMultipart("alternative")
+    msg["Subject"] = subject
+    msg["From"] = _from
+    msg["To"] = ", ".join(_to)
+    msg["Date"] = formatdate()
+    msg["Message-ID"] = make_msgid()
 
-    msg.attach(MIMEText(txt, 'plain', 'utf-8'))
-    msg.attach(MIMEText(html, 'html', 'utf-8'))
+    msg.attach(MIMEText(txt, "plain", "utf-8"))
+    msg.attach(MIMEText(html, "html", "utf-8"))
 
     server.sendmail(_from, _to, msg.as_string())
     server.quit()
@@ -61,7 +63,7 @@ def send_notification(config, flats):
     if not flats:
         return
 
-    txt = u'Hello dear user,\n\nThe following new flats have been found:\n\n'
+    txt = "Hello dear user,\n\nThe following new flats have been found:\n\n"
     html = """
     <html>
       <head></head>
@@ -81,10 +83,8 @@
         cost = str(flat.cost)
         currency = str(flat.currency)
 
-        txt += (
-            '- {}: {}#/flat/{} (area: {}, cost: {} {})\n'.format(
-                title, website_url, flat_id, area, cost, currency
-            )
+        txt += "- {}: {}#/flat/{} (area: {}, cost: {} {})\n".format(
+            title, website_url, flat_id, area, cost, currency
        )
 
         html += """
@@ -92,26 +92,28 @@
                 <a href="{}#/flat/{}">{}</a>
                 (area: {}, cost: {} {})
             </li>
-        """.format(website_url, flat_id, title, area, cost, currency)
+        """.format(
+            website_url, flat_id, title, area, cost, currency
+        )
 
     html += "</ul>"
 
-    signature = (
-        u"\nHope you'll find what you were looking for.\n\nBye!\nFlatisfy"
-    )
+    signature = "\nHope you'll find what you were looking for.\n\nBye!\nFlatisfy"
     txt += signature
-    html += signature.replace('\n', '<br>')
+    html += signature.replace("\n", "<br>")
 
     html += """</p>
       </body>
     </html>"""
 
-    send_email(config["smtp_server"],
-               config["smtp_port"],
-               "New flats found!",
-               config["smtp_from"],
-               config["smtp_to"],
-               txt,
-               html,
-               config.get("smtp_username"),
-               config.get("smtp_password"))
+    send_email(
+        config["smtp_server"],
+        config["smtp_port"],
+        "New flats found!",
+        config["smtp_from"],
+        config["smtp_to"],
+        txt,
+        html,
+        config.get("smtp_username"),
+        config.get("smtp_password"),
+    )

flatisfy/exceptions.py (+1, -0)

@@ -10,4 +10,5 @@ class DataBuildError(Exception):
     """
     Error occurring on building a data file.
     """
+
     pass

flatisfy/fetch.py (+35, -55)

@@ -24,8 +24,9 @@ try:
24 24
     from weboob.core.ouiboube import WebNip
25 25
     from weboob.tools.json import WeboobEncoder
26 26
 except ImportError:
27
-    LOGGER.error("Weboob is not available on your system. Make sure you "
28
-                 "installed it.")
27
+    LOGGER.error(
28
+        "Weboob is not available on your system. Make sure you " "installed it."
29
+    )
29 30
     raise
30 31
 
31 32
 
@@ -34,6 +35,7 @@ class WebOOBProxy(object):
34 35
     Wrapper around WebOOB ``WebNip`` class, to fetch housing posts without
35 36
     having to spawn a subprocess.
36 37
     """
38
+
37 39
     @staticmethod
38 40
     def version():
39 41
         """
@@ -78,12 +80,7 @@ class WebOOBProxy(object):
78 80
 
79 81
         # Create backends
80 82
         self.backends = [
81
-            self.webnip.load_backend(
82
-                module,
83
-                module,
84
-                params={}
85
-            )
86
-            for module in backends
83
+            self.webnip.load_backend(module, module, params={}) for module in backends
87 84
         ]
88 85
 
89 86
     def __enter__(self):
@@ -114,19 +111,15 @@ class WebOOBProxy(object):
114 111
             except CallErrors as exc:
115 112
                 # If an error occured, just log it
116 113
                 LOGGER.error(
117
-                    (
118
-                        "An error occured while building query for "
119
-                        "postal code %s: %s"
120
-                    ),
114
+                    ("An error occured while building query for " "postal code %s: %s"),
121 115
                     postal_code,
122
-                    str(exc)
116
+                    str(exc),
123 117
                 )
124 118
 
125 119
                 if not matching_cities:
126 120
                     # If postal code gave no match, warn the user
127 121
                     LOGGER.warn(
128
-                        "Postal code %s could not be matched with a city.",
129
-                        postal_code
122
+                        "Postal code %s could not be matched with a city.", postal_code
130 123
                     )
131 124
 
132 125
         # Remove "TOUTES COMMUNES" entry which are duplicates of the individual
@@ -134,8 +127,9 @@ class WebOOBProxy(object):
134 127
         matching_cities = [
135 128
             city
136 129
             for city in matching_cities
137
-            if not (city.backend == 'logicimmo' and
138
-                    city.name.startswith('TOUTES COMMUNES'))
130
+            if not (
131
+                city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES")
132
+            )
139 133
         ]
140 134
 
141 135
         # Then, build queries by grouping cities by at most 3
@@ -145,10 +139,7 @@ class WebOOBProxy(object):
145 139
 
146 140
             try:
147 141
                 query.house_types = [
148
-                    getattr(
149
-                        HOUSE_TYPES,
150
-                        house_type.upper()
151
-                    )
142
+                    getattr(HOUSE_TYPES, house_type.upper())
152 143
                     for house_type in constraints_dict["house_types"]
153 144
                 ]
154 145
             except AttributeError:
@@ -156,10 +147,7 @@ class WebOOBProxy(object):
156 147
                 return None
157 148
 
158 149
             try:
159
-                query.type = getattr(
160
-                    POSTS_TYPES,
161
-                    constraints_dict["type"].upper()
162
-                )
150
+                query.type = getattr(POSTS_TYPES, constraints_dict["type"].upper())
163 151
             except AttributeError:
164 152
                 LOGGER.error("Invalid post type constraint.")
165 153
                 return None
@@ -190,16 +178,15 @@ class WebOOBProxy(object):
190 178
         # TODO: Handle max_entries better
191 179
         try:
192 180
             for housing in itertools.islice(
193
-                    self.webnip.do(
194
-                        'search_housings',
195
-                        query,
196
-                        # Only run the call on the required backends.
197
-                        # Otherwise, WebOOB is doing weird stuff and returning
198
-                        # nonsense.
199
-                        backends=[x for x in self.backends
200
-                                  if x.name in useful_backends]
201
-                    ),
202
-                    max_entries
181
+                self.webnip.do(
182
+                    "search_housings",
183
+                    query,
184
+                    # Only run the call on the required backends.
185
+                    # Otherwise, WebOOB is doing weird stuff and returning
186
+                    # nonsense.
187
+                    backends=[x for x in self.backends if x.name in useful_backends],
188
+                ),
189
+                max_entries,
203 190
             ):
204 191
                 if not store_personal_data:
205 192
                     housing.phone = None
@@ -207,8 +194,7 @@ class WebOOBProxy(object):
207 194
         except CallErrors as exc:
208 195
             # If an error occurred, just log it
209 196
             LOGGER.error(
210
-                "An error occurred while fetching the housing posts: %s",
211
-                str(exc)
197
+                "An error occurred while fetching the housing posts: %s", str(exc)
212 198
             )
213 199
         return housings
214 200
 
@@ -225,9 +211,7 @@ class WebOOBProxy(object):
225 211
         flat_id, backend_name = full_flat_id.rsplit("@", 1)
226 212
         try:
227 213
             backend = next(
228
-                backend
229
-                for backend in self.backends
230
-                if backend.name == backend_name
214
+                backend for backend in self.backends if backend.name == backend_name
231 215
             )
232 216
         except StopIteration:
233 217
             LOGGER.error("Backend %s is not available.", backend_name)
@@ -240,7 +224,7 @@ class WebOOBProxy(object):
240 224
                 housing.phone = None
241 225
             else:
242 226
                 # Ensure phone is fetched
243
-                backend.fillobj(housing, 'phone')
227
+                backend.fillobj(housing, "phone")
244 228
             # Otherwise, we miss the @backend afterwards
245 229
             housing.id = full_flat_id
246 230
 
@@ -248,9 +232,7 @@ class WebOOBProxy(object):
248 232
         except Exception as exc:  # pylint: disable=broad-except
249 233
             # If an error occurred, just log it
250 234
             LOGGER.error(
251
-                "An error occurred while fetching housing %s: %s",
252
-                full_flat_id,
253
-                str(exc)
235
+                "An error occurred while fetching housing %s: %s", full_flat_id, str(exc)
254 236
             )
255 237
             return "{}"
256 238
 
@@ -272,15 +254,17 @@ def fetch_flats(config):
272 254
             housing_posts = []
273 255
             for query in queries:
274 256
                 housing_posts.extend(
275
-                    webOOB_proxy.query(query, config["max_entries"],
276
-                                       config["store_personal_data"])
257
+                    webOOB_proxy.query(
258
+                        query, config["max_entries"], config["store_personal_data"]
259
+                    )
277 260
                 )
278
-        housing_posts = housing_posts[:config["max_entries"]]
261
+        housing_posts = housing_posts[: config["max_entries"]]
279 262
         LOGGER.info("Fetched %d flats.", len(housing_posts))
280 263
 
281 264
         constraint_flats_list = [json.loads(flat) for flat in housing_posts]
282
-        constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat)
283
-                                 for flat in constraint_flats_list]
265
+        constraint_flats_list = [
266
+            WebOOBProxy.restore_decimal_fields(flat) for flat in constraint_flats_list
267
+        ]
284 268
         fetched_flats[constraint_name] = constraint_flats_list
285 269
     return fetched_flats
286 270
 
@@ -295,8 +279,7 @@ def fetch_details(config, flat_id):
295 279
     """
296 280
     with WebOOBProxy(config) as webOOB_proxy:
297 281
         LOGGER.info("Loading additional details for flat %s.", flat_id)
298
-        webOOB_output = webOOB_proxy.info(flat_id,
299
-                                          config["store_personal_data"])
282
+        webOOB_output = webOOB_proxy.info(flat_id, config["store_personal_data"])
300 283
 
301 284
     flat_details = json.loads(webOOB_output)
302 285
     flat_details = WebOOBProxy.restore_decimal_fields(flat_details)
@@ -327,10 +310,7 @@ def load_flats_from_file(json_file, config):
327 310
         LOGGER.info("Found %d flats.", len(flats_list))
328 311
     except (IOError, ValueError):
329 312
         LOGGER.error("File %s is not a valid dump file.", json_file)
330
-    return {
331
-        constraint_name: flats_list
332
-        for constraint_name in config["constraints"]
333
-    }
313
+    return {constraint_name: flats_list for constraint_name in config["constraints"]}
334 314
 
335 315
 
336 316
 def load_flats_from_db(config):
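
A side note on the reformatted ImportError handler at the top of this file: lines such as "Weboob is not available on your system. Make sure you " "installed it." are not a merge bug, just black re-wrapping a manually split string once it fits on one line. A minimal standalone sketch (plain logging, independent of Weboob) showing the two forms are equivalent:

    import logging

    logging.basicConfig(level=logging.ERROR)
    LOGGER = logging.getLogger(__name__)

    # Adjacent string literals are concatenated at compile time, so both
    # calls log exactly the same message. black only re-wraps the literals;
    # it never joins them into a single quoted string.
    LOGGER.error("Weboob is not available on your system. Make sure you " "installed it.")
    LOGGER.error("Weboob is not available on your system. Make sure you installed it.")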

+ 31
- 62
flatisfy/filters/__init__.py View File

@@ -36,10 +36,7 @@ def refine_with_housing_criteria(flats_list, constraint):
36 36
     for i, flat in enumerate(flats_list):
37 37
         # Check postal code
38 38
         postal_code = flat["flatisfy"].get("postal_code", None)
39
-        if (
40
-                postal_code and
41
-                postal_code not in constraint["postal_codes"]
42
-        ):
39
+        if postal_code and postal_code not in constraint["postal_codes"]:
43 40
             LOGGER.info("Postal code for flat %s is out of range.", flat["id"])
44 41
             is_ok[i] = is_ok[i] and False
45 42
 
@@ -47,37 +44,32 @@ def refine_with_housing_criteria(flats_list, constraint):
47 44
         for place_name, time in flat["flatisfy"].get("time_to", {}).items():
48 45
             time = time["time"]
49 46
             is_within_interval = tools.is_within_interval(
50
-                time,
51
-                *(constraint["time_to"][place_name]["time"])
47
+                time, *(constraint["time_to"][place_name]["time"])
52 48
             )
53 49
             if not is_within_interval:
54
-                LOGGER.info("Flat %s is too far from place %s: %ds.",
55
-                            flat["id"], place_name, time)
50
+                LOGGER.info(
51
+                    "Flat %s is too far from place %s: %ds.",
52
+                    flat["id"],
53
+                    place_name,
54
+                    time,
55
+                )
56 56
             is_ok[i] = is_ok[i] and is_within_interval
57 57
 
58 58
         # Check other fields
59 59
         for field in ["area", "cost", "rooms", "bedrooms"]:
60 60
             interval = constraint[field]
61 61
             is_within_interval = tools.is_within_interval(
62
-                flat.get(field, None),
63
-                *interval
62
+                flat.get(field, None), *interval
64 63
             )
65 64
             if not is_within_interval:
66
-                LOGGER.info("%s for flat %s is out of range.",
67
-                            field.capitalize(), flat["id"])
65
+                LOGGER.info(
66
+                    "%s for flat %s is out of range.", field.capitalize(), flat["id"]
67
+                )
68 68
             is_ok[i] = is_ok[i] and is_within_interval
69 69
 
70 70
     return (
71
-        [
72
-            flat
73
-            for i, flat in enumerate(flats_list)
74
-            if is_ok[i]
75
-        ],
76
-        [
77
-            flat
78
-            for i, flat in enumerate(flats_list)
79
-            if not is_ok[i]
80
-        ]
71
+        [flat for i, flat in enumerate(flats_list) if is_ok[i]],
72
+        [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
81 73
     )
82 74
 
83 75
 
@@ -104,47 +96,37 @@ def refine_with_details_criteria(flats_list, constraint):
104 96
     for i, flat in enumerate(flats_list):
105 97
         # Check number of pictures
106 98
         has_enough_photos = tools.is_within_interval(
107
-            len(flat.get('photos', [])),
108
-            constraint['minimum_nb_photos'],
109
-            None
99
+            len(flat.get("photos", [])), constraint["minimum_nb_photos"], None
110 100
         )
111 101
         if not has_enough_photos:
112 102
             LOGGER.info(
113 103
                 "Flat %s only has %d photos, it should have at least %d.",
114 104
                 flat["id"],
115
-                len(flat['photos']),
116
-                constraint['minimum_nb_photos']
105
+                len(flat["photos"]),
106
+                constraint["minimum_nb_photos"],
117 107
             )
118 108
             is_ok[i] = False
119 109
 
120 110
         for term in constraint["description_should_contain"]:
121
-            if term.lower() not in flat['text'].lower():
111
+            if term.lower() not in flat["text"].lower():
122 112
                 LOGGER.info(
123 113
                     ("Description for flat %s does not contain required term '%s'."),
124 114
                     flat["id"],
125
-                    term
115
+                    term,
126 116
                 )
127 117
                 is_ok[i] = False
128 118
         for term in constraint["description_should_not_contain"]:
129
-            if term.lower() in flat['text'].lower():
119
+            if term.lower() in flat["text"].lower():
130 120
                 LOGGER.info(
131 121
                     ("Description for flat %s contains blacklisted term '%s'."),
132 122
                     flat["id"],
133
-                    term
123
+                    term,
134 124
                 )
135 125
                 is_ok[i] = False
136 126
 
137 127
     return (
138
-        [
139
-            flat
140
-            for i, flat in enumerate(flats_list)
141
-            if is_ok[i]
142
-        ],
143
-        [
144
-            flat
145
-            for i, flat in enumerate(flats_list)
146
-            if not is_ok[i]
147
-        ]
128
+        [flat for i, flat in enumerate(flats_list) if is_ok[i]],
129
+        [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
148 130
     )
149 131
 
150 132
 
@@ -185,14 +167,10 @@ def first_pass(flats_list, constraint, config):
185 167
         flats_list = metadata.guess_stations(flats_list, constraint, config)
186 168
 
187 169
     # Remove returned housing posts that do not match criteria
188
-    flats_list, ignored_list = refine_with_housing_criteria(flats_list,
189
-                                                            constraint)
170
+    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
171
+
172
+    return {"new": flats_list, "ignored": ignored_list, "duplicate": duplicates_by_urls}
190 173
 
191
-    return {
192
-        "new": flats_list,
193
-        "ignored": ignored_list,
194
-        "duplicate": duplicates_by_urls
195
-    }
196 174
 
197 175
 @tools.timeit
198 176
 def second_pass(flats_list, constraint, config):
@@ -226,22 +204,17 @@ def second_pass(flats_list, constraint, config):
226 204
         flats_list = metadata.compute_travel_times(flats_list, constraint, config)
227 205
 
228 206
     # Remove returned housing posts that do not match criteria
229
-    flats_list, ignored_list = refine_with_housing_criteria(flats_list,
230
-                                                            constraint)
207
+    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
231 208
 
232 209
     # Remove returned housing posts which do not match criteria relying on
233 210
     # fetched details.
234
-    flats_list, ignored_list = refine_with_details_criteria(flats_list,
235
-                                                            constraint)
211
+    flats_list, ignored_list = refine_with_details_criteria(flats_list, constraint)
236 212
 
237 213
     if config["serve_images_locally"]:
238 214
         images.download_images(flats_list, config)
239 215
 
240
-    return {
241
-        "new": flats_list,
242
-        "ignored": ignored_list,
243
-        "duplicate": []
244
-    }
216
+    return {"new": flats_list, "ignored": ignored_list, "duplicate": []}
217
+
245 218
 
246 219
 @tools.timeit
247 220
 def third_pass(flats_list, config):
@@ -260,8 +233,4 @@ def third_pass(flats_list, config):
260 233
     # Deduplicate the list using every available data
261 234
     flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)
262 235
 
263
-    return {
264
-        "new": flats_list,
265
-        "ignored": [],
266
-        "duplicate": duplicate_flats
267
-    }
236
+    return {"new": flats_list, "ignored": [], "duplicate": duplicate_flats}
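
The tail of refine_with_housing_criteria and refine_with_details_criteria is the same two-comprehension partition over a precomputed is_ok mask, which black now fits on two lines each. A generic sketch of the idiom, with invented sample data:

    def partition(items, predicate):
        """Split items into (kept, ignored) lists, mirroring the is_ok mask idiom."""
        flags = [predicate(item) for item in items]
        return (
            [item for item, ok in zip(items, flags) if ok],
            [item for item, ok in zip(items, flags) if not ok],
        )

    flats = [{"id": "a@seloger", "cost": 700}, {"id": "b@seloger", "cost": 1500}]
    kept, ignored = partition(flats, lambda flat: flat["cost"] <= 1000)
    assert [f["id"] for f in kept] == ["a@seloger"]
    assert [f["id"] for f in ignored] == ["b@seloger"]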

+ 4
- 4
flatisfy/filters/cache.py View File

@@ -16,10 +16,12 @@ import PIL.Image
16 16
 
17 17
 LOGGER = logging.getLogger(__name__)
18 18
 
19
+
19 20
 class MemoryCache(object):
20 21
     """
21 22
     A cache in memory.
22 23
     """
24
+
23 25
     @staticmethod
24 26
     def on_miss(key):
25 27
         """
@@ -87,6 +89,7 @@ class ImageCache(MemoryCache):
87 89
     """
88 90
     A cache for images, stored in memory.
89 91
     """
92
+
90 93
     @staticmethod
91 94
     def compute_filename(url):
92 95
         """
@@ -113,10 +116,7 @@ class ImageCache(MemoryCache):
113 116
         filepath = None
114 117
         # Try to load from local folder
115 118
         if self.storage_dir:
116
-            filepath = os.path.join(
117
-                self.storage_dir,
118
-                self.compute_filename(url)
119
-            )
119
+            filepath = os.path.join(self.storage_dir, self.compute_filename(url))
120 120
             if os.path.isfile(filepath):
121 121
                 return PIL.Image.open(filepath)
122 122
         # Otherwise, fetch it
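
Only fragments of MemoryCache and ImageCache are visible in these hunks; for context, the pattern is a memoizing cache whose on_miss hook subclasses override. A minimal sketch of that shape -- attribute names here are guesses, not the module's actual internals:

    class MiniCache(object):
        """A memoizing cache with an on_miss hook, shaped like MemoryCache."""

        def __init__(self):
            self.map = {}
            self.hits = 0
            self.misses = 0

        @staticmethod
        def on_miss(key):
            # Subclasses override this to fetch or compute the missing value.
            raise NotImplementedError

        def get(self, key):
            if key in self.map:
                self.hits += 1
                return self.map[key]
            self.misses += 1
            self.map[key] = self.on_miss(key)
            return self.map[key]

    class SquareCache(MiniCache):
        @staticmethod
        def on_miss(key):
            return key * key

    cache = SquareCache()
    assert cache.get(3) == 9 and cache.get(3) == 9  # second call is a hit
    assert (cache.hits, cache.misses) == (1, 1)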

+ 40
- 57
flatisfy/filters/duplicates.py View File

@@ -35,14 +35,14 @@ def homogeneize_phone_number(numbers):
35 35
 
36 36
     clean_numbers = []
37 37
 
38
-    for number in numbers.split(','):
38
+    for number in numbers.split(","):
39 39
         number = number.strip()
40 40
         number = number.replace(".", "")
41 41
         number = number.replace(" ", "")
42 42
         number = number.replace("-", "")
43 43
         number = number.replace("(", "")
44 44
         number = number.replace(")", "")
45
-        number = re.sub(r'^\+\d\d', "", number)
45
+        number = re.sub(r"^\+\d\d", "", number)
46 46
 
47 47
         if not number.startswith("0"):
48 48
             number = "0" + number
@@ -94,12 +94,7 @@ def compare_photos(photo1, photo2, photo_cache, hash_threshold):
94 94
         return False
95 95
 
96 96
 
97
-def find_number_common_photos(
98
-    flat1_photos,
99
-    flat2_photos,
100
-    photo_cache,
101
-    hash_threshold
102
-):
97
+def find_number_common_photos(flat1_photos, flat2_photos, photo_cache, hash_threshold):
103 98
     """
104 99
     Compute the number of common photos between the two lists of photos for the
105 100
     flats.
@@ -174,22 +169,23 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
174 169
             # Sort matching flats by backend precedence
175 170
             matching_flats.sort(
176 171
                 key=lambda flat: next(
177
-                    i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
172
+                    i
173
+                    for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
178 174
                     if flat["id"].endswith(backend)
179 175
                 ),
180
-                reverse=True
176
+                reverse=True,
181 177
             )
182 178
 
183 179
             if len(matching_flats) > 1:
184
-                LOGGER.info("Found duplicates using key \"%s\": %s.",
185
-                            key,
186
-                            [flat["id"] for flat in matching_flats])
180
+                LOGGER.info(
181
+                    'Found duplicates using key "%s": %s.',
182
+                    key,
183
+                    [flat["id"] for flat in matching_flats],
184
+                )
187 185
             # Otherwise, check the policy
188 186
             if merge:
189 187
                 # If a merge is requested, do the merge
190
-                unique_flats_list.append(
191
-                    tools.merge_dicts(*matching_flats)
192
-                )
188
+                unique_flats_list.append(tools.merge_dicts(*matching_flats))
193 189
             else:
194 190
                 # Otherwise, just keep the most important of them
195 191
                 unique_flats_list.append(matching_flats[-1])
@@ -203,8 +199,9 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
203 199
     if should_intersect:
204 200
         # We added some flats twice with the above method, let's deduplicate on
205 201
         # id.
206
-        unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True,
207
-                                      should_intersect=False)
202
+        unique_flats_list, _ = detect(
203
+            unique_flats_list, key="id", merge=True, should_intersect=False
204
+        )
208 205
 
209 206
     return unique_flats_list, duplicate_flats
210 207
 
@@ -250,14 +247,12 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
250 247
 
251 248
         # They should have the same postal code, if available
252 249
         if (
253
-                "flatisfy" in flat1 and "flatisfy" in flat2 and
254
-                flat1["flatisfy"].get("postal_code", None) and
255
-                flat2["flatisfy"].get("postal_code", None)
250
+            "flatisfy" in flat1
251
+            and "flatisfy" in flat2
252
+            and flat1["flatisfy"].get("postal_code", None)
253
+            and flat2["flatisfy"].get("postal_code", None)
256 254
         ):
257
-            assert (
258
-                flat1["flatisfy"]["postal_code"] ==
259
-                flat2["flatisfy"]["postal_code"]
260
-            )
255
+            assert flat1["flatisfy"]["postal_code"] == flat2["flatisfy"]["postal_code"]
261 256
             n_common_items += 1
262 257
 
263 258
         # TODO: Better text comparison (one included in the other, fuzzymatch)
@@ -282,25 +277,17 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
282 277
         both_are_from_same_backend = (
283 278
             flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
284 279
         )
285
-        both_have_float_part = (
286
-            (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
287
-        )
288
-        both_have_equal_float_part = (
289
-            (flat1["area"] % 1) == (flat2["area"] % 1)
290
-        )
280
+        both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
281
+        both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
291 282
         if both_have_float_part and both_are_from_same_backend:
292 283
             assert both_have_equal_float_part
293 284
 
294 285
         if flat1.get("photos", []) and flat2.get("photos", []):
295 286
             n_common_photos = find_number_common_photos(
296
-                flat1["photos"],
297
-                flat2["photos"],
298
-                photo_cache,
299
-                hash_threshold
287
+                flat1["photos"], flat2["photos"], photo_cache, hash_threshold
300 288
             )
301 289
 
302
-            min_number_photos = min(len(flat1["photos"]),
303
-                                    len(flat2["photos"]))
290
+            min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
304 291
 
305 292
             # Either all the photos are the same, or there are at least
306 293
             # three common photos.
@@ -332,9 +319,7 @@ def deep_detect(flats_list, config):
332 319
         storage_dir = os.path.join(config["data_directory"], "images")
333 320
     else:
334 321
         storage_dir = None
335
-    photo_cache = ImageCache(
336
-        storage_dir=storage_dir
337
-    )
322
+    photo_cache = ImageCache(storage_dir=storage_dir)
338 323
 
339 324
     LOGGER.info("Running deep duplicates detection.")
340 325
     matching_flats = collections.defaultdict(list)
@@ -348,29 +333,30 @@ def deep_detect(flats_list, config):
348 333
                 continue
349 334
 
350 335
             n_common_items = get_duplicate_score(
351
-                flat1,
352
-                flat2,
353
-                photo_cache,
354
-                config["duplicate_image_hash_threshold"]
336
+                flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"]
355 337
             )
356 338
 
357 339
             # Minimal score to consider them duplicates
358 340
             if n_common_items >= config["duplicate_threshold"]:
359 341
                 # Mark flats as duplicates
360 342
                 LOGGER.info(
361
-                    ("Found duplicates using deep detection: (%s, %s). "
362
-                     "Score is %d."),
343
+                    (
344
+                        "Found duplicates using deep detection: (%s, %s). "
345
+                        "Score is %d."
346
+                    ),
363 347
                     flat1["id"],
364 348
                     flat2["id"],
365
-                    n_common_items
349
+                    n_common_items,
366 350
                 )
367 351
                 matching_flats[flat1["id"]].append(flat2["id"])
368 352
                 matching_flats[flat2["id"]].append(flat1["id"])
369 353
 
370 354
     if photo_cache.total():
371
-        LOGGER.debug("Photo cache: hits: %d%% / misses: %d%%.",
372
-                     photo_cache.hit_rate(),
373
-                     photo_cache.miss_rate())
355
+        LOGGER.debug(
356
+            "Photo cache: hits: %d%% / misses: %d%%.",
357
+            photo_cache.hit_rate(),
358
+            photo_cache.miss_rate(),
359
+        )
374 360
 
375 361
     seen_ids = []
376 362
     duplicate_flats = []
@@ -381,16 +367,13 @@ def deep_detect(flats_list, config):
381 367
 
382 368
         seen_ids.extend(matching_flats[flat_id])
383 369
         to_merge = sorted(
384
-            [
385
-                flat
386
-                for flat in flats_list
387
-                if flat["id"] in matching_flats[flat_id]
388
-            ],
370
+            [flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
389 371
             key=lambda flat: next(
390
-                i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
372
+                i
373
+                for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
391 374
                 if flat["id"].endswith(backend)
392 375
             ),
393
-            reverse=True
376
+            reverse=True,
394 377
         )
395 378
         unique_flats_list.append(tools.merge_dicts(*to_merge))
396 379
         # The ID of the added merged flat will be the one of the last item
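
The homogeneize_phone_number hunk at the top of this file condenses to a few normalisation passes; a standalone restatement of the steps visible in the diff (behaviour on edge cases such as extensions is an assumption, not taken from the module):

    import re

    def normalize_phone(numbers):
        """Normalise a comma-separated list of French phone numbers."""
        clean = []
        for number in numbers.split(","):
            number = re.sub(r"[.\s()-]", "", number.strip())  # drop separators
            number = re.sub(r"^\+\d\d", "", number)           # drop +NN country prefix
            if number and not number.startswith("0"):
                number = "0" + number
            clean.append(number)
        return clean

    assert normalize_phone("+33 6 12-34-56-78") == ["0612345678"]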

+ 3
- 1
flatisfy/filters/images.py View File

@@ -29,7 +29,9 @@ def download_images(flats_list, config):
29 29
     for i, flat in enumerate(flats_list):
30 30
         LOGGER.info(
31 31
             "Downloading photos for flat %d/%d: %s.",
32
-            i + 1, flats_list_length, flat["id"]
32
+            i + 1,
33
+            flats_list_length,
34
+            flat["id"],
33 35
         )
34 36
         for photo in flat["photos"]:
35 37
             # Download photo
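
The only change in this file is black's standard treatment of an over-long call: one argument per line, plus a trailing comma. An illustration with dummy values:

    import logging

    logging.basicConfig(level=logging.INFO)
    LOGGER = logging.getLogger(__name__)

    index, total, flat_id = 0, 2, "12345@seloger"  # dummy values
    # A call that fits in black's 88-column default stays on one line.
    LOGGER.info("Downloading photo for flat %s.", flat_id)
    # One that does not is exploded, one argument per line, with a trailing
    # comma appended -- exactly the transformation applied to download_images.
    LOGGER.info(
        "Downloading photos for flat %d/%d: %s.",
        index + 1,
        total,
        flat_id,
    )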

+ 74
- 85
flatisfy/filters/metadata.py View File

@@ -103,7 +103,7 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
103 103
             if choice in normalized_query
104 104
         ],
105 105
         key=lambda x: x[1],
106
-        reverse=True
106
+        reverse=True,
107 107
     )
108 108
     if limit:
109 109
         matches = matches[:limit]
@@ -111,10 +111,7 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
111 111
     # Update confidence
112 112
     if matches:
113 113
         max_confidence = max(match[1] for match in matches)
114
-        matches = [
115
-            (x[0], int(x[1] / max_confidence * 100))
116
-            for x in matches
117
-        ]
114
+        matches = [(x[0], int(x[1] / max_confidence * 100)) for x in matches]
118 115
 
119 116
     # Convert back matches to original strings
120 117
     # Also filter out matches below threshold
@@ -126,32 +123,27 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
126 123
 
127 124
     return matches
128 125
 
126
+
129 127
 def guess_location_position(location, cities, constraint):
130 128
     # try to find a city
131 129
     # Find all fuzzy-matching cities
132 130
     postal_code = None
133 131
     position = None
134 132
 
135
-    matched_cities = fuzzy_match(
136
-        location,
137
-        [x.name for x in cities],
138
-        limit=None
139
-    )
133
+    matched_cities = fuzzy_match(location, [x.name for x in cities], limit=None)
140 134
     if matched_cities:
141 135
         # Find associated postal codes
142 136
         matched_postal_codes = []
143 137
         for matched_city_name, _ in matched_cities:
144 138
             postal_code_objects_for_city = [
145
-                x for x in cities
146
-                if x.name == matched_city_name
139
+                x for x in cities if x.name == matched_city_name
147 140
             ]
148 141
             matched_postal_codes.extend(
149
-                pc.postal_code
150
-                for pc in postal_code_objects_for_city
142
+                pc.postal_code for pc in postal_code_objects_for_city
151 143
             )
152 144
         # Try to match them with postal codes in config constraint
153
-        matched_postal_codes_in_config = (
154
-            set(matched_postal_codes) & set(constraint["postal_codes"])
145
+        matched_postal_codes_in_config = set(matched_postal_codes) & set(
146
+            constraint["postal_codes"]
155 147
         )
156 148
         if matched_postal_codes_in_config:
157 149
             # If there are some matched postal codes which are also in
@@ -166,14 +158,17 @@ def guess_location_position(location, cities, constraint):
166 158
         # take the city position
167 159
         for matched_city_name, _ in matched_cities:
168 160
             postal_code_objects_for_city = [
169
-                x for x in cities
161
+                x
162
+                for x in cities
170 163
                 if x.name == matched_city_name and x.postal_code == postal_code
171 164
             ]
172 165
             if len(postal_code_objects_for_city):
173
-                position = {"lat": postal_code_objects_for_city[0].lat, "lng": postal_code_objects_for_city[0].lng}
166
+                position = {
167
+                    "lat": postal_code_objects_for_city[0].lat,
168
+                    "lng": postal_code_objects_for_city[0].lng,
169
+                }
174 170
                 LOGGER.debug(
175
-                    ("Found position %s using city %s."),
176
-                    position, matched_city_name
171
+                    ("Found position %s using city %s."), position, matched_city_name
177 172
                 )
178 173
                 break
179 174
 
@@ -194,25 +189,20 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
194 189
 
195 190
     :return: An updated list of flats dict with guessed postal code.
196 191
     """
197
-    opendata = {
198
-        "postal_codes": data.load_data(PostalCode, constraint, config)
199
-    }
192
+    opendata = {"postal_codes": data.load_data(PostalCode, constraint, config)}
200 193
 
201 194
     for flat in flats_list:
202 195
         location = flat.get("location", None)
203 196
         if not location:
204 197
             addr = flat.get("address", None)
205 198
             if addr:
206
-                location = addr['full_address']
199
+                location = addr["full_address"]
207 200
         if not location:
208 201
             # Skip everything if empty location
209 202
             LOGGER.info(
210
-                (
211
-                    "No location field for flat %s, skipping postal "
212
-                    "code lookup. (%s)"
213
-                ),
203
+                ("No location field for flat %s, skipping postal " "code lookup. (%s)"),
214 204
                 flat["id"],
215
-                flat.get("address")
205
+                flat.get("address"),
216 206
             )
217 207
             continue
218 208
 
@@ -230,17 +220,22 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
230 220
 
231 221
             LOGGER.debug(
232 222
                 "Found postal code in location field for flat %s: %s.",
233
-                flat["id"], postal_code
223
+                flat["id"],
224
+                postal_code,
234 225
             )
235 226
         except AssertionError:
236 227
             postal_code = None
237 228
 
238 229
         # Then fetch position (and postal_code if it couldn't be found earlier)
239 230
         if postal_code:
240
-            cities = [x for x in opendata["postal_codes"] if x.postal_code == postal_code]
231
+            cities = [
232
+                x for x in opendata["postal_codes"] if x.postal_code == postal_code
233
+            ]
241 234
             (_, position) = guess_location_position(location, cities, constraint)
242 235
         else:
243
-            (postal_code, position) = guess_location_position(location, opendata["postal_codes"], constraint)
236
+            (postal_code, position) = guess_location_position(
237
+                location, opendata["postal_codes"], constraint
238
+            )
244 239
 
245 240
         # Check that postal code is not too far from the ones listed in config,
246 241
         # to limit bad fuzzy matching
@@ -256,17 +251,19 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
256 251
                         (x.lat, x.lng)
257 252
                         for x in opendata["postal_codes"]
258 253
                         if x.postal_code == constraint_postal_code
259
-                    )
254
+                    ),
260 255
                 )
261 256
                 for constraint_postal_code in constraint["postal_codes"]
262 257
             )
263 258
 
264 259
             if distance > distance_threshold:
265 260
                 LOGGER.info(
266
-                    ("Postal code %s found for flat %s @ %s is off-constraints "
267
-                     "(distance is %dm > %dm). Let's consider it is an "
268
-                     "artifact match and keep the post without this postal "
269
-                     "code."),
261
+                    (
262
+                        "Postal code %s found for flat %s @ %s is off-constraints "
263
+                        "(distance is %dm > %dm). Let's consider it is an "
264
+                        "artifact match and keep the post without this postal "
265
+                        "code."
266
+                    ),
270 267
                     postal_code,
271 268
                     flat["id"],
272 269
                     location,
@@ -282,7 +279,9 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
282 279
             if existing_postal_code and existing_postal_code != postal_code:
283 280
                 LOGGER.warning(
284 281
                     "Replacing previous postal code %s by %s for flat %s.",
285
-                    existing_postal_code, postal_code, flat["id"]
282
+                    existing_postal_code,
283
+                    postal_code,
284
+                    flat["id"],
286 285
                 )
287 286
             flat["flatisfy"]["postal_code"] = postal_code
288 287
         else:
@@ -304,10 +303,10 @@ def guess_stations(flats_list, constraint, config):
304 303
 
305 304
     :return: An updated list of flats dict with guessed nearby stations.
306 305
     """
307
-    distance_threshold = config['max_distance_housing_station']
306
+    distance_threshold = config["max_distance_housing_station"]
308 307
     opendata = {
309 308
         "postal_codes": data.load_data(PostalCode, constraint, config),
310
-        "stations": data.load_data(PublicTransport, constraint, config)
309
+        "stations": data.load_data(PublicTransport, constraint, config),
311 310
     }
312 311
 
313 312
     for flat in flats_list:
@@ -316,13 +315,12 @@ def guess_stations(flats_list, constraint, config):
316 315
         if not flat_station:
317 316
             # Skip everything if empty station
318 317
             LOGGER.info(
319
-                "No stations field for flat %s, skipping stations lookup.",
320
-                flat["id"]
318
+                "No stations field for flat %s, skipping stations lookup.", flat["id"]
321 319
             )
322 320
             continue
323 321
 
324 322
         # Weboob modules can return several stations in a comma-separated list.
325
-        flat_stations = flat_station.split(',')
323
+        flat_stations = flat_station.split(",")
326 324
         # But some station names contain a comma, so let's add the initial
327 325
         # value back to the list of stations to check when the split produced several.
328 326
         if len(flat_stations) > 1:
@@ -334,7 +332,7 @@ def guess_stations(flats_list, constraint, config):
334 332
                 tentative_station,
335 333
                 [x.name for x in opendata["stations"]],
336 334
                 limit=10,
337
-                threshold=50
335
+                threshold=50,
338 336
             )
339 337
 
340 338
         # Keep only one occurrence of each station
@@ -361,32 +359,34 @@ def guess_stations(flats_list, constraint, config):
361 359
                 ]
362 360
                 for station_data in stations_objects:
363 361
                     distance = tools.distance(
364
-                        (station_data.lat, station_data.lng),
365
-                        postal_code_gps
362
+                        (station_data.lat, station_data.lng), postal_code_gps
366 363
                     )
367 364
                     if distance < distance_threshold:
368 365
                         # If at least one of the coordinates for a given
369 366
                         # station is close enough, that's ok and we can add
370 367
                         # the station
371
-                        good_matched_stations.append({
372
-                            "key": station[0],
373
-                            "name": station_data.name,
374
-                            "confidence": station[1],
375
-                            "gps": (station_data.lat, station_data.lng)
376
-                        })
368
+                        good_matched_stations.append(
369
+                            {
370
+                                "key": station[0],
371
+                                "name": station_data.name,
372
+                                "confidence": station[1],
373
+                                "gps": (station_data.lat, station_data.lng),
374
+                            }
375
+                        )
377 376
                         break
378 377
                     LOGGER.info(
379
-                        ("Station %s is too far from flat %s (%dm > %dm), "
380
-                         "discarding this station."),
378
+                        (
379
+                            "Station %s is too far from flat %s (%dm > %dm), "
380
+                            "discarding this station."
381
+                        ),
381 382
                         station[0],
382 383
                         flat["id"],
383 384
                         int(distance),
384
-                        int(distance_threshold)
385
+                        int(distance_threshold),
385 386
                     )
386 387
         else:
387 388
             LOGGER.info(
388
-                "No postal code for flat %s, skipping stations detection.",
389
-                flat["id"]
389
+                "No postal code for flat %s, skipping stations detection.", flat["id"]
390 390
             )
391 391
 
392 392
         if not good_matched_stations:
@@ -394,7 +394,7 @@ def guess_stations(flats_list, constraint, config):
394 394
             LOGGER.info(
395 395
                 "No stations found for flat %s, matching %s.",
396 396
                 flat["id"],
397
-                flat["station"]
397
+                flat["station"],
398 398
             )
399 399
             continue
400 400
 
@@ -402,29 +402,20 @@ def guess_stations(flats_list, constraint, config):
402 402
             "Found stations for flat %s: %s (matching %s).",
403 403
             flat["id"],
404 404
             ", ".join(x["name"] for x in good_matched_stations),
405
-            flat["station"]
405
+            flat["station"],
406 406
         )
407 407
 
408 408
         # If some stations were already filled in and the result is different,
409 409
         # display some warning to the user
410
-        if (
411
-                "matched_stations" in flat["flatisfy"] and
412
-                (
413
-                    # Do a set comparison, as ordering is not important
414
-                    set([
415
-                        station["name"]
416
-                        for station in flat["flatisfy"]["matched_stations"]
417
-                    ]) !=
418
-                    set([
419
-                        station["name"]
420
-                        for station in good_matched_stations
421
-                    ])
422
-                )
410
+        if "matched_stations" in flat["flatisfy"] and (
411
+            # Do a set comparison, as ordering is not important
412
+            set([station["name"] for station in flat["flatisfy"]["matched_stations"]])
413
+            != set([station["name"] for station in good_matched_stations])
423 414
         ):
424 415
             LOGGER.warning(
425 416
                 "Replacing previously fetched stations for flat %s. Found "
426 417
                 "stations differ from the previously found ones.",
427
-                flat["id"]
418
+                flat["id"],
428 419
             )
429 420
 
430 421
         flat["flatisfy"]["matched_stations"] = good_matched_stations
@@ -449,9 +440,8 @@ def compute_travel_times(flats_list, constraint, config):
449 440
         if not flat["flatisfy"].get("matched_stations", []):
450 441
             # Skip any flat without matched stations
451 442
             LOGGER.info(
452
-                "Skipping travel time computation for flat %s. No matched "
453
-                "stations.",
454
-                flat["id"]
443
+                "Skipping travel time computation for flat %s. No matched " "stations.",
444
+                flat["id"],
455 445
             )
456 446
             continue
457 447
 
@@ -467,15 +457,11 @@ def compute_travel_times(flats_list, constraint, config):
467 457
             for station in flat["flatisfy"]["matched_stations"]:
468 458
                 # Time from station is a dict with time and route
469 459
                 time_from_station_dict = tools.get_travel_time_between(
470
-                    station["gps"],
471
-                    place["gps"],
472
-                    TimeToModes[mode],
473
-                    config
460
+                    station["gps"], place["gps"], TimeToModes[mode], config
474 461
                 )
475
-                if (
476
-                        time_from_station_dict and
477
-                        (time_from_station_dict["time"] < time_to_place_dict or
478
-                         time_to_place_dict is None)
462
+                if time_from_station_dict and (
463
+                    time_from_station_dict["time"] < time_to_place_dict
464
+                    or time_to_place_dict is None
479 465
                 ):
480 466
                     # If starting from this station makes the route to the
481 467
                     # specified place shorter, update
@@ -484,7 +470,10 @@ def compute_travel_times(flats_list, constraint, config):
484 470
             if time_to_place_dict:
485 471
                 LOGGER.info(
486 472
                     "Travel time between %s and flat %s by %s is %ds.",
487
-                    place_name, flat["id"], mode, time_to_place_dict["time"]
473
+                    place_name,
474
+                    flat["id"],
475
+                    mode,
476
+                    time_to_place_dict["time"],
488 477
                 )
489 478
                 flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
490 479
     return flats_list
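
The fuzzy_match hunks above only re-wrap lines, but the rescaling step they touch deserves a note: scores are normalised so the best match becomes 100 before the threshold filter runs. A Python 3 sketch with invented scores:

    def rescale_and_filter(matches, threshold=75):
        """Normalise confidences to the best match, then drop weak ones."""
        if not matches:
            return []
        best = max(score for _, score in matches)
        rescaled = [(name, int(score / best * 100)) for name, score in matches]
        return [(name, score) for name, score in rescaled if score >= threshold]

    raw = [("PARIS", 90), ("PANTIN", 72), ("PARMAIN", 60)]
    assert rescale_and_filter(raw) == [("PARIS", 100), ("PANTIN", 80)]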

+ 29
- 32
flatisfy/models/flat.py View File

@@ -11,7 +11,15 @@ import enum
11 11
 import arrow
12 12
 
13 13
 from sqlalchemy import (
14
-    Boolean, Column, DateTime, Enum, Float, SmallInteger, String, Text, inspect
14
+    Boolean,
15
+    Column,
16
+    DateTime,
17
+    Enum,
18
+    Float,
19
+    SmallInteger,
20
+    String,
21
+    Text,
22
+    inspect,
15 23
 )
16 24
 from sqlalchemy.orm import validates
17 25
 
@@ -26,6 +34,7 @@ class FlatUtilities(enum.Enum):
26 34
     """
27 35
     An enum of the possible utilities status for a flat entry.
28 36
     """
37
+
29 38
     included = 10
30 39
     unknown = 0
31 40
     excluded = -10
@@ -35,6 +44,7 @@ class FlatStatus(enum.Enum):
35 44
     """
36 45
     An enum of the possible status for a flat entry.
37 46
     """
47
+
38 48
     user_deleted = -100
39 49
     duplicate = -20
40 50
     ignored = -10
@@ -47,21 +57,16 @@ class FlatStatus(enum.Enum):
47 57
 
48 58
 # List of statuses that are automatically handled, and which the user cannot
49 59
 # manually set through the UI.
50
-AUTOMATED_STATUSES = [
51
-    FlatStatus.new,
52
-    FlatStatus.duplicate,
53
-    FlatStatus.ignored
54
-]
60
+AUTOMATED_STATUSES = [FlatStatus.new, FlatStatus.duplicate, FlatStatus.ignored]
55 61
 
56 62
 
57 63
 class Flat(BASE):
58 64
     """
59 65
     SQLAlchemy ORM model to store a flat.
60 66
     """
67
+
61 68
     __tablename__ = "flats"
62
-    __searchable__ = [
63
-        "title", "text", "station", "location", "details", "notes"
64
-    ]
69
+    __searchable__ = ["title", "text", "station", "location", "details", "notes"]
65 70
 
66 71
     # Weboob data
67 72
     id = Column(String, primary_key=True)
@@ -99,7 +104,7 @@ class Flat(BASE):
99 104
     # Date for visit
100 105
     visit_date = Column(DateTime)
101 106
 
102
-    @validates('utilities')
107
+    @validates("utilities")
103 108
     def validate_utilities(self, _, utilities):
104 109
         """
105 110
         Utilities validation method
@@ -124,8 +129,7 @@ class Flat(BASE):
124 129
         try:
125 130
             return getattr(FlatStatus, status)
126 131
         except (AttributeError, TypeError):
127
-            LOGGER.warn("Unknown flat status %s, ignoring it.",
128
-                        status)
132
+            LOGGER.warn("Unknown flat status %s, ignoring it.", status)
129 133
             return self.status.default.arg
130 134
 
131 135
     @validates("notation")
@@ -137,7 +141,7 @@ class Flat(BASE):
137 141
             notation = int(notation)
138 142
             assert notation >= 0 and notation <= 5
139 143
         except (ValueError, AssertionError):
140
-            raise ValueError('notation should be an integer between 0 and 5')
144
+            raise ValueError("notation should be an integer between 0 and 5")
141 145
         return notation
142 146
 
143 147
     @validates("date")
@@ -178,25 +182,22 @@ class Flat(BASE):
178 182
         # Handle flatisfy metadata
179 183
         flat_dict = flat_dict.copy()
180 184
         if "flatisfy" in flat_dict:
181
-            flat_dict["flatisfy_stations"] = (
182
-                flat_dict["flatisfy"].get("matched_stations", [])
183
-            )
184
-            flat_dict["flatisfy_postal_code"] = (
185
-                flat_dict["flatisfy"].get("postal_code", None)
185
+            flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get(
186
+                "matched_stations", []
186 187
             )
187
-            flat_dict["flatisfy_position"] = (
188
-                flat_dict["flatisfy"].get("position", None)
188
+            flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get(
189
+                "postal_code", None
189 190
             )
190
-            flat_dict["flatisfy_time_to"] = (
191
-                flat_dict["flatisfy"].get("time_to", {})
192
-            )
193
-            flat_dict["flatisfy_constraint"] = (
194
-                flat_dict["flatisfy"].get("constraint", "default")
191
+            flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
192
+            flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
193
+            flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get(
194
+                "constraint", "default"
195 195
             )
196 196
             del flat_dict["flatisfy"]
197 197
 
198
-        flat_dict = {k: v for k, v in flat_dict.items()
199
-                     if k in inspect(Flat).columns.keys()}
198
+        flat_dict = {
199
+            k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()
200
+        }
200 201
         return Flat(**flat_dict)
201 202
 
202 203
     def __repr__(self):
@@ -207,11 +208,7 @@ class Flat(BASE):
207 208
         Return a dict representation of this flat object that is JSON
208 209
         serializable.
209 210
         """
210
-        flat_repr = {
211
-            k: v
212
-            for k, v in self.__dict__.items()
213
-            if not k.startswith("_")
214
-        }
211
+        flat_repr = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
215 212
         if isinstance(flat_repr["status"], FlatStatus):
216 213
             flat_repr["status"] = flat_repr["status"].name
217 214
         if isinstance(flat_repr["utilities"], FlatUtilities):
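
The validate_notation hunk is a one-line quote change, but the surrounding @validates pattern is worth showing in full: SQLAlchemy calls the decorated method on every assignment to the named column. A minimal self-contained sketch, assuming SQLAlchemy is installed (the model name is invented):

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import validates

    Base = declarative_base()

    class Rated(Base):
        __tablename__ = "rated"
        id = Column(Integer, primary_key=True)
        notation = Column(Integer, default=0)

        @validates("notation")
        def validate_notation(self, _, notation):
            # Reject anything outside the 0-5 star range, as Flat does above.
            notation = int(notation)
            if not 0 <= notation <= 5:
                raise ValueError("notation should be an integer between 0 and 5")
            return notation

    rated = Rated(notation=4)   # passes validation
    # Rated(notation=9)         # would raise ValueError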

+ 3
- 8
flatisfy/models/postal_code.py View File

@@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
7 7
 
8 8
 import logging
9 9
 
10
-from sqlalchemy import (
11
-    Column, Float, Integer, String, UniqueConstraint
12
-)
10
+from sqlalchemy import Column, Float, Integer, String, UniqueConstraint
13 11
 
14 12
 from flatisfy.database.base import BASE
15 13
 
@@ -21,6 +19,7 @@ class PostalCode(BASE):
21 19
     """
22 20
     SQLAlchemy ORM model to store a postal code opendata.
23 21
     """
22
+
24 23
     __tablename__ = "postal_codes"
25 24
 
26 25
     id = Column(Integer, primary_key=True)
@@ -41,8 +40,4 @@ class PostalCode(BASE):
41 40
         Return a dict representation of this postal code object that is JSON
42 41
         serializable.
43 42
         """
44
-        return {
45
-            k: v
46
-            for k, v in self.__dict__.items()
47
-            if not k.startswith("_")
48
-        }
43
+        return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
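
Both json() methods in these models reduce to the same one-line comprehension; the underscore filter is there because a mapped instance's __dict__ carries SQLAlchemy bookkeeping (notably _sa_instance_state) that json.dumps cannot serialise. A standalone illustration with a fake row object:

    class FakeRow(object):
        def __init__(self):
            self._sa_instance_state = object()  # stand-in for ORM bookkeeping
            self.postal_code = "75005"
            self.lat, self.lng = 48.84, 2.34

    row = FakeRow()
    public = {k: v for k, v in row.__dict__.items() if not k.startswith("_")}
    assert public == {"postal_code": "75005", "lat": 48.84, "lng": 2.34}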

+ 2
- 3
flatisfy/models/public_transport.py View File

@@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
7 7
 
8 8
 import logging
9 9
 
10
-from sqlalchemy import (
11
-    Column, Float, Integer, String
12
-)
10
+from sqlalchemy import Column, Float, Integer, String
13 11
 
14 12
 from flatisfy.database.base import BASE
15 13
 
@@ -21,6 +19,7 @@ class PublicTransport(BASE):
21 19
     """
22 20
     SQLAlchemy ORM model to store public transport opendata.
23 21
     """
22
+
24 23
     __tablename__ = "public_transports"
25 24
 
26 25
     id = Column(Integer, primary_key=True)

+ 114
- 138
flatisfy/tests.py View File

@@ -30,6 +30,7 @@ class LocalImageCache(ImageCache):
30 30
     """
31 31
     A local cache for images, stored in memory.
32 32
     """
33
+
33 34
     @staticmethod
34 35
     def on_miss(path):
35 36
         """
@@ -46,48 +47,36 @@ class TestTexts(unittest.TestCase):
46 47
     """
47 48
     Checks string normalizations.
48 49
     """
50
+
49 51
     def test_roman_numbers(self):
50 52
         """
51 53
         Checks roman numbers replacement.
52 54
         """
53
-        self.assertEqual(
54
-            "XIV",
55
-            tools.convert_arabic_to_roman("14")
56
-        )
55
+        self.assertEqual("XIV", tools.convert_arabic_to_roman("14"))
57 56
 
58
-        self.assertEqual(
59
-            "XXXIX",
60
-            tools.convert_arabic_to_roman("39")
61
-        )
57
+        self.assertEqual("XXXIX", tools.convert_arabic_to_roman("39"))
62 58
 
63
-        self.assertEqual(
64
-            "40",
65
-            tools.convert_arabic_to_roman("40")
66
-        )
59
+        self.assertEqual("40", tools.convert_arabic_to_roman("40"))
67 60
 
68
-        self.assertEqual(
69
-            "1987",
70
-            tools.convert_arabic_to_roman("1987")
71
-        )
61
+        self.assertEqual("1987", tools.convert_arabic_to_roman("1987"))
72 62
 
73 63
         self.assertEqual(
74 64
             "Dans le XVe arrondissement",
75
-            tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement")
65
+            tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
76 66
         )
77 67
 
78 68
         self.assertEqual(
79
-            "XXeme arr.",
80
-            tools.convert_arabic_to_roman_in_text("20eme arr.")
69
+            "XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr.")
81 70
         )
82 71
 
83 72
         self.assertEqual(
84 73
             "A AIX EN PROVENCE",
85
-            tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE")
74
+            tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE"),
86 75
         )
87 76
 
88 77
         self.assertEqual(
89 78
             "Montigny Le Bretonneux",
90
-            tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux")
79
+            tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux"),
91 80
         )
92 81
 
93 82
     def test_roman_numbers_in_text(self):
@@ -97,58 +86,43 @@ class TestTexts(unittest.TestCase):
97 86
         """
98 87
         self.assertEqual(
99 88
             "dans le XVe arrondissement",
100
-            tools.normalize_string("Dans le 15e arrondissement")
89
+            tools.normalize_string("Dans le 15e arrondissement"),
101 90
         )
102 91
 
103
-        self.assertEqual(
104
-            "paris XVe, 75005",
105
-            tools.normalize_string("Paris 15e, 75005")
106
-        )
92
+        self.assertEqual("paris XVe, 75005", tools.normalize_string("Paris 15e, 75005"))
107 93
 
108
-        self.assertEqual(
109
-            "paris xve, 75005",
110
-            tools.normalize_string("Paris XVe, 75005")
111
-        )
94
+        self.assertEqual("paris xve, 75005", tools.normalize_string("Paris XVe, 75005"))
112 95
 
113 96
     def test_multiple_whitespaces(self):
114 97
         """
115 98
         Checks whitespaces are collapsed.
116 99
         """
117
-        self.assertEqual(
118
-            "avec ascenseur",
119
-            tools.normalize_string("avec   ascenseur")
120
-        )
100
+        self.assertEqual("avec ascenseur", tools.normalize_string("avec   ascenseur"))
121 101
 
122 102
     def test_whitespace_trim(self):
123 103
         """
124 104
         Checks that trailing and beginning whitespaces are trimmed.
125 105
         """
126
-        self.assertEqual(
127
-            "rennes 35000",
128
-            tools.normalize_string("  Rennes 35000 ")
129
-        )
106
+        self.assertEqual("rennes 35000", tools.normalize_string("  Rennes 35000 "))
130