Merge branch 'bagage/Flatisfy-master'

Lucas Verney 2021-03-14 18:00:43 +01:00
commit 1a95495c30
59 changed files with 873792 additions and 1808 deletions

.dockerignore Normal file (1 changed line)

@@ -0,0 +1 @@
+data

.editorconfig Normal file (10 changed lines)

@@ -0,0 +1,10 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+end_of_line = lf
+insert_final_newline = true
+
+[*.py]
+max_line_length=120


@@ -4,6 +4,9 @@
     "env": {
         "browser": true
     },
+    "parserOptions": {
+        "ecmaVersion": 8
+    },
     rules: {
         'indent': ["error", 4, { 'SwitchCase': 1 }],
     }

.gitignore vendored (2 changed lines)

@@ -9,9 +9,7 @@ flatisfy/web/static/assets
 data/
 package-lock.json
 doc/_build
-yarn.lock
 data_rework/
 .env
 weboob
 .htpasswd
-.vscode

.vscode/extensions.json vendored Normal file (6 changed lines)

@@ -0,0 +1,6 @@
+{
+    "recommendations": [
+        "mtxr.sqltools",
+        "mtxr.sqltools-driver-sqlite"
+    ]
+}

.vscode/settings.json vendored Normal file (15 changed lines)

@@ -0,0 +1,15 @@
+{
+    "cSpell.words": [
+        "Weboob",
+        "flatisfy"
+    ],
+    "sqltools.useNodeRuntime": true,
+    "sqltools.connections": [
+        {
+            "previewLimit": 50,
+            "driver": "SQLite",
+            "name": "flatisfy",
+            "database": "${workspaceFolder:flatisfy}/data/flatisfy.db"
+        }
+    ]
+}


@@ -227,4 +227,10 @@ schema might change from time to time. Here is how to update it automatically:
 
 ### Other tools more or less connected with Flatisfy
 
-+ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only)
++ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only). You can invoke it with:
+
+```sh
+yarn ziparound
+# or alternatively
+yarn ziparound --code 75001 --distance 3
+```


@@ -18,7 +18,8 @@
 import os
 import sys
 
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath(".."))
 
 # -- General configuration ------------------------------------------------
@@ -30,19 +31,19 @@ sys.path.insert(0, os.path.abspath('..'))
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.viewcode',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.viewcode",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
-source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
 source_parsers = {
-    '.md': 'recommonmark.parser.CommonMarkParser',
+    ".md": "recommonmark.parser.CommonMarkParser",
 }
 
 # The encoding of source files.
@@ -50,21 +51,21 @@ source_parsers = {
 # source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'Flatisfy'
-copyright = u'2017, Phyks (Lucas Verney)'
-author = u'Phyks (Lucas Verney)'
+project = u"Flatisfy"
+copyright = u"2017, Phyks (Lucas Verney)"
+author = u"Phyks (Lucas Verney)"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u'0.1'
+version = u"0.1"
 # The full version, including alpha/beta/rc tags.
-release = u'0.1'
+release = u"0.1"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -85,7 +86,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -107,7 +108,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -124,7 +125,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'classic'
+html_theme = "classic"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -158,7 +159,7 @@ html_theme = 'classic'
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
@@ -238,34 +239,36 @@ html_static_path = ['_static']
 # html_search_scorer = 'scorer.js'
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Flatisfydoc'
+htmlhelp_basename = "Flatisfydoc"
 
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #
     # 'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #
     # 'preamble': '',
-
     # Latex figure (float) alignment
     #
     # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'Flatisfy.tex', u'Flatisfy Documentation',
-     u'Phyks (Lucas Verney)', 'manual'),
+    (
+        master_doc,
+        "Flatisfy.tex",
+        u"Flatisfy Documentation",
+        u"Phyks (Lucas Verney)",
+        "manual",
+    ),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -305,10 +308,7 @@ latex_documents = [
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'flatisfy', u'Flatisfy Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "flatisfy", u"Flatisfy Documentation", [author], 1)]
 
 # If true, show URL addresses after external links.
 #
@@ -321,9 +321,15 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Flatisfy', u'Flatisfy Documentation',
-     author, 'Flatisfy', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "Flatisfy",
+        u"Flatisfy Documentation",
+        author,
+        "Flatisfy",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.


@@ -19,7 +19,7 @@ RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - \
     && apt-get install -y nodejs
 
 # Install weboob's code itself.
-RUN git clone --depth 1 https://git.weboob.org/weboob/devel /home/user/weboob \
+RUN git clone --depth 1 https://git.weboob.org/weboob/weboob /home/user/weboob \
     && cd /home/user/weboob \
     && pip install .


@@ -9,3 +9,4 @@ services:
       - ./data:/flatisfy
     ports:
       - "8080:8080"
+    working_dir: /home/user/app


@@ -17,6 +17,7 @@ from flatisfy import data
 from flatisfy import fetch
 from flatisfy import tools
+from flatisfy import tests
 
 # pylint: enable=locally-disabled,wrong-import-position
@@ -27,68 +28,47 @@ def parse_args(argv=None):
     """
     Create parser and parse arguments.
     """
-    parser = argparse.ArgumentParser(prog="Flatisfy",
-                                     description="Find the perfect flat.")
+    parser = argparse.ArgumentParser(prog="Flatisfy", description="Find the perfect flat.")
 
     # Parent parser containing arguments common to any subcommand
     parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--data-dir", help="Location of Flatisfy data directory.")
+    parent_parser.add_argument("--config", help="Configuration file to use.")
     parent_parser.add_argument(
-        "--data-dir",
-        help="Location of Flatisfy data directory."
+        "--passes",
+        choices=[0, 1, 2, 3],
+        type=int,
+        help="Number of passes to do on the filtered data.",
     )
+    parent_parser.add_argument("--max-entries", type=int, help="Maximum number of entries to fetch.")
+    parent_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose logging output.")
+    parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
     parent_parser.add_argument(
-        "--config",
-        help="Configuration file to use."
-    )
-    parent_parser.add_argument(
-        "--passes", choices=[0, 1, 2, 3], type=int,
-        help="Number of passes to do on the filtered data."
-    )
-    parent_parser.add_argument(
-        "--max-entries", type=int,
-        help="Maximum number of entries to fetch."
-    )
-    parent_parser.add_argument(
-        "-v", "--verbose", action="store_true",
-        help="Verbose logging output."
-    )
-    parent_parser.add_argument(
-        "-vv", action="store_true",
-        help="Debug logging output."
-    )
-    parent_parser.add_argument(
-        "--constraints", type=str,
-        help="Comma-separated list of constraints to consider."
+        "--constraints",
+        type=str,
+        help="Comma-separated list of constraints to consider.",
     )
 
     # Subcommands
-    subparsers = parser.add_subparsers(
-        dest="cmd", help="Available subcommands"
-    )
+    subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
 
     # Build data subcommand
-    subparsers.add_parser(
-        "build-data", parents=[parent_parser],
-        help="Build necessary data"
-    )
+    subparsers.add_parser("build-data", parents=[parent_parser], help="Build necessary data")
 
     # Init config subcommand
     parser_init_config = subparsers.add_parser(
-        "init-config", parents=[parent_parser],
-        help="Initialize empty configuration."
-    )
-    parser_init_config.add_argument(
-        "output", nargs="?", help="Output config file. Use '-' for stdout."
+        "init-config", parents=[parent_parser], help="Initialize empty configuration."
     )
+    parser_init_config.add_argument("output", nargs="?", help="Output config file. Use '-' for stdout.")
 
     # Fetch subcommand parser
-    subparsers.add_parser("fetch", parents=[parent_parser],
-                          help="Fetch housings posts")
+    subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
 
     # Filter subcommand parser
     parser_filter = subparsers.add_parser(
-        "filter", parents=[parent_parser],
-        help="Filter housings posts according to constraints in config."
+        "filter",
+        parents=[parent_parser],
+        help="Filter housings posts according to constraints in config.",
     )
     parser_filter.add_argument(
         "--input",
@@ -97,28 +77,29 @@ def parse_args(argv=None):
             "no additional fetching of infos is done, and the script outputs "
             "a filtered JSON dump on stdout. If not provided, update status "
             "of the flats in the database."
-        )
+        ),
     )
 
     # Import subcommand parser
-    subparsers.add_parser("import", parents=[parent_parser],
-                          help="Import housing posts in database.")
+    import_filter = subparsers.add_parser("import", parents=[parent_parser], help="Import housing posts in database.")
+    import_filter.add_argument(
+        "--new-only",
+        action="store_true",
+        help=("Download new housing posts only but do not refresh existing ones"),
+    )
 
     # Purge subcommand parser
-    subparsers.add_parser("purge", parents=[parent_parser],
-                          help="Purge database.")
+    subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
 
     # Serve subcommand parser
-    parser_serve = subparsers.add_parser("serve", parents=[parent_parser],
-                                         help="Serve the web app.")
+    parser_serve = subparsers.add_parser("serve", parents=[parent_parser], help="Serve the web app.")
     parser_serve.add_argument("--port", type=int, help="Port to bind to.")
     parser_serve.add_argument("--host", help="Host to listen on.")
 
     # Test subcommand parser
-    subparsers.add_parser("test", parents=[parent_parser],
-                          help="Unit testing.")
+    subparsers.add_parser("test", parents=[parent_parser], help="Unit testing.")
 
-    return parser.parse_args(argv)
+    return parser, parser.parse_args(argv)
 
 
 def main():
@@ -127,25 +108,30 @@ def main():
     """
     # pylint: disable=locally-disabled,too-many-branches
     # Parse arguments
-    args = parse_args()
+    parser, args = parse_args()
 
     # Set logger
-    if args.vv:
-        logging.getLogger('').setLevel(logging.DEBUG)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-    elif args.verbose:
-        logging.getLogger('').setLevel(logging.INFO)
+    if getattr(args, 'vv', False):
+        logging.getLogger("").setLevel(logging.DEBUG)
+        logging.getLogger("titlecase").setLevel(logging.INFO)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
+    elif getattr(args, 'verbose', False):
+        logging.getLogger("").setLevel(logging.INFO)
         # sqlalchemy INFO level is way too loud, just stick with WARNING
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
     else:
-        logging.getLogger('').setLevel(logging.WARNING)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("").setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
 
     # Init-config command
     if args.cmd == "init-config":
         flatisfy.config.init_config(args.output)
         sys.exit(0)
     else:
+        if not args.cmd:
+            parser.print_help()
+            sys.exit(0)
+
         # Load config
         if args.cmd == "build-data":
             # Data not yet built, do not use it in config checks
@@ -153,9 +139,11 @@ def main():
         else:
             config = flatisfy.config.load_config(args, check_with_data=True)
         if config is None:
-            LOGGER.error("Invalid configuration. Exiting. "
-                         "Run init-config before if this is the first time "
-                         "you run Flatisfy.")
+            LOGGER.error(
+                "Invalid configuration. Exiting. "
+                "Run init-config before if this is the first time "
+                "you run Flatisfy."
+            )
             sys.exit(1)
 
     # Purge command
@@ -171,18 +159,11 @@ def main():
     if args.cmd == "fetch":
         # Fetch and filter flats list
         fetched_flats = fetch.fetch_flats(config)
-        fetched_flats = cmds.filter_fetched_flats(config,
-                                                  fetched_flats=fetched_flats,
-                                                  fetch_details=True)
+        fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=True)
         # Sort by cost
-        fetched_flats = {
-            k: tools.sort_list_of_dicts_by(v["new"], "cost")
-            for k, v in fetched_flats.items()
-        }
+        fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
 
-        print(
-            tools.pretty_json(fetched_flats)
-        )
+        print(tools.pretty_json(fetched_flats))
         return
     # Filter command
     elif args.cmd == "filter":
@@ -190,28 +171,19 @@ def main():
         if args.input:
             fetched_flats = fetch.load_flats_from_file(args.input, config)
 
-            fetched_flats = cmds.filter_fetched_flats(
-                config,
-                fetched_flats=fetched_flats,
-                fetch_details=False
-            )
+            fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=False)
 
             # Sort by cost
-            fetched_flats = {
-                k: tools.sort_list_of_dicts_by(v["new"], "cost")
-                for k, v in fetched_flats.items()
-            }
+            fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
 
             # Output to stdout
-            print(
-                tools.pretty_json(fetched_flats)
-            )
+            print(tools.pretty_json(fetched_flats))
         else:
             cmds.import_and_filter(config, load_from_db=True)
         return
     # Import command
     elif args.cmd == "import":
-        cmds.import_and_filter(config, load_from_db=False)
+        cmds.import_and_filter(config, load_from_db=False, new_only=args.new_only)
         return
     # Serve command
     elif args.cmd == "serve":
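
Note: `parse_args` now returns both the parser and the parsed namespace, so `main` can fall back to printing the full help text when no subcommand is given instead of failing on `args.cmd`. A minimal standalone sketch of the pattern (the `demo` program and its single subcommand are hypothetical, not Flatisfy's real CLI):

```python
import argparse
import sys


def parse_args(argv=None):
    # Return the parser alongside the namespace so the caller can reuse it.
    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
    subparsers.add_parser("fetch", help="Fetch something")
    return parser, parser.parse_args(argv)


def main():
    parser, args = parse_args()
    if not args.cmd:
        # No subcommand given: show usage instead of crashing later.
        parser.print_help()
        sys.exit(0)
    print("Running subcommand:", args.cmd)


if __name__ == "__main__":
    main()
```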


@@ -18,21 +18,23 @@ from flatisfy import fetch
 from flatisfy import tools
 from flatisfy.filters import metadata
 from flatisfy.web import app as web_app
+import time
+from ratelimit.exception import RateLimitException
 
 LOGGER = logging.getLogger(__name__)
 
 
-def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
+def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
     """
     Filter the available flats list. Then, filter it according to criteria.
 
     :param config: A config dict.
     :param constraint_name: The constraint name that the ``flats_list`` should
                             satisfy.
+    :param flats_list: The initial list of flat objects to filter.
     :param fetch_details: Whether additional details should be fetched between
                           the two passes.
-    :param flats_list: The initial list of flat objects to filter.
+    :param past_flats: The list of already fetched flats
     :return: A dict mapping flat status and list of flat objects.
     """
     # Add the flatisfy metadata entry and prepare the flat objects
@@ -44,13 +46,9 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
     except KeyError:
         LOGGER.error(
             "Missing constraint %s. Skipping filtering for these posts.",
-            constraint_name
+            constraint_name,
         )
-        return {
-            "new": [],
-            "duplicate": [],
-            "ignored": []
-        }
+        return {"new": [], "duplicate": [], "ignored": []}
 
     first_pass_result = collections.defaultdict(list)
     second_pass_result = collections.defaultdict(list)
@@ -58,52 +56,55 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
     # Do a first pass with the available infos to try to remove as much
     # unwanted postings as possible
     if config["passes"] > 0:
-        first_pass_result = flatisfy.filters.first_pass(flats_list,
-                                                        constraint,
-                                                        config)
+        first_pass_result = flatisfy.filters.first_pass(flats_list, constraint, config)
     else:
         first_pass_result["new"] = flats_list
 
     # Load additional infos
     if fetch_details:
+        past_ids = {x["id"]: x for x in past_flats} if past_flats else {}
         for i, flat in enumerate(first_pass_result["new"]):
-            details = fetch.fetch_details(config, flat["id"])
+            details = None
+
+            use_cache = past_ids.get(flat["id"])
+            if use_cache:
+                LOGGER.debug("Skipping details download for %s.", flat["id"])
+                details = use_cache
+            else:
+                if flat["id"].split("@")[1] in ["seloger", "leboncoin"]:
+                    try:
+                        details = fetch.fetch_details_rate_limited(config, flat["id"])
+                    except RateLimitException:
+                        time.sleep(60)
+                        details = fetch.fetch_details_rate_limited(config, flat["id"])
+                else:
+                    details = fetch.fetch_details(config, flat["id"])
+
             first_pass_result["new"][i] = tools.merge_dicts(flat, details)
 
     # Do a second pass to consolidate all the infos we found and make use of
     # additional infos
     if config["passes"] > 1:
-        second_pass_result = flatisfy.filters.second_pass(
-            first_pass_result["new"], constraint, config
-        )
+        second_pass_result = flatisfy.filters.second_pass(first_pass_result["new"], constraint, config)
     else:
         second_pass_result["new"] = first_pass_result["new"]
 
     # Do a third pass to deduplicate better
     if config["passes"] > 2:
-        third_pass_result = flatisfy.filters.third_pass(
-            second_pass_result["new"],
-            config
-        )
+        third_pass_result = flatisfy.filters.third_pass(second_pass_result["new"], config)
     else:
         third_pass_result["new"] = second_pass_result["new"]
 
     return {
         "new": third_pass_result["new"],
         "duplicate": (
-            first_pass_result["duplicate"] +
-            second_pass_result["duplicate"] +
-            third_pass_result["duplicate"]
+            first_pass_result["duplicate"] + second_pass_result["duplicate"] + third_pass_result["duplicate"]
         ),
-        "ignored": (
-            first_pass_result["ignored"] +
-            second_pass_result["ignored"] +
-            third_pass_result["ignored"]
-        )
+        "ignored": (first_pass_result["ignored"] + second_pass_result["ignored"] + third_pass_result["ignored"]),
     }
 
 
-def filter_fetched_flats(config, fetched_flats, fetch_details=True):
+def filter_fetched_flats(config, fetched_flats, fetch_details=True, past_flats={}):
     """
     Filter the available flats list. Then, filter it according to criteria.
@@ -120,12 +121,13 @@ def filter_fetched_flats(config, fetched_flats, fetch_details=True):
             config,
             constraint_name,
             flats_list,
-            fetch_details
+            fetch_details,
+            past_flats.get(constraint_name, None),
         )
     return fetched_flats
 
 
-def import_and_filter(config, load_from_db=False):
+def import_and_filter(config, load_from_db=False, new_only=False):
     """
     Fetch the available flats list. Then, filter it according to criteria.
     Finally, store it in the database.
@@ -136,17 +138,23 @@ def import_and_filter(config, load_from_db=False):
     :return: ``None``.
     """
     # Fetch and filter flats list
+    past_flats = fetch.load_flats_from_db(config)
     if load_from_db:
-        fetched_flats = fetch.load_flats_from_db(config)
+        fetched_flats = past_flats
     else:
         fetched_flats = fetch.fetch_flats(config)
     # Do not fetch additional details if we loaded data from the db.
-    flats_by_status = filter_fetched_flats(config, fetched_flats=fetched_flats,
-                                           fetch_details=(not load_from_db))
+    flats_by_status = filter_fetched_flats(
+        config,
+        fetched_flats=fetched_flats,
+        fetch_details=(not load_from_db),
+        past_flats=past_flats if new_only else {},
+    )
 
     # Create database connection
     get_session = database.init_db(config["database"], config["search_index"])
 
     new_flats = []
+    result = []
 
     LOGGER.info("Merging fetched flats in database...")
     # Flatten the flats_by_status dict
@@ -159,14 +167,11 @@ def import_and_filter(config, load_from_db=False):
         # Set is_expired to true for all existing flats.
         # This will be set back to false if we find them during importing.
         for flat in session.query(flat_model.Flat).all():
-            flat.is_expired = True;
+            flat.is_expired = True
 
         for status, flats_list in flatten_flats_by_status.items():
             # Build SQLAlchemy Flat model objects for every available flat
-            flats_objects = {
-                flat_dict["id"]: flat_model.Flat.from_dict(flat_dict)
-                for flat_dict in flats_list
-            }
+            flats_objects = {flat_dict["id"]: flat_model.Flat.from_dict(flat_dict) for flat_dict in flats_list}
 
             if flats_objects:
                 # If there are some flats, try to merge them with the ones in
@@ -179,9 +184,7 @@ def import_and_filter(config, load_from_db=False):
                     # status if the user defined it
                     flat_object = flats_objects[each.id]
                     if each.status in flat_model.AUTOMATED_STATUSES:
-                        flat_object.status = getattr(
-                            flat_model.FlatStatus, status
-                        )
+                        flat_object.status = getattr(flat_model.FlatStatus, status)
                     else:
                         flat_object.status = each.status
@@ -198,21 +201,22 @@ def import_and_filter(config, load_from_db=False):
                 flat.status = getattr(flat_model.FlatStatus, status)
                 if flat.status == flat_model.FlatStatus.new:
                     new_flats.append(flat)
+                    result.append(flat.id)
 
             session.add_all(flats_objects.values())
 
     if config["send_email"]:
         email.send_notification(config, new_flats)
 
+    LOGGER.info(f"Found {len(result)} new flats.")
+
     # Touch a file to indicate last update timestamp
-    ts_file = os.path.join(
-        config["data_directory"],
-        "timestamp"
-    )
-    with open(ts_file, 'w'):
+    ts_file = os.path.join(config["data_directory"], "timestamp")
+    with open(ts_file, "w"):
         os.utime(ts_file, None)
 
     LOGGER.info("Done!")
+
+    return result
 
 
 def purge_db(config):
@@ -253,4 +257,4 @@ def serve(config):
         server = web_app.QuietWSGIRefServer
 
     print("Launching web viewer running on http://%s:%s" % (config["host"], config["port"]))
     app.run(host=config["host"], port=config["port"], server=server)
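
Note: the details-fetching loop above backs off for a minute and retries once when the `ratelimit` package signals that a backend's quota is exhausted. A rough sketch of that pattern, with a stubbed fetch function and made-up rate values (the real `fetch_details_rate_limited` lives in the `fetch` module, which is not shown in this diff):

```python
import time

from ratelimit import limits
from ratelimit.exception import RateLimitException


# Hypothetical quota: at most 10 calls per 60 seconds.
@limits(calls=10, period=60)
def fetch_details_rate_limited(flat_id):
    # Stand-in for the real network fetch.
    return {"id": flat_id}


def fetch_with_retry(flat_id):
    # On a rate-limit hit, sleep for a minute and try exactly once more,
    # mirroring the loop in filter_flats_list above.
    try:
        return fetch_details_rate_limited(flat_id)
    except RateLimitException:
        time.sleep(60)
        return fetch_details_rate_limited(flat_id)
```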


@@ -30,24 +30,25 @@ DEFAULT_CONFIG = {
     "default": {
         "type": None,  # RENT, SALE, SHARING
         "house_types": [],  # List of house types, must be in APART, HOUSE,
        # PARKING, LAND, OTHER or UNKNOWN
         "postal_codes": [],  # List of postal codes
+        "insees": [],  # List of postal codes
         "area": (None, None),  # (min, max) in m^2
         "cost": (None, None),  # (min, max) in currency unit
         "rooms": (None, None),  # (min, max)
         "bedrooms": (None, None),  # (min, max)
         "minimum_nb_photos": None,  # min number of photos
         "description_should_contain": [],  # list of terms
-        "description_should_not_contain": [  # list of terms
+        "description_should_not_contain": [
             "vendu",
             "Vendu",
             "VENDU",
-            "recherche"
+            "recherche",
         ],
         "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
        #                                        "time": (min, max),
        #                                        "mode": Valid mode }
        # Time is in seconds
     }
 },
 # Whether or not to store personal data from housing posts (phone number
@@ -91,15 +92,17 @@ DEFAULT_CONFIG = {
     "backends": None,
     # Should email notifications be sent?
     "send_email": False,
-    "smtp_server": 'localhost',
+    "smtp_server": "localhost",
     "smtp_port": 25,
     "smtp_username": None,
     "smtp_password": None,
     "smtp_from": "noreply@flatisfy.org",
     "smtp_to": [],
+    "notification_lang": "en",
     # The web site url, to be used in email notifications. (doesn't matter
     # whether the trailing slash is present or not)
-    "website_url": "http://127.0.0.1:8080"
+    "website_url": "http://127.0.0.1:8080",
+    "ignore_station": False,
 }
 
 LOGGER = logging.getLogger(__name__)
@@ -114,20 +117,14 @@ def validate_config(config, check_with_data):
                            check the config values.
     :return: ``True`` if the configuration is valid, ``False`` otherwise.
     """
+
     def _check_constraints_bounds(bounds):
         """
         Check the bounds for numeric constraints.
         """
         assert isinstance(bounds, list)
         assert len(bounds) == 2
-        assert all(
-            x is None or
-            (
-                isinstance(x, (float, int)) and
-                x >= 0
-            )
-            for x in bounds
-        )
+        assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
         if bounds[0] is not None and bounds[1] is not None:
             assert bounds[1] > bounds[0]
@@ -139,7 +136,9 @@ def validate_config(config, check_with_data):
     # pylint: disable=locally-disabled,line-too-long
 
     assert config["passes"] in [0, 1, 2, 3]
-    assert config["max_entries"] is None or (isinstance(config["max_entries"], int) and config["max_entries"] > 0)  # noqa: E501
+    assert config["max_entries"] is None or (
+        isinstance(config["max_entries"], int) and config["max_entries"] > 0
+    )  # noqa: E501
 
     assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
     assert os.path.isdir(config["data_directory"])
@@ -159,6 +158,7 @@ def validate_config(config, check_with_data):
     assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
     assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
     assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
+    assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)
 
     assert isinstance(config["store_personal_data"], bool)
     assert isinstance(config["max_distance_housing_station"], (int, float))
@@ -169,6 +169,8 @@ def validate_config(config, check_with_data):
     assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
     assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501
 
+    assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501
+
     # Ensure constraints are ok
     assert config["constraints"]
     for constraint in config["constraints"].values():
@@ -188,8 +190,7 @@ def validate_config(config, check_with_data):
                 assert isinstance(term, str)
 
         assert "description_should_not_contain" in constraint
-        assert isinstance(constraint["description_should_not_contain"],
-                          list)
+        assert isinstance(constraint["description_should_not_contain"], list)
         if constraint["description_should_not_contain"]:
             for term in constraint["description_should_not_contain"]:
                 assert isinstance(term, str)
@@ -202,16 +203,22 @@ def validate_config(config, check_with_data):
         assert "postal_codes" in constraint
         assert constraint["postal_codes"]
         assert all(isinstance(x, str) for x in constraint["postal_codes"])
+        if "insee_codes" in constraint:
+            assert constraint["insee_codes"]
+            assert all(isinstance(x, str) for x in constraint["insee_codes"])
+
         if check_with_data:
             # Ensure data is built into db
             data.preprocess_data(config, force=False)
             # Check postal codes
-            opendata_postal_codes = [
-                x.postal_code
-                for x in data.load_data(PostalCode, constraint, config)
-            ]
+            opendata = data.load_data(PostalCode, constraint, config)
+            opendata_postal_codes = [x.postal_code for x in opendata]
+            opendata_insee_codes = [x.insee_code for x in opendata]
             for postal_code in constraint["postal_codes"]:
                 assert postal_code in opendata_postal_codes  # noqa: E501
+            if "insee_codes" in constraint:
+                for insee in constraint["insee_codes"]:
+                    assert insee in opendata_insee_codes  # noqa: E501
 
         assert "area" in constraint
         _check_constraints_bounds(constraint["area"])
@@ -264,22 +271,18 @@ def load_config(args=None, check_with_data=True):
             config_data.update(json.load(fh))
         except (IOError, ValueError) as exc:
             LOGGER.error(
-                "Unable to load configuration from file, "
-                "using default configuration: %s.",
-                exc
+                "Unable to load configuration from file, using default configuration: %s.",
+                exc,
             )
 
     # Overload config with arguments
     if args and getattr(args, "passes", None) is not None:
-        LOGGER.debug(
-            "Overloading number of passes from CLI arguments: %d.",
-            args.passes
-        )
+        LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
         config_data["passes"] = args.passes
     if args and getattr(args, "max_entries", None) is not None:
         LOGGER.debug(
             "Overloading maximum number of entries from CLI arguments: %d.",
-            args.max_entries
+            args.max_entries,
         )
         config_data["max_entries"] = args.max_entries
     if args and getattr(args, "port", None) is not None:
@@ -294,49 +297,39 @@ def load_config(args=None, check_with_data=True):
         LOGGER.debug("Overloading data directory from CLI arguments.")
         config_data["data_directory"] = args.data_dir
     elif config_data["data_directory"] is None:
-        config_data["data_directory"] = appdirs.user_data_dir(
-            "flatisfy",
-            "flatisfy"
-        )
-        LOGGER.debug("Using default XDG data directory: %s.",
-                     config_data["data_directory"])
+        config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
+        LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])
 
     if not os.path.isdir(config_data["data_directory"]):
-        LOGGER.info("Creating data directory according to config: %s",
-                    config_data["data_directory"])
+        LOGGER.info(
+            "Creating data directory according to config: %s",
+            config_data["data_directory"],
+        )
         os.makedirs(config_data["data_directory"])
         os.makedirs(os.path.join(config_data["data_directory"], "images"))
 
     if config_data["database"] is None:
-        config_data["database"] = "sqlite:///" + os.path.join(
-            config_data["data_directory"],
-            "flatisfy.db"
-        )
+        config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")
 
     if config_data["search_index"] is None:
-        config_data["search_index"] = os.path.join(
-            config_data["data_directory"],
-            "search_index"
-        )
+        config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")
 
     # Handle constraints filtering
     if args and getattr(args, "constraints", None) is not None:
         LOGGER.info(
-            ("Filtering constraints from config according to CLI argument. "
-             "Using only the following constraints: %s."),
-            args.constraints.replace(",", ", ")
+            (
+                "Filtering constraints from config according to CLI argument. "
+                "Using only the following constraints: %s."
+            ),
+            args.constraints.replace(",", ", "),
         )
         constraints_filter = args.constraints.split(",")
-        config_data["constraints"] = {
-            k: v
-            for k, v in config_data["constraints"].items()
-            if k in constraints_filter
-        }
+        config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}
 
     # Sanitize website url
     if config_data["website_url"] is not None:
-        if config_data["website_url"][-1] != '/':
-            config_data["website_url"] += '/'
+        if config_data["website_url"][-1] != "/":
+            config_data["website_url"] += "/"
 
     config_validation = validate_config(config_data, check_with_data)
     if config_validation is True:


@@ -16,7 +16,7 @@ BACKENDS_BY_PRECEDENCE = [
     "pap",
     "leboncoin",
     "explorimmo",
-    "logicimmo"
+    "logicimmo",
 ]


@@ -24,11 +24,13 @@ except ImportError:
     try:
         from functools32 import lru_cache
     except ImportError:
+
         def lru_cache(maxsize=None):  # pylint: disable=unused-argument
             """
             Identity implementation of ``lru_cache`` for fallback.
             """
             return lambda func: func
+
         LOGGER.warning(
             "`functools.lru_cache` is not available on your system. Consider "
             "installing `functools32` Python module if using Python2 for "
@@ -48,10 +50,7 @@ def preprocess_data(config, force=False):
     # Check if a build is required
     get_session = database.init_db(config["database"], config["search_index"])
     with get_session() as session:
-        is_built = (
-            session.query(PublicTransport).count() > 0 and
-            session.query(PostalCode).count() > 0
-        )
+        is_built = session.query(PublicTransport).count() > 0 and session.query(PostalCode).count() > 0
         if is_built and not force:
             # No need to rebuild the database, skip
             return False
@@ -64,9 +63,7 @@ def preprocess_data(config, force=False):
     for preprocess in data_files.PREPROCESSING_FUNCTIONS:
         data_objects = preprocess()
         if not data_objects:
-            raise flatisfy.exceptions.DataBuildError(
-                "Error with %s." % preprocess.__name__
-            )
+            raise flatisfy.exceptions.DataBuildError("Error with %s." % preprocess.__name__)
         with get_session() as session:
             session.add_all(data_objects)
     LOGGER.info("Done building data!")
@@ -96,10 +93,7 @@ def load_data(model, constraint, config):
         # Load data for each area
         areas = list(set(areas))
         for area in areas:
-            results.extend(
-                session.query(model)
-                .filter(model.area == area).all()
-            )
+            results.extend(session.query(model).filter(model.area == area).all())
         # Expunge loaded data from the session to be able to use them
         # afterwards
         session.expunge_all()


@@ -24,8 +24,8 @@ MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
 titlecase.set_small_word_list(
     # Add French small words
-    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|" +
-    titlecase.SMALL
+    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|"
+    + titlecase.SMALL
 )
 
 TRANSPORT_DATA_FILES = {
@@ -33,7 +33,7 @@ TRANSPORT_DATA_FILES = {
     "FR-NW": "stops_fr-nw.txt",
     "FR-NE": "stops_fr-ne.txt",
     "FR-SW": "stops_fr-sw.txt",
-    "FR-SE": "stops_fr-se.txt"
+    "FR-SE": "stops_fr-se.txt",
 }
@@ -51,8 +51,20 @@ def french_postal_codes_to_quarter(postal_code):
     # French departements
     # Taken from Wikipedia data.
     department_to_subdivision = {
-        "FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43", "63", "69",
-                   "73", "74"],
+        "FR-ARA": [
+            "01",
+            "03",
+            "07",
+            "15",
+            "26",
+            "38",
+            "42",
+            "43",
+            "63",
+            "69",
+            "73",
+            "74",
+        ],
         "FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
         "FR-BRE": ["22", "29", "35", "44", "56"],
         "FR-CVL": ["18", "28", "36", "37", "41", "45"],
@@ -61,36 +73,53 @@ def french_postal_codes_to_quarter(postal_code):
     "FR-HDF": ["02", "59", "60", "62", "80"],
     "FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
     "FR-NOR": ["14", "27", "50", "61", "76"],
-        "FR-NAQ": ["16", "17", "19", "23", "24", "33", "40", "47", "64", "79",
-                   "86", "87"],
-        "FR-OCC": ["09", "11", "12", "30", "31", "32", "34", "46", "48", "65",
-                   "66", "81", "82"],
+        "FR-NAQ": [
+            "16",
+            "17",
+            "19",
+            "23",
+            "24",
+            "33",
+            "40",
+            "47",
+            "64",
+            "79",
+            "86",
+            "87",
+        ],
+        "FR-OCC": [
+            "09",
+            "11",
+            "12",
+            "30",
+            "31",
+            "32",
+            "34",
+            "46",
+            "48",
+            "65",
+            "66",
+            "81",
+            "82",
+        ],
         "FR-PDL": ["44", "49", "53", "72", "85"],
-        "FR-PAC": ["04", "05", "06", "13", "83", "84"]
+        "FR-PAC": ["04", "05", "06", "13", "83", "84"],
     }
 
     subdivision_to_quarters = {
-        'FR-IDF': ['FR-IDF'],
-        'FR-NW': ['FR-BRE', 'FR-CVL', 'FR-NOR', 'FR-PDL'],
-        'FR-NE': ['FR-BFC', 'FR-GES', 'FR-HDF'],
-        'FR-SE': ['FR-ARA', 'FR-COR', 'FR-PAC', 'FR-OCC'],
-        'FR-SW': ['FR-NAQ']
+        "FR-IDF": ["FR-IDF"],
+        "FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
+        "FR-NE": ["FR-BFC", "FR-GES", "FR-HDF"],
+        "FR-SE": ["FR-ARA", "FR-COR", "FR-PAC", "FR-OCC"],
+        "FR-SW": ["FR-NAQ"],
     }
 
     subdivision = next(
-        (
-            i
-            for i, departments in department_to_subdivision.items()
-            if departement in departments
-        ),
-        None
+        (i for i, departments in department_to_subdivision.items() if departement in departments),
+        None,
     )
     return next(
-        (
-            i
-            for i, subdivisions in subdivision_to_quarters.items()
-            if subdivision in subdivisions
-        ),
-        None
+        (i for i, subdivisions in subdivision_to_quarters.items() if subdivision in subdivisions),
+        None,
    )
@@ -106,9 +135,7 @@ def _preprocess_laposte():
     raw_laposte_data = []
     # Load opendata file
     try:
-        with io.open(
-            os.path.join(MODULE_DIR, data_file), "r", encoding='utf-8'
-        ) as fh:
+        with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
             raw_laposte_data = json.load(fh)
     except (IOError, ValueError):
         LOGGER.error("Invalid raw LaPoste opendata file.")
@@ -124,31 +151,30 @@ def _preprocess_laposte():
         try:
             area = french_postal_codes_to_quarter(fields["code_postal"])
             if area is None:
-                LOGGER.info(
+                LOGGER.debug(
                     "No matching area found for postal code %s, skipping it.",
-                    fields["code_postal"]
+                    fields["code_postal"],
                 )
                 continue
 
-            name = normalize_string(
-                titlecase.titlecase(fields["nom_de_la_commune"]),
-                lowercase=False
-            )
+            name = normalize_string(titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False)
 
             if (fields["code_postal"], name) in seen_postal_codes:
                 continue
 
             seen_postal_codes.append((fields["code_postal"], name))
-            postal_codes_data.append(PostalCode(
-                area=area,
-                postal_code=fields["code_postal"],
-                name=name,
-                lat=fields["coordonnees_gps"][0],
-                lng=fields["coordonnees_gps"][1]
-            ))
+            postal_codes_data.append(
+                PostalCode(
+                    area=area,
+                    postal_code=fields["code_postal"],
+                    insee_code=fields["code_commune_insee"],
+                    name=name,
+                    lat=fields["coordonnees_gps"][0],
+                    lng=fields["coordonnees_gps"][1],
+                )
+            )
         except KeyError:
-            LOGGER.info("Missing data for postal code %s, skipping it.",
-                        fields["code_postal"])
+            LOGGER.debug("Missing data for postal code %s, skipping it.", fields["code_postal"])
 
     return postal_codes_data
@@ -164,17 +190,11 @@ def _preprocess_public_transport():
     for area, data_file in TRANSPORT_DATA_FILES.items():
         LOGGER.info("Building from public transport data %s.", data_file)
         try:
-            with io.open(os.path.join(MODULE_DIR, data_file), "r",
-                         encoding='utf-8') as fh:
+            with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
                 filereader = csv.reader(fh)
                 next(filereader, None)  # Skip first row (headers)
                 for row in filereader:
-                    public_transport_data.append(PublicTransport(
-                        name=row[2],
-                        area=area,
-                        lat=row[3],
-                        lng=row[4]
-                    ))
+                    public_transport_data.append(PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4]))
         except (IOError, IndexError):
             LOGGER.error("Invalid raw opendata file: %s.", data_file)
             return []
@@ -183,7 +203,4 @@ def _preprocess_public_transport():
 
 # List of all the available preprocessing functions. Order can be important.
-PREPROCESSING_FUNCTIONS = [
-    _preprocess_laposte,
-    _preprocess_public_transport
-]
+PREPROCESSING_FUNCTIONS = [_preprocess_laposte, _preprocess_public_transport]
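
Note: `french_postal_codes_to_quarter` resolves a postal code to the quarter whose transport data file should be searched, via two table lookups: departement to subdivision, then subdivision to quarter. A trimmed sketch of that resolution, keeping only two subdivisions from the tables above (the real function extracts the departement from the postal code elsewhere in the file; taking the first two digits is an assumption here):

```python
# Subset of the tables in the diff above; the full versions cover every
# French subdivision.
DEPARTMENT_TO_SUBDIVISION = {
    "FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
    "FR-PDL": ["44", "49", "53", "72", "85"],
}
SUBDIVISION_TO_QUARTERS = {
    "FR-IDF": ["FR-IDF"],
    "FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
}


def quarter_for_postal_code(postal_code):
    # First two digits of a French postal code give the departement.
    departement = postal_code[:2]
    subdivision = next(
        (k for k, deps in DEPARTMENT_TO_SUBDIVISION.items() if departement in deps),
        None,
    )
    return next(
        (k for k, subs in SUBDIVISION_TO_QUARTERS.items() if subdivision in subs),
        None,
    )


print(quarter_for_postal_code("75011"))  # FR-IDF
print(quarter_for_postal_code("44000"))  # FR-NW
```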

File diff suppressed because one or more lines are too long


@@ -47,9 +47,7 @@ def init_db(database_uri=None, search_db_uri=None):
     Session = sessionmaker(bind=engine)  # pylint: disable=locally-disabled,invalid-name
 
     if search_db_uri:
-        index_service = IndexService(
-            whoosh_base=search_db_uri
-        )
+        index_service = IndexService(whoosh_base=search_db_uri)
         index_service.register_class(flatisfy.models.flat.Flat)
 
     @contextmanager


@@ -50,4 +50,4 @@ class StringyJSON(types.TypeDecorator):
 # TypeEngine.with_variant says "use StringyJSON instead when
 # connecting to 'sqlite'"
 # pylint: disable=locally-disabled,invalid-name
-MagicJSON = types.JSON().with_variant(StringyJSON, 'sqlite')
+MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")


@@ -30,7 +30,6 @@ from whoosh.qparser import MultifieldParser
 
 class IndexService(object):
-
     def __init__(self, config=None, whoosh_base=None):
         if not whoosh_base and config:
             whoosh_base = config.get("WHOOSH_BASE")
@@ -84,8 +83,7 @@ class IndexService(object):
                 primary = field.name
                 continue
             if field.name in model_class.__searchable__:
-                schema[field.name] = whoosh.fields.TEXT(
-                    analyzer=StemmingAnalyzer())
+                schema[field.name] = whoosh.fields.TEXT(analyzer=StemmingAnalyzer())
         return Schema(**schema), primary
 
     def before_commit(self, session):
@@ -93,21 +91,18 @@ class IndexService(object):
 
         for model in session.new:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("new", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("new", model))
 
         for model in session.deleted:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("deleted", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("deleted", model))
 
         for model in session.dirty:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("changed", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("changed", model))
 
     def after_commit(self, session):
         """
@@ -128,16 +123,11 @@ class IndexService(object):
                     # added as a new doc. Could probably replace this with a whoosh
                     # update.
 
-                    writer.delete_by_term(
-                        primary_field, text_type(getattr(model, primary_field)))
+                    writer.delete_by_term(primary_field, text_type(getattr(model, primary_field)))
 
                     if change_type in ("new", "changed"):
-                        attrs = dict((key, getattr(model, key))
-                                     for key in searchable)
-                        attrs = {
-                            attr: text_type(getattr(model, attr))
-                            for attr in attrs.keys()
-                        }
+                        attrs = dict((key, getattr(model, key)) for key in searchable)
+                        attrs = {attr: text_type(getattr(model, attr)) for attr in attrs.keys()}
                         attrs[primary_field] = text_type(getattr(model, primary_field))
                         writer.add_document(**attrs)
@ -158,8 +148,7 @@ class Searcher(object):
self.parser = MultifieldParser(list(fields), index.schema) self.parser = MultifieldParser(list(fields), index.schema)
def __call__(self, session, query, limit=None): def __call__(self, session, query, limit=None):
results = self.index.searcher().search( results = self.index.searcher().search(self.parser.parse(query), limit=limit)
self.parser.parse(query), limit=limit)
keys = [x[self.primary] for x in results] keys = [x[self.primary] for x in results]
primary_column = getattr(self.model_class, self.primary) primary_column = getattr(self.model_class, self.primary)
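For readers unfamiliar with Whoosh: the service above keeps one index per searchable model and queries it through a `MultifieldParser`. A self-contained sketch of those primitives (the directory path, field names, and sample document are illustrative):

```python
import os

import whoosh.index
from whoosh.analysis import StemmingAnalyzer
from whoosh.fields import ID, TEXT, Schema
from whoosh.qparser import MultifieldParser

os.makedirs("/tmp/whoosh-demo", exist_ok=True)
schema = Schema(
    id=ID(stored=True, unique=True),  # primary key, stored so hits can return it
    title=TEXT(analyzer=StemmingAnalyzer()),
    text=TEXT(analyzer=StemmingAnalyzer()),
)
ix = whoosh.index.create_in("/tmp/whoosh-demo", schema)

writer = ix.writer()
writer.add_document(id="42@seloger", title="Bright flat", text="Close to the metro station")
writer.commit()

parser = MultifieldParser(["title", "text"], ix.schema)
with ix.searcher() as searcher:
    hits = searcher.search(parser.parse("metro"), limit=10)
    print([hit["id"] for hit in hits])  # -> ['42@seloger']
```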

View File

@ -8,7 +8,7 @@ from builtins import str
import logging import logging
import smtplib import smtplib
from money import Money
from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText from email.mime.text import MIMEText
from email.utils import formatdate, make_msgid from email.utils import formatdate, make_msgid
@ -36,15 +36,15 @@ def send_email(server, port, subject, _from, _to, txt, html, username=None, pass
if username or password: if username or password:
server.login(username or "", password or "") server.login(username or "", password or "")
msg = MIMEMultipart('alternative') msg = MIMEMultipart("alternative")
msg['Subject'] = subject msg["Subject"] = subject
msg['From'] = _from msg["From"] = _from
msg['To'] = ', '.join(_to) msg["To"] = ", ".join(_to)
msg['Date'] = formatdate() msg["Date"] = formatdate()
msg['Message-ID'] = make_msgid() msg["Message-ID"] = make_msgid()
msg.attach(MIMEText(txt, 'plain', 'utf-8')) msg.attach(MIMEText(txt, "plain", "utf-8"))
msg.attach(MIMEText(html, 'html', 'utf-8')) msg.attach(MIMEText(html, "html", "utf-8"))
server.sendmail(_from, _to, msg.as_string()) server.sendmail(_from, _to, msg.as_string())
server.quit() server.quit()
@ -61,13 +61,33 @@ def send_notification(config, flats):
if not flats: if not flats:
return return
txt = u'Hello dear user,\n\nThe following new flats have been found:\n\n' i18n = {
html = """ "en": {
"subject": f"{len(flats)} new flats found!",
"hello": "Hello dear user",
"following_new_flats": "The following new flats have been found:",
"area": "area",
"cost": "cost",
"signature": "Hope you'll find what you were looking for.",
},
"fr": {
"subject": f"{len(flats)} nouvelles annonces disponibles !",
"hello": "Bonjour cher utilisateur",
"following_new_flats": "Voici les nouvelles annonces :",
"area": "surface",
"cost": "coût",
"signature": "Bonne recherche",
},
}
trs = i18n.get(config["notification_lang"], i18n["en"])
txt = trs["hello"] + ",\n\n\n\n"
html = f"""
<html> <html>
<head></head> <head></head>
<body> <body>
<p>Hello dear user!</p> <p>{trs["hello"]}!</p>
<p>The following new flats have been found: <p>{trs["following_new_flats"]}
<ul> <ul>
""" """
@ -77,41 +97,47 @@ def send_notification(config, flats):
for flat in flats: for flat in flats:
title = str(flat.title) title = str(flat.title)
flat_id = str(flat.id) flat_id = str(flat.id)
area = str(flat.area) area = str(int(flat.area))
cost = str(flat.cost) cost = int(flat.cost)
currency = str(flat.currency) currency = str(flat.currency)
txt += ( txt += f"- {title}: {website_url}#/flat/{flat_id} "
'- {}: {}#/flat/{} (area: {}, cost: {} {})\n'.format( html += f"""
title, website_url, flat_id, area, cost, currency
)
)
html += """
<li> <li>
<a href="{}#/flat/{}">{}</a> <a href="{website_url}#/flat/{flat_id}">{title}</a>
(area: {}, cost: {} {}) """
</li>
""".format(website_url, flat_id, title, area, cost, currency) fields = []
if area:
fields.append(f"{trs['area']}: {area}")
if cost:
money = Money(cost, currency).format(config["notification_lang"])
fields.append(f"{trs['cost']}: {money}")
if len(fields):
txt += f'({", ".join(fields)})'
html += f'({", ".join(fields)})'
html += "</li>"
html += "</ul>" html += "</ul>"
signature = ( signature = f"\n{trs['signature']}\n\nBye!\nFlatisfy"
u"\nHope you'll find what you were looking for.\n\nBye!\nFlatisfy"
)
txt += signature txt += signature
html += signature.replace('\n', '<br>') html += signature.replace("\n", "<br>")
html += """</p> html += """</p>
</body> </body>
</html>""" </html>"""
send_email(config["smtp_server"], send_email(
config["smtp_port"], config["smtp_server"],
"New flats found!", config["smtp_port"],
config["smtp_from"], trs["subject"],
config["smtp_to"], config["smtp_from"],
txt, config["smtp_to"],
html, txt,
config.get("smtp_username"), html,
config.get("smtp_password")) config.get("smtp_username"),
config.get("smtp_password"),
)
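The new `Money(...).format(...)` call localizes the price in the notification. A quick sketch of what it produces, assuming the `money` package (with `babel` installed for currency formatting):

```python
from money import Money

price = Money(1250, "EUR")
print(price.format("fr"))  # e.g. "1 250,00 €"
print(price.format("en"))  # e.g. "€1,250.00"
```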

View File

@ -10,4 +10,5 @@ class DataBuildError(Exception):
""" """
Error occurring on building a data file. Error occurring on building a data file.
""" """
pass pass

View File

@ -9,6 +9,7 @@ import collections
import itertools import itertools
import json import json
import logging import logging
from ratelimit import limits
from flatisfy import database from flatisfy import database
from flatisfy import tools from flatisfy import tools
@ -24,8 +25,7 @@ try:
from weboob.core.ouiboube import WebNip from weboob.core.ouiboube import WebNip
from weboob.tools.json import WeboobEncoder from weboob.tools.json import WeboobEncoder
except ImportError: except ImportError:
LOGGER.error("Weboob is not available on your system. Make sure you " LOGGER.error("Weboob is not available on your system. Make sure you installed it.")
"installed it.")
raise raise
@ -34,6 +34,7 @@ class WebOOBProxy(object):
Wrapper around WebOOB ``WebNip`` class, to fetch housing posts without Wrapper around WebOOB ``WebNip`` class, to fetch housing posts without
having to spawn a subprocess. having to spawn a subprocess.
""" """
@staticmethod @staticmethod
def version(): def version():
""" """
@ -77,14 +78,14 @@ class WebOOBProxy(object):
self.webnip = WebNip(modules_path=config["modules_path"]) self.webnip = WebNip(modules_path=config["modules_path"])
# Create backends # Create backends
self.backends = [ self.backends = []
self.webnip.load_backend( for module in backends:
module, try:
module, self.backends.append(
params={} self.webnip.load_backend(module, module, params={})
) )
for module in backends except Exception as exc:
] raise Exception("Unable to load module " + module) from exc
def __enter__(self): def __enter__(self):
return self return self
@ -114,28 +115,21 @@ class WebOOBProxy(object):
except CallErrors as exc: except CallErrors as exc:
# If an error occurred, just log it # If an error occurred, just log it
LOGGER.error( LOGGER.error(
( ("An error occurred while building query for postal code %s: %s"),
"An error occurred while building query for "
"postal code %s: %s"
),
postal_code, postal_code,
str(exc) str(exc),
) )
if not matching_cities: if not matching_cities:
# If postal code gave no match, warn the user # If postal code gave no match, warn the user
LOGGER.warn( LOGGER.warn("Postal code %s could not be matched with a city.", postal_code)
"Postal code %s could not be matched with a city.",
postal_code
)
# Remove "TOUTES COMMUNES" entry which are duplicates of the individual # Remove "TOUTES COMMUNES" entry which are duplicates of the individual
# cities entries in Logicimmo module. # cities entries in Logicimmo module.
matching_cities = [ matching_cities = [
city city
for city in matching_cities for city in matching_cities
if not (city.backend == 'logicimmo' and if not (city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES"))
city.name.startswith('TOUTES COMMUNES'))
] ]
# Then, build queries by grouping cities by at most 3 # Then, build queries by grouping cities by at most 3
@ -145,21 +139,14 @@ class WebOOBProxy(object):
try: try:
query.house_types = [ query.house_types = [
getattr( getattr(HOUSE_TYPES, house_type.upper()) for house_type in constraints_dict["house_types"]
HOUSE_TYPES,
house_type.upper()
)
for house_type in constraints_dict["house_types"]
] ]
except AttributeError: except AttributeError:
LOGGER.error("Invalid house types constraint.") LOGGER.error("Invalid house types constraint.")
return None return None
try: try:
query.type = getattr( query.type = getattr(POSTS_TYPES, constraints_dict["type"].upper())
POSTS_TYPES,
constraints_dict["type"].upper()
)
except AttributeError: except AttributeError:
LOGGER.error("Invalid post type constraint.") LOGGER.error("Invalid post type constraint.")
return None return None
@ -190,26 +177,22 @@ class WebOOBProxy(object):
# TODO: Handle max_entries better # TODO: Handle max_entries better
try: try:
for housing in itertools.islice( for housing in itertools.islice(
self.webnip.do( self.webnip.do(
'search_housings', "search_housings",
query, query,
# Only run the call on the required backends. # Only run the call on the required backends.
# Otherwise, WebOOB is doing weird stuff and returning # Otherwise, WebOOB is doing weird stuff and returning
# nonsense. # nonsense.
backends=[x for x in self.backends backends=[x for x in self.backends if x.name in useful_backends],
if x.name in useful_backends] ),
), max_entries,
max_entries
): ):
if not store_personal_data: if not store_personal_data:
housing.phone = None housing.phone = None
housings.append(json.dumps(housing, cls=WeboobEncoder)) housings.append(json.dumps(housing, cls=WeboobEncoder))
except CallErrors as exc: except CallErrors as exc:
# If an error occurred, just log it # If an error occurred, just log it
LOGGER.error( LOGGER.error("An error occurred while fetching the housing posts: %s", str(exc))
"An error occurred while fetching the housing posts: %s",
str(exc)
)
return housings return housings
def info(self, full_flat_id, store_personal_data=False): def info(self, full_flat_id, store_personal_data=False):
@ -224,34 +207,26 @@ class WebOOBProxy(object):
""" """
flat_id, backend_name = full_flat_id.rsplit("@", 1) flat_id, backend_name = full_flat_id.rsplit("@", 1)
try: try:
backend = next( backend = next(backend for backend in self.backends if backend.name == backend_name)
backend
for backend in self.backends
if backend.name == backend_name
)
except StopIteration: except StopIteration:
LOGGER.error("Backend %s is not available.", backend_name) LOGGER.error("Backend %s is not available.", backend_name)
return "{}" return "{}"
try: try:
housing = backend.get_housing(flat_id) housing = backend.get_housing(flat_id)
# Otherwise, we miss the @backend afterwards
housing.id = full_flat_id
if not store_personal_data: if not store_personal_data:
# Ensure phone is cleared # Ensure phone is cleared
housing.phone = None housing.phone = None
else: else:
# Ensure phone is fetched # Ensure phone is fetched
backend.fillobj(housing, 'phone') backend.fillobj(housing, "phone")
# Otherwise, we miss the @backend afterwards
housing.id = full_flat_id
return json.dumps(housing, cls=WeboobEncoder) return json.dumps(housing, cls=WeboobEncoder)
except Exception as exc: # pylint: disable=broad-except except Exception as exc: # pylint: disable=broad-except
# If an error occurred, just log it # If an error occurred, just log it
LOGGER.error( LOGGER.error("An error occurred while fetching housing %s: %s", full_flat_id, str(exc))
"An error occurred while fetching housing %s: %s",
full_flat_id,
str(exc)
)
return "{}" return "{}"
@ -271,19 +246,24 @@ def fetch_flats(config):
queries = webOOB_proxy.build_queries(constraint) queries = webOOB_proxy.build_queries(constraint)
housing_posts = [] housing_posts = []
for query in queries: for query in queries:
housing_posts.extend( housing_posts.extend(webOOB_proxy.query(query, config["max_entries"], config["store_personal_data"]))
webOOB_proxy.query(query, config["max_entries"], housing_posts = housing_posts[: config["max_entries"]]
config["store_personal_data"])
)
LOGGER.info("Fetched %d flats.", len(housing_posts)) LOGGER.info("Fetched %d flats.", len(housing_posts))
constraint_flats_list = [json.loads(flat) for flat in housing_posts] constraint_flats_list = [json.loads(flat) for flat in housing_posts]
constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat) constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat) for flat in constraint_flats_list]
for flat in constraint_flats_list]
fetched_flats[constraint_name] = constraint_flats_list fetched_flats[constraint_name] = constraint_flats_list
return fetched_flats return fetched_flats
@limits(calls=10, period=60)
def fetch_details_rate_limited(config, flat_id):
"""
Limit flats fetching to at most 10 calls per minute to avoid being rate-banned by the backends.
"""
return fetch_details(config, flat_id)
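Caveat: `@limits` alone does not wait; once the quota is exceeded it raises `ratelimit.RateLimitException`, which the caller has to handle. If blocking is preferred, the library's `sleep_and_retry` decorator can be stacked on top (illustrative variant, not part of this commit):

```python
from ratelimit import limits, sleep_and_retry


@sleep_and_retry  # sleep until the current 60s window frees up instead of raising
@limits(calls=10, period=60)
def fetch_details_blocking(config, flat_id):
    return fetch_details(config, flat_id)
```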
def fetch_details(config, flat_id): def fetch_details(config, flat_id):
""" """
Fetch the additional details for a flat using Flatboob / WebOOB. Fetch the additional details for a flat using Flatboob / WebOOB.
@ -294,8 +274,7 @@ def fetch_details(config, flat_id):
""" """
with WebOOBProxy(config) as webOOB_proxy: with WebOOBProxy(config) as webOOB_proxy:
LOGGER.info("Loading additional details for flat %s.", flat_id) LOGGER.info("Loading additional details for flat %s.", flat_id)
webOOB_output = webOOB_proxy.info(flat_id, webOOB_output = webOOB_proxy.info(flat_id, config["store_personal_data"])
config["store_personal_data"])
flat_details = json.loads(webOOB_output) flat_details = json.loads(webOOB_output)
flat_details = WebOOBProxy.restore_decimal_fields(flat_details) flat_details = WebOOBProxy.restore_decimal_fields(flat_details)
@ -326,10 +305,7 @@ def load_flats_from_file(json_file, config):
LOGGER.info("Found %d flats.", len(flats_list)) LOGGER.info("Found %d flats.", len(flats_list))
except (IOError, ValueError): except (IOError, ValueError):
LOGGER.error("File %s is not a valid dump file.", json_file) LOGGER.error("File %s is not a valid dump file.", json_file)
return { return {constraint_name: flats_list for constraint_name in config["constraints"]}
constraint_name: flats_list
for constraint_name in config["constraints"]
}
def load_flats_from_db(config): def load_flats_from_db(config):

View File

@ -36,48 +36,51 @@ def refine_with_housing_criteria(flats_list, constraint):
for i, flat in enumerate(flats_list): for i, flat in enumerate(flats_list):
# Check postal code # Check postal code
postal_code = flat["flatisfy"].get("postal_code", None) postal_code = flat["flatisfy"].get("postal_code", None)
if ( if postal_code and postal_code not in constraint["postal_codes"]:
postal_code and LOGGER.info(
postal_code not in constraint["postal_codes"] "Postal code %s for flat %s is out of range (%s).",
): postal_code,
LOGGER.info("Postal code for flat %s is out of range.", flat["id"]) flat["id"],
is_ok[i] = is_ok[i] and False ", ".join(constraint["postal_codes"]),
)
is_ok[i] = False
# Check INSEE code
insee_code = flat["flatisfy"].get("insee_code", None)
if insee_code and "insee_codes" in constraint and insee_code not in constraint["insee_codes"]:
LOGGER.info(
"INSEE code %s for flat %s is out of range (%s).",
insee_code,
flat["id"],
", ".join(constraint["insee_codes"]),
)
is_ok[i] = False
# Check time_to # Check time_to
for place_name, time in flat["flatisfy"].get("time_to", {}).items(): for place_name, time in flat["flatisfy"].get("time_to", {}).items():
time = time["time"] time = time["time"]
is_within_interval = tools.is_within_interval( is_within_interval = tools.is_within_interval(time, *(constraint["time_to"][place_name]["time"]))
time,
*(constraint["time_to"][place_name]["time"])
)
if not is_within_interval: if not is_within_interval:
LOGGER.info("Flat %s is too far from place %s: %ds.", LOGGER.info(
flat["id"], place_name, time) "Flat %s is too far from place %s: %ds.",
flat["id"],
place_name,
time,
)
is_ok[i] = is_ok[i] and is_within_interval is_ok[i] = is_ok[i] and is_within_interval
# Check other fields # Check other fields
for field in ["area", "cost", "rooms", "bedrooms"]: for field in ["area", "cost", "rooms", "bedrooms"]:
interval = constraint[field] interval = constraint[field]
is_within_interval = tools.is_within_interval( is_within_interval = tools.is_within_interval(flat.get(field, None), *interval)
flat.get(field, None),
*interval
)
if not is_within_interval: if not is_within_interval:
LOGGER.info("%s for flat %s is out of range.", LOGGER.info(
field.capitalize(), flat["id"]) "%s %s for flat %s is out of range.", field.capitalize(), str(flat.get(field, None)), flat["id"]
)
is_ok[i] = is_ok[i] and is_within_interval is_ok[i] = is_ok[i] and is_within_interval
return ( return (
[ [flat for i, flat in enumerate(flats_list) if is_ok[i]],
flat [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
for i, flat in enumerate(flats_list)
if is_ok[i]
],
[
flat
for i, flat in enumerate(flats_list)
if not is_ok[i]
]
) )
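For reference, the constraint fields consumed by this filter look roughly like this (keys are taken from the code above; values are illustrative):

```python
constraint = {
    "postal_codes": ["75013", "75014"],
    "insee_codes": ["75113"],  # optional; only checked when present
    "area": (30, None),        # (min, max) intervals; None means unbounded
    "cost": (None, 1500),
    "rooms": (2, None),
    "bedrooms": (None, None),
    "time_to": {"work": {"time": (None, 1800)}},  # seconds
}
```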
@ -103,54 +106,36 @@ def refine_with_details_criteria(flats_list, constraint):
for i, flat in enumerate(flats_list): for i, flat in enumerate(flats_list):
# Check number of pictures # Check number of pictures
has_enough_photos = tools.is_within_interval( has_enough_photos = tools.is_within_interval(len(flat.get("photos", [])), constraint["minimum_nb_photos"], None)
len(flat.get('photos', [])),
constraint['minimum_nb_photos'],
None
)
if not has_enough_photos: if not has_enough_photos:
LOGGER.info( LOGGER.info(
"Flat %s only has %d photos, it should have at least %d.", "Flat %s only has %d photos, it should have at least %d.",
flat["id"], flat["id"],
len(flat['photos']), len(flat["photos"]),
constraint['minimum_nb_photos'] constraint["minimum_nb_photos"],
) )
is_ok[i] = False is_ok[i] = False
has_all_good_terms_in_description = True for term in constraint["description_should_contain"]:
if constraint["description_should_contain"]: if term.lower() not in flat["text"].lower():
has_all_good_terms_in_description = all( LOGGER.info(
term in flat['text'] ("Description for flat %s does not contain required term '%s'."),
for term in constraint["description_should_contain"] flat["id"],
) term,
)
has_a_bad_term_in_description = False is_ok[i] = False
if constraint["description_should_not_contain"]: for term in constraint["description_should_not_contain"]:
has_a_bad_term_in_description = any( if term.lower() in flat["text"].lower():
term in flat['text'] LOGGER.info(
for term in constraint["description_should_not_contain"] ("Description for flat %s contains blacklisted term '%s'."),
) flat["id"],
term,
if (not has_all_good_terms_in_description )
or has_a_bad_term_in_description): is_ok[i] = False
LOGGER.info(
("Description for flat %s does not contain all the required "
"terms, or contains a blacklisted term."),
flat["id"]
)
is_ok[i] = False
return ( return (
[ [flat for i, flat in enumerate(flats_list) if is_ok[i]],
flat [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
for i, flat in enumerate(flats_list)
if is_ok[i]
],
[
flat
for i, flat in enumerate(flats_list)
if not is_ok[i]
]
) )
@ -172,30 +157,25 @@ def first_pass(flats_list, constraint, config):
# Handle duplicates based on ids # Handle duplicates based on ids
# Just remove them (no merge) as they should be the exact same object. # Just remove them (no merge) as they should be the exact same object.
flats_list, _ = duplicates.detect( flats_list, _ = duplicates.detect(flats_list, key="id", merge=False, should_intersect=False)
flats_list, key="id", merge=False, should_intersect=False
)
# Also merge duplicates based on urls (these may come from different # Also merge duplicates based on urls (these may come from different
# flatboob backends) # flatboob backends)
# This is especially useful as some websites such as entreparticuliers # This is especially useful as some websites such as entreparticuliers
# contain a lot of leboncoin housing posts. # contain a lot of leboncoin housing posts.
flats_list, duplicates_by_urls = duplicates.detect( flats_list, duplicates_by_urls = duplicates.detect(flats_list, key="urls", merge=True, should_intersect=True)
flats_list, key="urls", merge=True, should_intersect=True
)
# Guess the postal codes # Guess the postal codes
flats_list = metadata.guess_postal_code(flats_list, constraint, config) flats_list = metadata.guess_postal_code(flats_list, constraint, config)
# Try to match with stations
flats_list = metadata.guess_stations(flats_list, constraint, config)
# Remove returned housing posts that do not match criteria
flats_list, ignored_list = refine_with_housing_criteria(flats_list,
constraint)
return { if not config["ignore_station"]:
"new": flats_list, # Try to match with stations
"ignored": ignored_list, flats_list = metadata.guess_stations(flats_list, constraint, config)
"duplicate": duplicates_by_urls
} # Remove returned housing posts that do not match criteria
flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
return {"new": flats_list, "ignored": ignored_list, "duplicate": duplicates_by_urls}
@tools.timeit @tools.timeit
def second_pass(flats_list, constraint, config): def second_pass(flats_list, constraint, config):
@ -222,28 +202,24 @@ def second_pass(flats_list, constraint, config):
flats_list = metadata.guess_postal_code(flats_list, constraint, config) flats_list = metadata.guess_postal_code(flats_list, constraint, config)
# Better match with stations (confirm and check better) # Better match with stations (confirm and check better)
flats_list = metadata.guess_stations(flats_list, constraint, config) if not config["ignore_station"]:
flats_list = metadata.guess_stations(flats_list, constraint, config)
# Compute travel time to specified points # Compute travel time to specified points
flats_list = metadata.compute_travel_times(flats_list, constraint, config) flats_list = metadata.compute_travel_times(flats_list, constraint, config)
# Remove returned housing posts that do not match criteria # Remove returned housing posts that do not match criteria
flats_list, ignored_list = refine_with_housing_criteria(flats_list, flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
constraint)
# Remove returned housing posts which do not match criteria relying on # Remove returned housing posts which do not match criteria relying on
# fetched details. # fetched details.
flats_list, ignored_list = refine_with_details_criteria(flats_list, flats_list, ignored_list = refine_with_details_criteria(flats_list, constraint)
constraint)
if config["serve_images_locally"]: if config["serve_images_locally"]:
images.download_images(flats_list, config) images.download_images(flats_list, config)
return { return {"new": flats_list, "ignored": ignored_list, "duplicate": []}
"new": flats_list,
"ignored": ignored_list,
"duplicate": []
}
@tools.timeit @tools.timeit
def third_pass(flats_list, config): def third_pass(flats_list, config):
@ -262,8 +238,4 @@ def third_pass(flats_list, config):
# Deduplicate the list using every available data # Deduplicate the list using every available data
flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config) flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)
return { return {"new": flats_list, "ignored": [], "duplicate": duplicate_flats}
"new": flats_list,
"ignored": [],
"duplicate": duplicate_flats
}
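All three passes now return the same compact shape, which is worth keeping in mind when following the pipeline:

```python
result = first_pass(flats_list, constraint, config)
result["new"]        # flats that passed the filters and continue down the pipeline
result["ignored"]    # flats discarded by the housing/details criteria
result["duplicate"]  # flats that were merged into another entry
```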

View File

@ -9,15 +9,19 @@ import collections
import hashlib import hashlib
import os import os
import requests import requests
import logging
from io import BytesIO from io import BytesIO
import PIL.Image import PIL.Image
LOGGER = logging.getLogger(__name__)
class MemoryCache(object): class MemoryCache(object):
""" """
A cache in memory. A cache in memory.
""" """
@staticmethod @staticmethod
def on_miss(key): def on_miss(key):
""" """
@ -85,6 +89,7 @@ class ImageCache(MemoryCache):
""" """
A cache for images, stored in memory. A cache for images, stored in memory.
""" """
@staticmethod @staticmethod
def compute_filename(url): def compute_filename(url):
""" """
@ -104,23 +109,27 @@ class ImageCache(MemoryCache):
if len(self.map.keys()) > self.max_items: if len(self.map.keys()) > self.max_items:
self.map.popitem(last=False) self.map.popitem(last=False)
if url.endswith(".svg"):
# Skip SVG photos, which are unsupported and unlikely to be relevant
return None
filepath = None
# Try to load from local folder # Try to load from local folder
if self.storage_dir: if self.storage_dir:
filepath = os.path.join( filepath = os.path.join(self.storage_dir, self.compute_filename(url))
self.storage_dir,
self.compute_filename(url)
)
if os.path.isfile(filepath): if os.path.isfile(filepath):
return PIL.Image.open(filepath) return PIL.Image.open(filepath)
# Otherwise, fetch it # Otherwise, fetch it
try: try:
LOGGER.debug(f"Download photo from {url} to {filepath}")
req = requests.get(url) req = requests.get(url)
req.raise_for_status() req.raise_for_status()
image = PIL.Image.open(BytesIO(req.content)) image = PIL.Image.open(BytesIO(req.content))
if self.storage_dir: if filepath:
image.save(filepath, format=image.format) image.save(filepath, format=image.format)
return image return image
except (requests.HTTPError, IOError): except (requests.HTTPError, IOError) as exc:
LOGGER.info(f"Download photo from {url} failed: {exc}")
return None return None
def __init__(self, max_items=200, storage_dir=None): def __init__(self, max_items=200, storage_dir=None):
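`compute_filename` is not shown in this hunk; hashing the URL is the typical way to derive the stable, filesystem-safe name used by the local-folder lookup above. A sketch, assuming that is what the helper does:

```python
import hashlib


def compute_filename(url):
    # A stable, filesystem-safe cache key derived from the photo URL.
    return hashlib.sha1(url.encode("utf-8")).hexdigest()
```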

View File

@ -35,14 +35,14 @@ def homogeneize_phone_number(numbers):
clean_numbers = [] clean_numbers = []
for number in numbers.split(','): for number in numbers.split(","):
number = number.strip() number = number.strip()
number = number.replace(".", "") number = number.replace(".", "")
number = number.replace(" ", "") number = number.replace(" ", "")
number = number.replace("-", "") number = number.replace("-", "")
number = number.replace("(", "") number = number.replace("(", "")
number = number.replace(")", "") number = number.replace(")", "")
number = re.sub(r'^\+\d\d', "", number) number = re.sub(r"^\+\d\d", "", number)
if not number.startswith("0"): if not number.startswith("0"):
number = "0" + number number = "0" + number
@ -94,12 +94,7 @@ def compare_photos(photo1, photo2, photo_cache, hash_threshold):
return False return False
def find_number_common_photos( def find_number_common_photos(flat1_photos, flat2_photos, photo_cache, hash_threshold):
flat1_photos,
flat2_photos,
photo_cache,
hash_threshold
):
""" """
Compute the number of common photos between the two lists of photos for the Compute the number of common photos between the two lists of photos for the
flats. flats.
@ -174,22 +169,21 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
# Sort matching flats by backend precedence # Sort matching flats by backend precedence
matching_flats.sort( matching_flats.sort(
key=lambda flat: next( key=lambda flat: next(
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
if flat["id"].endswith(backend)
), ),
reverse=True reverse=True,
) )
if len(matching_flats) > 1: if len(matching_flats) > 1:
LOGGER.info("Found duplicates using key \"%s\": %s.", LOGGER.info(
key, 'Found duplicates using key "%s": %s.',
[flat["id"] for flat in matching_flats]) key,
[flat["id"] for flat in matching_flats],
)
# Otherwise, check the policy # Otherwise, check the policy
if merge: if merge:
# If a merge is requested, do the merge # If a merge is requested, do the merge
unique_flats_list.append( unique_flats_list.append(tools.merge_dicts(*matching_flats))
tools.merge_dicts(*matching_flats)
)
else: else:
# Otherwise, just keep the most important of them # Otherwise, just keep the most important of them
unique_flats_list.append(matching_flats[-1]) unique_flats_list.append(matching_flats[-1])
@ -203,8 +197,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
if should_intersect: if should_intersect:
# We added some flats twice with the above method, let's deduplicate on # We added some flats twice with the above method, let's deduplicate on
# id. # id.
unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, should_intersect=False)
should_intersect=False)
return unique_flats_list, duplicate_flats return unique_flats_list, duplicate_flats
@ -250,14 +243,12 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
# They should have the same postal code, if available # They should have the same postal code, if available
if ( if (
"flatisfy" in flat1 and "flatisfy" in flat2 and "flatisfy" in flat1
flat1["flatisfy"].get("postal_code", None) and and "flatisfy" in flat2
flat2["flatisfy"].get("postal_code", None) and flat1["flatisfy"].get("postal_code", None)
and flat2["flatisfy"].get("postal_code", None)
): ):
assert ( assert flat1["flatisfy"]["postal_code"] == flat2["flatisfy"]["postal_code"]
flat1["flatisfy"]["postal_code"] ==
flat2["flatisfy"]["postal_code"]
)
n_common_items += 1 n_common_items += 1
# TODO: Better text comparison (one included in the other, fuzzymatch) # TODO: Better text comparison (one included in the other, fuzzymatch)
@ -279,28 +270,16 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
# If the two flats are from the same website and have a # If the two flats are from the same website and have a
# different float part, consider they cannot be duplicates. See # different float part, consider they cannot be duplicates. See
# https://framagit.org/phyks/Flatisfy/issues/100. # https://framagit.org/phyks/Flatisfy/issues/100.
both_are_from_same_backend = ( both_are_from_same_backend = flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1] both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
) both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
both_have_float_part = (
(flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
)
both_have_equal_float_part = (
(flat1["area"] % 1) == (flat2["area"] % 1)
)
if both_have_float_part and both_are_from_same_backend: if both_have_float_part and both_are_from_same_backend:
assert both_have_equal_float_part assert both_have_equal_float_part
if flat1.get("photos", []) and flat2.get("photos", []): if flat1.get("photos", []) and flat2.get("photos", []):
n_common_photos = find_number_common_photos( n_common_photos = find_number_common_photos(flat1["photos"], flat2["photos"], photo_cache, hash_threshold)
flat1["photos"],
flat2["photos"],
photo_cache,
hash_threshold
)
min_number_photos = min(len(flat1["photos"]), min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
len(flat2["photos"]))
# Either all the photos are the same, or there are at least # Either all the photos are the same, or there are at least
# three common photos. # three common photos.
@ -332,9 +311,7 @@ def deep_detect(flats_list, config):
storage_dir = os.path.join(config["data_directory"], "images") storage_dir = os.path.join(config["data_directory"], "images")
else: else:
storage_dir = None storage_dir = None
photo_cache = ImageCache( photo_cache = ImageCache(storage_dir=storage_dir)
storage_dir=storage_dir
)
LOGGER.info("Running deep duplicates detection.") LOGGER.info("Running deep duplicates detection.")
matching_flats = collections.defaultdict(list) matching_flats = collections.defaultdict(list)
@ -347,30 +324,26 @@ def deep_detect(flats_list, config):
if flat2["id"] in matching_flats[flat1["id"]]: if flat2["id"] in matching_flats[flat1["id"]]:
continue continue
n_common_items = get_duplicate_score( n_common_items = get_duplicate_score(flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"])
flat1,
flat2,
photo_cache,
config["duplicate_image_hash_threshold"]
)
# Minimal score to consider they are duplicates # Minimal score to consider they are duplicates
if n_common_items >= config["duplicate_threshold"]: if n_common_items >= config["duplicate_threshold"]:
# Mark flats as duplicates # Mark flats as duplicates
LOGGER.info( LOGGER.info(
("Found duplicates using deep detection: (%s, %s). " ("Found duplicates using deep detection: (%s, %s). Score is %d."),
"Score is %d."),
flat1["id"], flat1["id"],
flat2["id"], flat2["id"],
n_common_items n_common_items,
) )
matching_flats[flat1["id"]].append(flat2["id"]) matching_flats[flat1["id"]].append(flat2["id"])
matching_flats[flat2["id"]].append(flat1["id"]) matching_flats[flat2["id"]].append(flat1["id"])
if photo_cache.total(): if photo_cache.total():
LOGGER.debug("Photo cache: hits: %d%% / misses: %d%%.", LOGGER.debug(
photo_cache.hit_rate(), "Photo cache: hits: %d%% / misses: %d%%.",
photo_cache.miss_rate()) photo_cache.hit_rate(),
photo_cache.miss_rate(),
)
seen_ids = [] seen_ids = []
duplicate_flats = [] duplicate_flats = []
@ -381,16 +354,11 @@ def deep_detect(flats_list, config):
seen_ids.extend(matching_flats[flat_id]) seen_ids.extend(matching_flats[flat_id])
to_merge = sorted( to_merge = sorted(
[ [flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
flat
for flat in flats_list
if flat["id"] in matching_flats[flat_id]
],
key=lambda flat: next( key=lambda flat: next(
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
if flat["id"].endswith(backend)
), ),
reverse=True reverse=True,
) )
unique_flats_list.append(tools.merge_dicts(*to_merge)) unique_flats_list.append(tools.merge_dicts(*to_merge))
# The ID of the added merged flat will be the one of the last item # The ID of the added merged flat will be the one of the last item
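The `hash_threshold` threaded through `compare_photos` and `find_number_common_photos` is a perceptual-hash distance. A sketch of the underlying comparison, assuming the `imagehash` package (and its `average_hash`) is what backs it:

```python
import imagehash
import PIL.Image


def photos_look_identical(image1, image2, hash_threshold=10):
    # Two photos count as "the same" when their average hashes differ by
    # at most `hash_threshold` bits (Hamming distance).
    return imagehash.average_hash(image1) - imagehash.average_hash(image2) <= hash_threshold


photo1 = PIL.Image.open("photo1.jpg")  # illustrative file names
photo2 = PIL.Image.open("photo2.jpg")
print(photos_look_identical(photo1, photo2))
```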

View File

@ -22,15 +22,8 @@ def download_images(flats_list, config):
:param flats_list: A list of flats dicts. :param flats_list: A list of flats dicts.
:param config: A config dict. :param config: A config dict.
""" """
photo_cache = ImageCache( photo_cache = ImageCache(storage_dir=os.path.join(config["data_directory"], "images"))
storage_dir=os.path.join(config["data_directory"], "images") for flat in flats_list:
)
flats_list_length = len(flats_list)
for i, flat in enumerate(flats_list):
LOGGER.info(
"Downloading photos for flat %d/%d: %s.",
i + 1, flats_list_length, flat["id"]
)
for photo in flat["photos"]: for photo in flat["photos"]:
# Download photo # Download photo
image = photo_cache.get(photo["url"]) image = photo_cache.get(photo["url"])

View File

@ -76,10 +76,10 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
Example:: Example::
>>> match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1) >>> fuzzy_match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
[("Paris 14", 100) [("Paris 14", 100)
>>> match( \ >>> fuzzy_match( \
"Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \ "Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \
"Mouton-Duvernet", \ "Mouton-Duvernet", \
["saint-jacques", "denfert rochereau", "duvernet", "toto"], \ ["saint-jacques", "denfert rochereau", "duvernet", "toto"], \
@ -88,8 +88,8 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
[('denfert rochereau', 100), ('saint-jacques', 76)] [('denfert rochereau', 100), ('saint-jacques', 76)]
""" """
# TODO: Is there a better confidence measure? # TODO: Is there a better confidence measure?
normalized_query = tools.normalize_string(query) normalized_query = tools.normalize_string(query).replace("saint", "st")
normalized_choices = [tools.normalize_string(choice) for choice in choices] normalized_choices = [tools.normalize_string(choice).replace("saint", "st") for choice in choices]
# Remove duplicates in the choices list # Remove duplicates in the choices list
unique_normalized_choices = tools.uniqify(normalized_choices) unique_normalized_choices = tools.uniqify(normalized_choices)
@ -97,13 +97,9 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
# Get the matches (normalized strings) # Get the matches (normalized strings)
# Keep only ``limit`` matches. # Keep only ``limit`` matches.
matches = sorted( matches = sorted(
[ [(choice, len(choice)) for choice in tools.uniqify(unique_normalized_choices) if choice in normalized_query],
(choice, len(choice))
for choice in tools.uniqify(unique_normalized_choices)
if choice in normalized_query
],
key=lambda x: x[1], key=lambda x: x[1],
reverse=True reverse=True,
) )
if limit: if limit:
matches = matches[:limit] matches = matches[:limit]
@ -111,22 +107,66 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
# Update confidence # Update confidence
if matches: if matches:
max_confidence = max(match[1] for match in matches) max_confidence = max(match[1] for match in matches)
matches = [ matches = [(x[0], int(x[1] / max_confidence * 100)) for x in matches]
(x[0], int(x[1] / max_confidence * 100))
for x in matches
]
# Convert back matches to original strings # Convert back matches to original strings
# Also filter out matches below threshold # Also filter out matches below threshold
matches = [ matches = [(choices[normalized_choices.index(x[0])], x[1]) for x in matches if x[1] >= threshold]
(choices[normalized_choices.index(x[0])], x[1])
for x in matches
if x[1] >= threshold
]
return matches return matches
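The added `replace("saint", "st")` makes both spellings of a station name normalize to the same token before the substring check (sketch, assuming `tools.normalize_string` lowercases its input):

```python
# Both spellings end up identical, so either one can match the other:
tools.normalize_string("Saint-Jacques").replace("saint", "st")  # e.g. "st-jacques"
tools.normalize_string("St-Jacques").replace("saint", "st")     # e.g. "st-jacques"
```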
def guess_location_position(location, cities, constraint, must_match):
# try to find a city
# Find all fuzzy-matching cities
postal_code = None
insee_code = None
position = None
matched_cities = fuzzy_match(location, [x.name for x in cities], limit=None)
if matched_cities:
# Find associated postal codes
matched_postal_codes = []
for matched_city_name, _ in matched_cities:
postal_code_objects_for_city = [x for x in cities if x.name == matched_city_name]
insee_code = [pc.insee_code for pc in postal_code_objects_for_city][0]
matched_postal_codes.extend(pc.postal_code for pc in postal_code_objects_for_city)
# Try to match them with postal codes in config constraint
matched_postal_codes_in_config = set(matched_postal_codes) & set(constraint["postal_codes"])
if matched_postal_codes_in_config:
# If there are some matched postal codes which are also in
# config, use them preferentially. This avoids incorrectly
# ignoring some flats in cities with multiple postal
# codes; see #110.
postal_code = next(iter(matched_postal_codes_in_config))
else:
# Otherwise, simply take any matched postal code.
postal_code = matched_postal_codes[0]
# take the city position
for matched_city_name, _ in matched_cities:
postal_code_objects_for_city = [
x for x in cities if x.name == matched_city_name and x.postal_code == postal_code
]
if len(postal_code_objects_for_city):
position = {
"lat": postal_code_objects_for_city[0].lat,
"lng": postal_code_objects_for_city[0].lng,
}
LOGGER.debug(("Found position %s using city %s."), position, matched_city_name)
break
if not postal_code and must_match:
postal_code = cities[0].postal_code
position = {
"lat": cities[0].lat,
"lng": cities[0].lng,
}
insee_code = cities[0].insee_code
return (postal_code, insee_code, position)
def guess_postal_code(flats_list, constraint, config, distance_threshold=20000): def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
""" """
Try to guess the postal code from the location of the flats. Try to guess the postal code from the location of the flats.
@ -141,24 +181,27 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
:return: An updated list of flats dict with guessed postal code. :return: An updated list of flats dict with guessed postal code.
""" """
opendata = { opendata = {"postal_codes": data.load_data(PostalCode, constraint, config)}
"postal_codes": data.load_data(PostalCode, constraint, config)
}
for flat in flats_list: for flat in flats_list:
location = flat.get("location", None) location = flat.get("location", None)
if not location:
addr = flat.get("address", None)
if addr:
location = addr["full_address"]
if not location: if not location:
# Skip everything if empty location # Skip everything if empty location
LOGGER.info( LOGGER.info(
( ("No location field for flat %s, skipping postal code lookup. (%s)"),
"No location field for flat %s, skipping postal " flat["id"],
"code lookup." flat.get("address"),
),
flat["id"]
) )
continue continue
postal_code = None postal_code = None
insee_code = None
position = None
# Try to find a postal code directly # Try to find a postal code directly
try: try:
postal_code = re.search(r"[0-9]{5}", location) postal_code = re.search(r"[0-9]{5}", location)
@ -166,86 +209,51 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
postal_code = postal_code.group(0) postal_code = postal_code.group(0)
# Check the postal code is within the db # Check the postal code is within the db
assert postal_code in [x.postal_code assert postal_code in [x.postal_code for x in opendata["postal_codes"]]
for x in opendata["postal_codes"]]
LOGGER.info( LOGGER.debug(
"Found postal code in location field for flat %s: %s.", "Found postal code directly in location field for flat %s: %s.",
flat["id"], postal_code flat["id"],
postal_code,
) )
except AssertionError: except AssertionError:
postal_code = None postal_code = None
# If not found, try to find a city # Then fetch position (and postal_code if it couldn't be found earlier)
if not postal_code: cities = opendata["postal_codes"]
# Find all fuzzy-matching cities if postal_code:
matched_cities = fuzzy_match( cities = [x for x in cities if x.postal_code == postal_code]
location, (postal_code, insee_code, position) = guess_location_position(
[x.name for x in opendata["postal_codes"]], location, cities, constraint, postal_code is not None
limit=None )
)
if matched_cities:
# Find associated postal codes
matched_postal_codes = []
for matched_city_name, _ in matched_cities:
postal_code_objects_for_city = [
x for x in opendata["postal_codes"]
if x.name == matched_city_name
]
matched_postal_codes.extend(
pc.postal_code
for pc in postal_code_objects_for_city
)
# Try to match them with postal codes in config constraint
matched_postal_codes_in_config = (
set(matched_postal_codes) & set(constraint["postal_codes"])
)
if matched_postal_codes_in_config:
# If there are some matched postal codes which are also in
# config, use them preferentially. This avoid ignoring
# incorrectly some flats in cities with multiple postal
# codes, see #110.
postal_code = next(iter(matched_postal_codes_in_config))
else:
# Otherwise, simply take any matched postal code.
postal_code = matched_postal_codes[0]
LOGGER.info(
("Found postal code in location field through city lookup "
"for flat %s: %s."),
flat["id"], postal_code
)
# Check that postal code is not too far from the ones listed in config, # Check that postal code is not too far from the ones listed in config,
# limit bad fuzzy matching # limit bad fuzzy matching
if postal_code and distance_threshold: if postal_code and distance_threshold:
distance = min( distance = min(
tools.distance( tools.distance(
next( next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code),
(x.lat, x.lng) next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == constraint_postal_code),
for x in opendata["postal_codes"]
if x.postal_code == postal_code
),
next(
(x.lat, x.lng)
for x in opendata["postal_codes"]
if x.postal_code == constraint_postal_code
)
) )
for constraint_postal_code in constraint["postal_codes"] for constraint_postal_code in constraint["postal_codes"]
) )
if distance > distance_threshold: if distance > distance_threshold:
LOGGER.info( LOGGER.info(
("Postal code %s found for flat %s is off-constraints " (
"(distance is %dm > %dm). Let's consider it is an " "Postal code %s found for flat %s @ %s is off-constraints "
"artifact match and keep the post without this postal " "(distance is %dm > %dm). Let's consider it is an "
"code."), "artifact match and keep the post without this postal "
"code."
),
postal_code, postal_code,
flat["id"], flat["id"],
location,
int(distance), int(distance),
int(distance_threshold) int(distance_threshold),
) )
postal_code = None postal_code = None
position = None
# Store it # Store it
if postal_code: if postal_code:
@ -253,12 +261,28 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
if existing_postal_code and existing_postal_code != postal_code: if existing_postal_code and existing_postal_code != postal_code:
LOGGER.warning( LOGGER.warning(
"Replacing previous postal code %s by %s for flat %s.", "Replacing previous postal code %s by %s for flat %s.",
existing_postal_code, postal_code, flat["id"] existing_postal_code,
postal_code,
flat["id"],
) )
flat["flatisfy"]["postal_code"] = postal_code flat["flatisfy"]["postal_code"] = postal_code
else: else:
LOGGER.info("No postal code found for flat %s.", flat["id"]) LOGGER.info("No postal code found for flat %s.", flat["id"])
if insee_code:
flat["flatisfy"]["insee_code"] = insee_code
if position:
flat["flatisfy"]["position"] = position
LOGGER.debug(
"found postal_code=%s insee_code=%s position=%s for flat %s (%s).",
postal_code,
insee_code,
position,
flat["id"],
location,
)
return flats_list return flats_list
@ -272,10 +296,10 @@ def guess_stations(flats_list, constraint, config):
:return: An updated list of flats dict with guessed nearby stations. :return: An updated list of flats dict with guessed nearby stations.
""" """
distance_threshold = config['max_distance_housing_station'] distance_threshold = config["max_distance_housing_station"]
opendata = { opendata = {
"postal_codes": data.load_data(PostalCode, constraint, config), "postal_codes": data.load_data(PostalCode, constraint, config),
"stations": data.load_data(PublicTransport, constraint, config) "stations": data.load_data(PublicTransport, constraint, config),
} }
for flat in flats_list: for flat in flats_list:
@ -283,14 +307,11 @@ def guess_stations(flats_list, constraint, config):
if not flat_station: if not flat_station:
# Skip everything if empty station # Skip everything if empty station
LOGGER.info( LOGGER.info("No stations field for flat %s, skipping stations lookup.", flat["id"])
"No stations field for flat %s, skipping stations lookup.",
flat["id"]
)
continue continue
# Weboob modules can return several stations in a comma-separated list. # Weboob modules can return several stations in a comma-separated list.
flat_stations = flat_station.split(',') flat_stations = flat_station.split(",")
# But some stations containing a comma exist, so let's add the initial # But some stations containing a comma exist, so let's add the initial
# value to the list of stations to check if there was one. # value to the list of stations to check if there was one.
if len(flat_stations) > 1: if len(flat_stations) > 1:
@ -302,7 +323,7 @@ def guess_stations(flats_list, constraint, config):
tentative_station, tentative_station,
[x.name for x in opendata["stations"]], [x.name for x in opendata["stations"]],
limit=10, limit=10,
threshold=50 threshold=50,
) )
# Keep only one occurrence of each station # Keep only one occurrence of each station
@ -315,54 +336,43 @@ def guess_stations(flats_list, constraint, config):
if postal_code: if postal_code:
# If there is a postal code, check that the matched station is # If there is a postal code, check that the matched station is
# close to it # close to it
postal_code_gps = next( postal_code_gps = next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code)
(x.lat, x.lng)
for x in opendata["postal_codes"]
if x.postal_code == postal_code
)
for station in matched_stations: for station in matched_stations:
# Note that multiple stations with the same name exist in a # Note that multiple stations with the same name exist in a
# city, hence the list of stations objects for a given matching # city, hence the list of stations objects for a given matching
# station name. # station name.
stations_objects = [ stations_objects = [x for x in opendata["stations"] if x.name == station[0]]
x for x in opendata["stations"] if x.name == station[0]
]
for station_data in stations_objects: for station_data in stations_objects:
distance = tools.distance( distance = tools.distance((station_data.lat, station_data.lng), postal_code_gps)
(station_data.lat, station_data.lng),
postal_code_gps
)
if distance < distance_threshold: if distance < distance_threshold:
# If at least one of the coordinates for a given # If at least one of the coordinates for a given
# station is close enough, that's ok and we can add # station is close enough, that's ok and we can add
# the station # the station
good_matched_stations.append({ good_matched_stations.append(
"key": station[0], {
"name": station_data.name, "key": station[0],
"confidence": station[1], "name": station_data.name,
"gps": (station_data.lat, station_data.lng) "confidence": station[1],
}) "gps": (station_data.lat, station_data.lng),
}
)
break break
LOGGER.info( LOGGER.info(
("Station %s is too far from flat %s (%dm > %dm), " ("Station %s is too far from flat %s (%dm > %dm), discarding this station."),
"discarding this station."),
station[0], station[0],
flat["id"], flat["id"],
int(distance), int(distance),
int(distance_threshold) int(distance_threshold),
) )
else: else:
LOGGER.info( LOGGER.info("No postal code for flat %s, skipping stations detection.", flat["id"])
"No postal code for flat %s, skipping stations detection.",
flat["id"]
)
if not good_matched_stations: if not good_matched_stations:
# No stations found, log it and continue with the next housing # No stations found, log it and continue with the next housing
LOGGER.info( LOGGER.info(
"No stations found for flat %s, matching %s.", "No stations found for flat %s, matching %s.",
flat["id"], flat["id"],
flat["station"] flat["station"],
) )
continue continue
@ -370,29 +380,20 @@ def guess_stations(flats_list, constraint, config):
"Found stations for flat %s: %s (matching %s).", "Found stations for flat %s: %s (matching %s).",
flat["id"], flat["id"],
", ".join(x["name"] for x in good_matched_stations), ", ".join(x["name"] for x in good_matched_stations),
flat["station"] flat["station"],
) )
# If some stations were already filled in and the result is different, # If some stations were already filled in and the result is different,
# display some warning to the user # display some warning to the user
if ( if "matched_stations" in flat["flatisfy"] and (
"matched_stations" in flat["flatisfy"] and # Do a set comparison, as ordering is not important
( set([station["name"] for station in flat["flatisfy"]["matched_stations"]])
# Do a set comparison, as ordering is not important != set([station["name"] for station in good_matched_stations])
set([
station["name"]
for station in flat["flatisfy"]["matched_stations"]
]) !=
set([
station["name"]
for station in good_matched_stations
])
)
): ):
LOGGER.warning( LOGGER.warning(
"Replacing previously fetched stations for flat %s. Found " "Replacing previously fetched stations for flat %s. Found "
"stations differ from the previously found ones.", "stations differ from the previously found ones.",
flat["id"] flat["id"],
) )
flat["flatisfy"]["matched_stations"] = good_matched_stations flat["flatisfy"]["matched_stations"] = good_matched_stations
@ -417,9 +418,8 @@ def compute_travel_times(flats_list, constraint, config):
if not flat["flatisfy"].get("matched_stations", []): if not flat["flatisfy"].get("matched_stations", []):
# Skip any flat without matched stations # Skip any flat without matched stations
LOGGER.info( LOGGER.info(
"Skipping travel time computation for flat %s. No matched " "Skipping travel time computation for flat %s. No matched stations.",
"stations.", flat["id"],
flat["id"]
) )
continue continue
@ -435,15 +435,10 @@ def compute_travel_times(flats_list, constraint, config):
for station in flat["flatisfy"]["matched_stations"]: for station in flat["flatisfy"]["matched_stations"]:
# Time from station is a dict with time and route # Time from station is a dict with time and route
time_from_station_dict = tools.get_travel_time_between( time_from_station_dict = tools.get_travel_time_between(
station["gps"], station["gps"], place["gps"], TimeToModes[mode], config
place["gps"],
TimeToModes[mode],
config
) )
if ( if time_from_station_dict and (
time_from_station_dict and time_to_place_dict is None or time_from_station_dict["time"] < time_to_place_dict["time"]
(time_from_station_dict["time"] < time_to_place_dict or
time_to_place_dict is None)
): ):
# If starting from this station makes the route to the # If starting from this station makes the route to the
# specified place shorter, update # specified place shorter, update
@ -452,7 +447,10 @@ def compute_travel_times(flats_list, constraint, config):
if time_to_place_dict: if time_to_place_dict:
LOGGER.info( LOGGER.info(
"Travel time between %s and flat %s by %s is %ds.", "Travel time between %s and flat %s by %s is %ds.",
place_name, flat["id"], mode, time_to_place_dict["time"] place_name,
flat["id"],
mode,
time_to_place_dict["time"],
) )
flat["flatisfy"]["time_to"][place_name] = time_to_place_dict flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
return flats_list return flats_list
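`tools.distance`, used above both for the off-constraints check and for station matching, compares GPS coordinates; its body is not part of this diff. A hypothetical equivalent is the classic haversine great-circle distance in meters:

```python
import math


def distance(gps1, gps2):
    """Great-circle distance in meters between two (lat, lng) pairs."""
    lat1, lng1 = (math.radians(coord) for coord in gps1)
    lat2, lng2 = (math.radians(coord) for coord in gps2)
    a = (
        math.sin((lat2 - lat1) / 2) ** 2
        + math.cos(lat1) * math.cos(lat2) * math.sin((lng2 - lng1) / 2) ** 2
    )
    return 2 * 6371000 * math.asin(math.sqrt(a))  # Earth radius ~6371 km


print(distance((48.8566, 2.3522), (45.7640, 4.8357)))  # Paris -> Lyon, ~392 km
```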

View File

@ -11,7 +11,15 @@ import enum
import arrow import arrow
from sqlalchemy import ( from sqlalchemy import (
Boolean, Column, DateTime, Enum, Float, SmallInteger, String, Text, inspect Boolean,
Column,
DateTime,
Enum,
Float,
SmallInteger,
String,
Text,
inspect,
) )
from sqlalchemy.orm import validates from sqlalchemy.orm import validates
@ -26,6 +34,7 @@ class FlatUtilities(enum.Enum):
""" """
An enum of the possible utilities status for a flat entry. An enum of the possible utilities status for a flat entry.
""" """
included = 10 included = 10
unknown = 0 unknown = 0
excluded = -10 excluded = -10
@ -35,6 +44,7 @@ class FlatStatus(enum.Enum):
""" """
An enum of the possible status for a flat entry. An enum of the possible status for a flat entry.
""" """
user_deleted = -100 user_deleted = -100
duplicate = -20 duplicate = -20
ignored = -10 ignored = -10
@ -47,21 +57,16 @@ class FlatStatus(enum.Enum):
# List of statuses that are automatically handled, and which the user cannot # List of statuses that are automatically handled, and which the user cannot
# manually set through the UI. # manually set through the UI.
AUTOMATED_STATUSES = [ AUTOMATED_STATUSES = [FlatStatus.new, FlatStatus.duplicate, FlatStatus.ignored]
FlatStatus.new,
FlatStatus.duplicate,
FlatStatus.ignored
]
class Flat(BASE): class Flat(BASE):
""" """
SQLAlchemy ORM model to store a flat. SQLAlchemy ORM model to store a flat.
""" """
__tablename__ = "flats" __tablename__ = "flats"
__searchable__ = [ __searchable__ = ["title", "text", "station", "location", "details", "notes"]
"title", "text", "station", "location", "details", "notes"
]
# Weboob data # Weboob data
id = Column(String, primary_key=True) id = Column(String, primary_key=True)
@ -91,6 +96,7 @@ class Flat(BASE):
flatisfy_postal_code = Column(String) flatisfy_postal_code = Column(String)
flatisfy_time_to = Column(MagicJSON) flatisfy_time_to = Column(MagicJSON)
flatisfy_constraint = Column(String) flatisfy_constraint = Column(String)
flatisfy_position = Column(MagicJSON)
# Status # Status
status = Column(Enum(FlatStatus), default=FlatStatus.new) status = Column(Enum(FlatStatus), default=FlatStatus.new)
@ -98,7 +104,7 @@ class Flat(BASE):
# Date for visit # Date for visit
visit_date = Column(DateTime) visit_date = Column(DateTime)
@validates('utilities') @validates("utilities")
def validate_utilities(self, _, utilities): def validate_utilities(self, _, utilities):
""" """
Utilities validation method Utilities validation method
@ -123,8 +129,7 @@ class Flat(BASE):
try: try:
return getattr(FlatStatus, status) return getattr(FlatStatus, status)
except (AttributeError, TypeError): except (AttributeError, TypeError):
LOGGER.warn("Unkown flat status %s, ignoring it.", LOGGER.warn("Unkown flat status %s, ignoring it.", status)
status)
return self.status.default.arg return self.status.default.arg
@validates("notation") @validates("notation")
@ -136,7 +141,7 @@ class Flat(BASE):
notation = int(notation) notation = int(notation)
assert notation >= 0 and notation <= 5 assert notation >= 0 and notation <= 5
except (ValueError, AssertionError): except (ValueError, AssertionError):
raise ValueError('notation should be an integer between 0 and 5') raise ValueError("notation should be an integer between 0 and 5")
return notation return notation
@validates("date") @validates("date")
@ -144,14 +149,18 @@ class Flat(BASE):
""" """
Date validation method Date validation method
""" """
return arrow.get(date).naive if date:
return arrow.get(date).naive
return None
@validates("visit_date") @validates("visit_date")
def validate_visit_date(self, _, visit_date): def validate_visit_date(self, _, visit_date):
""" """
Visit date validation method Visit date validation method
""" """
return arrow.get(visit_date).naive if visit_date:
return arrow.get(visit_date).naive
return None
@validates("photos") @validates("photos")
def validate_photos(self, _, photos): def validate_photos(self, _, photos):
@ -177,22 +186,14 @@ class Flat(BASE):
# Handle flatisfy metadata # Handle flatisfy metadata
flat_dict = flat_dict.copy() flat_dict = flat_dict.copy()
if "flatisfy" in flat_dict: if "flatisfy" in flat_dict:
flat_dict["flatisfy_stations"] = ( flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get("matched_stations", [])
flat_dict["flatisfy"].get("matched_stations", []) flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get("postal_code", None)
) flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
flat_dict["flatisfy_postal_code"] = ( flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
flat_dict["flatisfy"].get("postal_code", None) flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get("constraint", "default")
)
flat_dict["flatisfy_time_to"] = (
flat_dict["flatisfy"].get("time_to", {})
)
flat_dict["flatisfy_constraint"] = (
flat_dict["flatisfy"].get("constraint", "default")
)
del flat_dict["flatisfy"] del flat_dict["flatisfy"]
flat_dict = {k: v for k, v in flat_dict.items() flat_dict = {k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()}
if k in inspect(Flat).columns.keys()}
return Flat(**flat_dict) return Flat(**flat_dict)
def __repr__(self): def __repr__(self):
@ -203,11 +204,7 @@ class Flat(BASE):
Return a dict representation of this flat object that is JSON Return a dict representation of this flat object that is JSON
serializable. serializable.
""" """
flat_repr = { flat_repr = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
k: v
for k, v in self.__dict__.items()
if not k.startswith("_")
}
if isinstance(flat_repr["status"], FlatStatus): if isinstance(flat_repr["status"], FlatStatus):
flat_repr["status"] = flat_repr["status"].name flat_repr["status"] = flat_repr["status"].name
if isinstance(flat_repr["utilities"], FlatUtilities): if isinstance(flat_repr["utilities"], FlatUtilities):


@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import logging import logging
from sqlalchemy import ( from sqlalchemy import Column, Float, Integer, String, UniqueConstraint
Column, Float, Integer, String, UniqueConstraint
)
from flatisfy.database.base import BASE from flatisfy.database.base import BASE
@ -21,6 +19,7 @@ class PostalCode(BASE):
""" """
SQLAlchemy ORM model to store postal code opendata. SQLAlchemy ORM model to store postal code opendata.
""" """
__tablename__ = "postal_codes" __tablename__ = "postal_codes"
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
@ -28,6 +27,7 @@ class PostalCode(BASE):
# following ISO 3166-2. # following ISO 3166-2.
area = Column(String, index=True) area = Column(String, index=True)
postal_code = Column(String, index=True) postal_code = Column(String, index=True)
insee_code = Column(String, index=True)
name = Column(String, index=True) name = Column(String, index=True)
lat = Column(Float) lat = Column(Float)
lng = Column(Float) lng = Column(Float)
@ -41,8 +41,4 @@ class PostalCode(BASE):
Return a dict representation of this postal code object that is JSON Return a dict representation of this postal code object that is JSON
serializable. serializable.
""" """
return { return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
k: v
for k, v in self.__dict__.items()
if not k.startswith("_")
}


@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import logging import logging
from sqlalchemy import ( from sqlalchemy import Column, Float, Integer, String
Column, Float, Integer, String
)
from flatisfy.database.base import BASE from flatisfy.database.base import BASE
@ -21,6 +19,7 @@ class PublicTransport(BASE):
""" """
SQLAlchemy ORM model to store public transport opendata. SQLAlchemy ORM model to store public transport opendata.
""" """
__tablename__ = "public_transports" __tablename__ = "public_transports"
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)


@ -30,6 +30,7 @@ class LocalImageCache(ImageCache):
""" """
A local cache for images, stored in memory. A local cache for images, stored in memory.
""" """
@staticmethod @staticmethod
def on_miss(path): def on_miss(path):
""" """
@ -46,48 +47,34 @@ class TestTexts(unittest.TestCase):
""" """
Checks string normalizations. Checks string normalizations.
""" """
def test_roman_numbers(self): def test_roman_numbers(self):
""" """
Checks roman numeral replacement. Checks roman numeral replacement.
""" """
self.assertEqual( self.assertEqual("XIV", tools.convert_arabic_to_roman("14"))
"XIV",
tools.convert_arabic_to_roman("14")
)
self.assertEqual( self.assertEqual("XXXIX", tools.convert_arabic_to_roman("39"))
"XXXIX",
tools.convert_arabic_to_roman("39")
)
self.assertEqual( self.assertEqual("40", tools.convert_arabic_to_roman("40"))
"40",
tools.convert_arabic_to_roman("40")
)
self.assertEqual( self.assertEqual("1987", tools.convert_arabic_to_roman("1987"))
"1987",
tools.convert_arabic_to_roman("1987")
)
self.assertEqual( self.assertEqual(
"Dans le XVe arrondissement", "Dans le XVe arrondissement",
tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement") tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
) )
self.assertEqual( self.assertEqual("XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr."))
"XXeme arr.",
tools.convert_arabic_to_roman_in_text("20eme arr.")
)
self.assertEqual( self.assertEqual(
"A AIX EN PROVENCE", "A AIX EN PROVENCE",
tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE") tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE"),
) )
self.assertEqual( self.assertEqual(
"Montigny Le Bretonneux", "Montigny Le Bretonneux",
tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux") tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux"),
) )
def test_roman_numbers_in_text(self): def test_roman_numbers_in_text(self):
@ -97,77 +84,54 @@ class TestTexts(unittest.TestCase):
""" """
self.assertEqual( self.assertEqual(
"dans le XVe arrondissement", "dans le XVe arrondissement",
tools.normalize_string("Dans le 15e arrondissement") tools.normalize_string("Dans le 15e arrondissement"),
) )
self.assertEqual( self.assertEqual("paris XVe, 75005", tools.normalize_string("Paris 15e, 75005"))
"paris XVe, 75005",
tools.normalize_string("Paris 15e, 75005")
)
self.assertEqual( self.assertEqual("paris xve, 75005", tools.normalize_string("Paris XVe, 75005"))
"paris xve, 75005",
tools.normalize_string("Paris XVe, 75005")
)
def test_multiple_whitespaces(self): def test_multiple_whitespaces(self):
""" """
Checks whitespaces are collapsed. Checks whitespaces are collapsed.
""" """
self.assertEqual( self.assertEqual("avec ascenseur", tools.normalize_string("avec ascenseur"))
"avec ascenseur",
tools.normalize_string("avec ascenseur")
)
def test_whitespace_trim(self): def test_whitespace_trim(self):
""" """
Checks that leading and trailing whitespace is trimmed. Checks that leading and trailing whitespace is trimmed.
""" """
self.assertEqual( self.assertEqual("rennes 35000", tools.normalize_string(" Rennes 35000 "))
"rennes 35000",
tools.normalize_string(" Rennes 35000 ")
)
def test_accents(self): def test_accents(self):
""" """
Checks accents are replaced. Checks accents are replaced.
""" """
self.assertEqual( self.assertEqual("eeeaui", tools.normalize_string(u"éèêàüï"))
"eeeaui",
tools.normalize_string(u"éèêàüï")
)
class TestPhoneNumbers(unittest.TestCase): class TestPhoneNumbers(unittest.TestCase):
""" """
Checks phone number normalizations. Checks phone number normalizations.
""" """
def test_prefix(self): def test_prefix(self):
""" """
Checks phone numbers with international prefixes. Checks phone numbers with international prefixes.
""" """
self.assertEqual( self.assertEqual("0605040302", duplicates.homogeneize_phone_number("+33605040302"))
"0605040302",
duplicates.homogeneize_phone_number("+33605040302")
)
def test_dots_separators(self): def test_dots_separators(self):
""" """
Checks phone numbers with dots. Checks phone numbers with dots.
""" """
self.assertEqual( self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02"))
"0605040302",
duplicates.homogeneize_phone_number("06.05.04.03.02")
)
def test_spaces_separators(self): def test_spaces_separators(self):
""" """
Checks phone numbers with spaces. Checks phone numbers with spaces.
""" """
self.assertEqual( self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02"))
"0605040302",
duplicates.homogeneize_phone_number("06 05 04 03 02")
)
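The three tests above pin down the expected behaviour of `homogeneize_phone_number`. A minimal sketch that satisfies them (not the actual Flatisfy implementation) could be:

```python
import re


def homogeneize_phone_number(number):
    """Normalize a French phone number to its bare 10-digit national form."""
    if not number:
        return None
    # Swap the international prefix for the national leading zero.
    number = number.replace("+33", "0")
    # Drop the usual separators (spaces, dots, dashes).
    return re.sub(r"[ .\-]", "", number)


assert homogeneize_phone_number("+33605040302") == "0605040302"
assert homogeneize_phone_number("06.05.04.03.02") == "0605040302"
assert homogeneize_phone_number("06 05 04 03 02") == "0605040302"
```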
class TestPhotos(unittest.TestCase): class TestPhotos(unittest.TestCase):
@ -183,96 +147,104 @@ class TestPhotos(unittest.TestCase):
""" """
Compares a photo against itself. Compares a photo against itself.
""" """
photo = { photo = {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}
"url": TESTS_DATA_DIR + "127028739@seloger.jpg"
}
self.assertTrue(duplicates.compare_photos( self.assertTrue(duplicates.compare_photos(photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD))
photo,
photo,
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
def test_different_photos(self): def test_different_photos(self):
""" """
Compares two different photos. Compares two different photos.
""" """
self.assertFalse(duplicates.compare_photos( self.assertFalse(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"}, {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
self.assertFalse(duplicates.compare_photos( self.assertFalse(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"}, {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
def test_matching_photos(self): def test_matching_photos(self):
""" """
Compares two matching photos with different size and source. Compares two matching photos with different size and source.
""" """
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"}, {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"}, {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"}, {"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"}, {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
self.HASH_THRESHOLD self.IMAGE_CACHE,
)) self.HASH_THRESHOLD,
)
)
def test_matching_cropped_photos(self): def test_matching_cropped_photos(self):
""" """
Compares two matching photos with one being cropped. Compares two matching photos with one being cropped.
""" """
# Fixme: the image hash threshold should be 10 ideally # Fixme: the image hash threshold should be 10 ideally
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "vertical.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "vertical-cropped.jpg"}, {"url": TESTS_DATA_DIR + "vertical.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
20 self.IMAGE_CACHE,
)) 20,
)
)
# Fixme: the image hash threshold should be 10 ideally # Fixme: the image hash threshold should be 10 ideally
self.assertTrue(duplicates.compare_photos( self.assertTrue(
{"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"}, duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "124910113@seloger.jpg"}, {"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
self.IMAGE_CACHE, {"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
20 self.IMAGE_CACHE,
)) 20,
)
)
class TestImageCache(unittest.TestCase): class TestImageCache(unittest.TestCase):
""" """
Checks image cache is working as expected. Checks image cache is working as expected.
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.IMAGE_CACHE = ImageCache( # pylint: disable=invalid-name self.IMAGE_CACHE = ImageCache(storage_dir=tempfile.mkdtemp(prefix="flatisfy-")) # pylint: disable=invalid-name
storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
)
super(TestImageCache, self).__init__(*args, **kwargs) super(TestImageCache, self).__init__(*args, **kwargs)
def test_invalid_url(self): def test_invalid_url(self):
@ -280,27 +252,22 @@ class TestImageCache(unittest.TestCase):
Check that it returns nothing on an invalid URL. Check that it returns nothing on an invalid URL.
""" """
# See https://framagit.org/phyks/Flatisfy/issues/116. # See https://framagit.org/phyks/Flatisfy/issues/116.
self.assertIsNone( self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/404"))
self.IMAGE_CACHE.get("https://httpbin.org/status/404") self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/500"))
)
self.assertIsNone(
self.IMAGE_CACHE.get("https://httpbin.org/status/500")
)
def test_invalid_data(self): def test_invalid_data(self):
""" """
Check that it returns nothing on invalid data. Check that it returns nothing on invalid data.
""" """
# See https://framagit.org/phyks/Flatisfy/issues/116. # See https://framagit.org/phyks/Flatisfy/issues/116.
self.assertIsNone( self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/"))
self.IMAGE_CACHE.get("https://httpbin.org/")
)
class TestDuplicates(unittest.TestCase): class TestDuplicates(unittest.TestCase):
""" """
Checks duplicates detection. Checks duplicates detection.
""" """
DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8 # pylint: disable=invalid-name DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8 # pylint: disable=invalid-name
DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15 # pylint: disable=invalid-name DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15 # pylint: disable=invalid-name
HASH_THRESHOLD = 10 # pylint: disable=invalid-name HASH_THRESHOLD = 10 # pylint: disable=invalid-name
@ -316,9 +283,7 @@ class TestDuplicates(unittest.TestCase):
""" """
Generates a fake flat post. Generates a fake flat post.
""" """
backend = BACKENDS_BY_PRECEDENCE[ backend = BACKENDS_BY_PRECEDENCE[random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)]
random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)
]
return { return {
"id": str(random.randint(100000, 199999)) + "@" + backend, "id": str(random.randint(100000, 199999)) + "@" + backend,
"phone": "0607080910", "phone": "0607080910",
@ -326,7 +291,7 @@ class TestDuplicates(unittest.TestCase):
"utilities": "", "utilities": "",
"area": random.randint(200, 1500) / 10, "area": random.randint(200, 1500) / 10,
"cost": random.randint(100000, 300000), "cost": random.randint(100000, 300000),
"bedrooms": random.randint(1, 4) "bedrooms": random.randint(1, 4),
} }
@staticmethod @staticmethod
@ -350,10 +315,7 @@ class TestDuplicates(unittest.TestCase):
""" """
flat1 = self.generate_fake_flat() flat1 = self.generate_fake_flat()
flat2 = copy.deepcopy(flat1) flat2 = copy.deepcopy(flat1)
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_different_prices(self): def test_different_prices(self):
@ -364,10 +326,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1) flat2 = copy.deepcopy(flat1)
flat2["cost"] += 1000 flat2["cost"] += 1000
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_different_rooms(self): def test_different_rooms(self):
@ -379,10 +338,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1) flat2 = copy.deepcopy(flat1)
flat2["rooms"] += 1 flat2["rooms"] += 1
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_different_areas(self): def test_different_areas(self):
@ -393,10 +349,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1) flat2 = copy.deepcopy(flat1)
flat2["area"] += 10 flat2["area"] += 10
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_different_areas_decimals(self): def test_different_areas_decimals(self):
@ -409,10 +362,7 @@ class TestDuplicates(unittest.TestCase):
flat1["area"] = 50.65 flat1["area"] = 50.65
flat2["area"] = 50.37 flat2["area"] = 50.37
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_different_phones(self): def test_different_phones(self):
@ -424,10 +374,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1) flat2 = copy.deepcopy(flat1)
flat2["phone"] = "0708091011" flat2["phone"] = "0708091011"
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS) self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
def test_real_duplicates(self): def test_real_duplicates(self):
@ -435,15 +382,9 @@ class TestDuplicates(unittest.TestCase):
Two flats with the same price, area and number of rooms should be detected Two flats with the same price, area and number of rooms should be detected
as duplicates. as duplicates.
""" """
flats = self.load_files( flats = self.load_files("127028739@seloger", "14428129@explorimmo")
"127028739@seloger",
"14428129@explorimmo"
)
score = duplicates.get_duplicate_score( score = duplicates.get_duplicate_score(flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD)
flats[0], flats[1],
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS) self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
# TODO: fixme, find new testing examples # TODO: fixme, find new testing examples
@ -502,8 +443,13 @@ def run():
""" """
LOGGER.info("Running tests…") LOGGER.info("Running tests…")
try: try:
for testsuite in [TestTexts, TestPhoneNumbers, TestImageCache, for testsuite in [
TestDuplicates, TestPhotos]: TestTexts,
TestPhoneNumbers,
TestImageCache,
TestDuplicates,
TestPhotos,
]:
suite = unittest.TestLoader().loadTestsFromTestCase(testsuite) suite = unittest.TestLoader().loadTestsFromTestCase(testsuite)
result = unittest.TextTestRunner(verbosity=2).run(suite) result = unittest.TextTestRunner(verbosity=2).run(suite)
assert result.wasSuccessful() assert result.wasSuccessful()


@ -3,9 +3,7 @@
This module contains basic utility functions, such as pretty printing of JSON This module contains basic utility functions, such as pretty printing of JSON
output, checking that a value is within a given interval, etc. output, checking that a value is within a given interval, etc.
""" """
from __future__ import ( from __future__ import absolute_import, division, print_function, unicode_literals
absolute_import, division, print_function, unicode_literals
)
import datetime import datetime
import itertools import itertools
@ -41,7 +39,7 @@ def next_weekday(d, weekday):
:returns: The datetime object for the next given weekday. :returns: The datetime object for the next given weekday.
""" """
days_ahead = weekday - d.weekday() days_ahead = weekday - d.weekday()
if days_ahead <= 0: # Target day already happened this week if days_ahead <= 0: # Target day already happened this week
days_ahead += 7 days_ahead += 7
return d + datetime.timedelta(days_ahead) return d + datetime.timedelta(days_ahead)
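The arithmetic is easier to see with a concrete date; the function is re-declared here so the snippet is self-contained:

```python
import datetime


def next_weekday(d, weekday):
    """Same logic as above; weekday uses Python's convention, 0 = Monday."""
    days_ahead = weekday - d.weekday()
    if days_ahead <= 0:  # Target day already happened this week
        days_ahead += 7
    return d + datetime.timedelta(days_ahead)


# 2021-03-14 is a Sunday (weekday 6), so the next Monday is one day later.
print(next_weekday(datetime.date(2021, 3, 14), 0))  # -> 2021-03-15
```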
@ -61,8 +59,18 @@ def convert_arabic_to_roman(arabic):
return arabic return arabic
to_roman = { to_roman = {
1: 'I', 2: 'II', 3: 'III', 4: 'IV', 5: 'V', 6: 'VI', 7: 'VII', 1: "I",
8: 'VIII', 9: 'IX', 10: 'X', 20: 'XX', 30: 'XXX' 2: "II",
3: "III",
4: "IV",
5: "V",
6: "VI",
7: "VII",
8: "VIII",
9: "IX",
10: "X",
20: "XX",
30: "XXX",
} }
roman_chars_list = [] roman_chars_list = []
count = 1 count = 1
@ -71,7 +79,7 @@ def convert_arabic_to_roman(arabic):
if digit != 0: if digit != 0:
roman_chars_list.append(to_roman[digit * count]) roman_chars_list.append(to_roman[digit * count])
count *= 10 count *= 10
return ''.join(roman_chars_list[::-1]) return "".join(roman_chars_list[::-1])
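As the tests earlier in this diff show, the `to_roman` table only covers 1 to 39; the early `return arabic` guard at the top of this hunk leaves anything larger untouched. Assuming the module path `flatisfy.tools`:

```python
from flatisfy import tools

assert tools.convert_arabic_to_roman("14") == "XIV"
assert tools.convert_arabic_to_roman("39") == "XXXIX"
assert tools.convert_arabic_to_roman("40") == "40"      # out of range, unchanged
assert tools.convert_arabic_to_roman("1987") == "1987"  # likewise unchanged
```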
def convert_arabic_to_roman_in_text(text): def convert_arabic_to_roman_in_text(text):
@ -82,11 +90,7 @@ def convert_arabic_to_roman_in_text(text):
:returns: The corresponding text with arabic numbers converted to :returns: The corresponding text with arabic numbers converted to
roman numerals. roman numerals.
""" """
return re.sub( return re.sub(r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text)
r'(\d+)',
lambda matchobj: convert_arabic_to_roman(matchobj.group(0)),
text
)
def hash_dict(func): def hash_dict(func):
@ -96,11 +100,13 @@ def hash_dict(func):
From https://stackoverflow.com/a/44776960. From https://stackoverflow.com/a/44776960.
""" """
class HDict(dict): class HDict(dict):
""" """
Transform a mutable dictionary into an immutable one. Useful to be compatible Transform a mutable dictionary into an immutable one. Useful to be compatible
with lru_cache with lru_cache
""" """
def __hash__(self): def __hash__(self):
return hash(json.dumps(self)) return hash(json.dumps(self))
@ -108,17 +114,10 @@ def hash_dict(func):
""" """
The wrapped function The wrapped function
""" """
args = tuple( args = tuple([HDict(arg) if isinstance(arg, dict) else arg for arg in args])
[ kwargs = {k: HDict(v) if isinstance(v, dict) else v for k, v in kwargs.items()}
HDict(arg) if isinstance(arg, dict) else arg
for arg in args
]
)
kwargs = {
k: HDict(v) if isinstance(v, dict) else v
for k, v in kwargs.items()
}
return func(*args, **kwargs) return func(*args, **kwargs)
return wrapped return wrapped
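`hash_dict` exists so that `functools.lru_cache` can be stacked under it on functions taking dict arguments, since plain dicts are unhashable. A usage sketch, assuming the decorator lives in `flatisfy.tools`:

```python
import functools

from flatisfy.tools import hash_dict


@hash_dict  # outermost: converts dict arguments to hashable HDicts first
@functools.lru_cache(maxsize=None)
def expensive_lookup(config):
    print("cache miss")  # only printed once per distinct config
    return config["postal_code"]


expensive_lookup({"postal_code": "75014"})  # computes, prints "cache miss"
expensive_lookup({"postal_code": "75014"})  # served from the cache
```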
@ -126,6 +125,7 @@ class DateAwareJSONEncoder(json.JSONEncoder):
""" """
Extend the default JSON encoder to serialize datetimes to iso strings. Extend the default JSON encoder to serialize datetimes to iso strings.
""" """
def default(self, o): # pylint: disable=locally-disabled,E0202 def default(self, o): # pylint: disable=locally-disabled,E0202
if isinstance(o, (datetime.date, datetime.datetime)): if isinstance(o, (datetime.date, datetime.datetime)):
return o.isoformat() return o.isoformat()
@ -153,9 +153,7 @@ def pretty_json(data):
"toto": "ok" "toto": "ok"
} }
""" """
return json.dumps(data, cls=DateAwareJSONEncoder, return json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True)
indent=4, separators=(',', ': '),
sort_keys=True)
def batch(iterable, size): def batch(iterable, size):
@ -294,10 +292,7 @@ def distance(gps1, gps2):
long2 = math.radians(gps2[1]) long2 = math.radians(gps2[1])
# pylint: disable=locally-disabled,invalid-name # pylint: disable=locally-disabled,invalid-name
a = ( a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
math.sin((lat2 - lat1) / 2.0)**2 +
math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0)**2
)
c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a)) c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
earth_radius = 6371000 earth_radius = 6371000
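The `a`/`c` pair above is the textbook haversine formula on a sphere of radius 6371 km. A self-contained sanity check, with rounded city coordinates:

```python
import math


def haversine(gps1, gps2):
    """Great-circle distance in meters, same formula as the code above."""
    lat1, long1 = map(math.radians, gps1)
    lat2, long2 = map(math.radians, gps2)
    a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
    c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return 6371000 * c


# Paris to Lyon as the crow flies is about 390 km.
print(round(haversine((48.8566, 2.3522), (45.764, 4.8357)) / 1000), "km")
```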
@ -385,13 +380,14 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
"from": "%s;%s" % (latlng_from[1], latlng_from[0]), "from": "%s;%s" % (latlng_from[1], latlng_from[0]),
"to": "%s;%s" % (latlng_to[1], latlng_to[0]), "to": "%s;%s" % (latlng_to[1], latlng_to[0]),
"datetime": date_from.isoformat(), "datetime": date_from.isoformat(),
"count": 1 "count": 1,
} }
try: try:
# Do the query to Navitia API # Do the query to Navitia API
req = requests.get( req = requests.get(
NAVITIA_ENDPOINT, params=payload, NAVITIA_ENDPOINT,
auth=(config["navitia_api_key"], "") params=payload,
auth=(config["navitia_api_key"], ""),
) )
req.raise_for_status() req.raise_for_status()
@ -400,28 +396,28 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
for section in journeys["sections"]: for section in journeys["sections"]:
if section["type"] == "public_transport": if section["type"] == "public_transport":
# Public transport # Public transport
sections.append({ sections.append(
"geojson": section["geojson"], {
"color": ( "geojson": section["geojson"],
section["display_informations"].get("color", None) "color": (section["display_informations"].get("color", None)),
) }
}) )
elif section["type"] == "street_network": elif section["type"] == "street_network":
# Walking # Walking
sections.append({ sections.append({"geojson": section["geojson"], "color": None})
"geojson": section["geojson"],
"color": None
})
else: else:
# Skip anything else # Skip anything else
continue continue
except (requests.exceptions.RequestException, except (
ValueError, IndexError, KeyError) as exc: requests.exceptions.RequestException,
ValueError,
IndexError,
KeyError,
) as exc:
# Ignore any possible exception # Ignore any possible exception
LOGGER.warning( LOGGER.warning(
"An exception occurred during travel time lookup on " "An exception occurred during travel time lookup on Navitia: %s.",
"Navitia: %s.", str(exc),
str(exc)
) )
else: else:
LOGGER.warning( LOGGER.warning(
@ -430,50 +426,43 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
) )
elif mode in [TimeToModes.WALK, TimeToModes.BIKE, TimeToModes.CAR]: elif mode in [TimeToModes.WALK, TimeToModes.BIKE, TimeToModes.CAR]:
MAPBOX_MODES = { MAPBOX_MODES = {
TimeToModes.WALK: 'mapbox/walking', TimeToModes.WALK: "mapbox/walking",
TimeToModes.BIKE: 'mapbox/cycling', TimeToModes.BIKE: "mapbox/cycling",
TimeToModes.CAR: 'mapbox/driving' TimeToModes.CAR: "mapbox/driving",
} }
# Check that Mapbox API key is available # Check that Mapbox API key is available
if config["mapbox_api_key"]: if config["mapbox_api_key"]:
try: try:
service = mapbox.Directions( service = mapbox.Directions(access_token=config["mapbox_api_key"])
access_token=config['mapbox_api_key']
)
origin = { origin = {
'type': 'Feature', "type": "Feature",
'properties': {'name': 'Start'}, "properties": {"name": "Start"},
'geometry': { "geometry": {
'type': 'Point', "type": "Point",
'coordinates': [latlng_from[1], latlng_from[0]]}} "coordinates": [latlng_from[1], latlng_from[0]],
},
}
destination = { destination = {
'type': 'Feature', "type": "Feature",
'properties': {'name': 'End'}, "properties": {"name": "End"},
'geometry': { "geometry": {
'type': 'Point', "type": "Point",
'coordinates': [latlng_to[1], latlng_to[0]]}} "coordinates": [latlng_to[1], latlng_to[0]],
response = service.directions( },
[origin, destination], MAPBOX_MODES[mode] }
) response = service.directions([origin, destination], MAPBOX_MODES[mode])
response.raise_for_status() response.raise_for_status()
route = response.geojson()['features'][0] route = response.geojson()["features"][0]
# Fix longitude/latitude inversion in geojson output # Fix longitude/latitude inversion in geojson output
geometry = route['geometry'] geometry = route["geometry"]
geometry['coordinates'] = [ geometry["coordinates"] = [(x[1], x[0]) for x in geometry["coordinates"]]
(x[1], x[0]) for x in geometry['coordinates'] sections = [{"geojson": geometry, "color": "000"}]
] travel_time = route["properties"]["duration"]
sections = [{ except (requests.exceptions.RequestException, IndexError, KeyError) as exc:
"geojson": geometry,
"color": "000"
}]
travel_time = route['properties']['duration']
except (requests.exceptions.RequestException,
IndexError, KeyError) as exc:
# Ignore any possible exception # Ignore any possible exception
LOGGER.warning( LOGGER.warning(
"An exception occurred during travel time lookup on " "An exception occurred during travel time lookup on Mapbox: %s.",
"Mapbox: %s.", str(exc),
str(exc)
) )
else: else:
LOGGER.warning( LOGGER.warning(
@ -482,10 +471,7 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
) )
if travel_time: if travel_time:
return { return {"time": travel_time, "sections": sections}
"time": travel_time,
"sections": sections
}
return None return None
@ -493,6 +479,7 @@ def timeit(func):
""" """
A decorator that logs how much time was spent in the function. A decorator that logs how much time was spent in the function.
""" """
def wrapped(*args, **kwargs): def wrapped(*args, **kwargs):
""" """
The wrapped function The wrapped function
@ -502,4 +489,5 @@ def timeit(func):
runtime = time.time() - before runtime = time.time() - before
LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime) LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime)
return res return res
return wrapped return wrapped


@ -2,9 +2,7 @@
""" """
This module contains the definition of the Bottle web app. This module contains the definition of the Bottle web app.
""" """
from __future__ import ( from __future__ import absolute_import, division, print_function, unicode_literals
absolute_import, division, print_function, unicode_literals
)
import functools import functools
import json import json
@ -25,14 +23,12 @@ class QuietWSGIRefServer(bottle.WSGIRefServer):
Quiet implementation of Bottle's built-in WSGIRefServer, as `Canister` Quiet implementation of Bottle's built-in WSGIRefServer, as `Canister`
handles the logging through standard Python logging. handles the logging through standard Python logging.
""" """
# pylint: disable=locally-disabled,too-few-public-methods # pylint: disable=locally-disabled,too-few-public-methods
quiet = True quiet = True
def run(self, app): def run(self, app):
app.log.info( app.log.info("Server is now up and ready! Listening on %s:%s." % (self.host, self.port))
'Server is now up and ready! Listening on %s:%s.' %
(self.host, self.port)
)
super(QuietWSGIRefServer, self).run(app) super(QuietWSGIRefServer, self).run(app)
@ -42,12 +38,10 @@ def _serve_static_file(filename):
""" """
return bottle.static_file( return bottle.static_file(
filename, filename,
root=os.path.join( root=os.path.join(os.path.dirname(os.path.realpath(__file__)), "static"),
os.path.dirname(os.path.realpath(__file__)),
"static"
)
) )
def get_app(config): def get_app(config):
""" """
Get a Bottle app instance with all the routes set-up. Get a Bottle app instance with all the routes set-up.
@ -65,77 +59,69 @@ def get_app(config):
app.install(canister.Canister()) app.install(canister.Canister())
# Use DateAwareJSONEncoder to dump JSON strings # Use DateAwareJSONEncoder to dump JSON strings
# From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long # From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long
app.install( app.install(bottle.JSONPlugin(json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)))
bottle.JSONPlugin(
json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)
)
)
# Enable CORS # Enable CORS
@app.hook('after_request') @app.hook("after_request")
def enable_cors(): def enable_cors():
""" """
Add CORS headers at each request. Add CORS headers at each request.
""" """
# The str() call is required as we import unicode_literal and WSGI # The str() call is required as we import unicode_literal and WSGI
# headers list should have plain str type. # headers list should have plain str type.
bottle.response.headers[str('Access-Control-Allow-Origin')] = str('*') bottle.response.headers[str("Access-Control-Allow-Origin")] = str("*")
bottle.response.headers[str('Access-Control-Allow-Methods')] = str( bottle.response.headers[str("Access-Control-Allow-Methods")] = str("PUT, GET, POST, DELETE, OPTIONS, PATCH")
'PUT, GET, POST, DELETE, OPTIONS, PATCH' bottle.response.headers[str("Access-Control-Allow-Headers")] = str(
) "Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token"
bottle.response.headers[str('Access-Control-Allow-Headers')] = str(
'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
) )
# API v1 routes # API v1 routes
app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1) app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)
app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1)
api_routes.time_to_places_v1)
app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1) app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1) app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"], app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"], api_routes.update_flat_v1)
api_routes.update_flat_v1)
app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"], app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"], api_routes.ics_feed_v1)
api_routes.ics_feed_v1)
app.route("/api/v1/search", ["POST", "OPTIONS"], api_routes.search_v1) app.route("/api/v1/search", ["POST", "OPTIONS"], api_routes.search_v1)
app.route("/api/v1/opendata", ["GET", "OPTIONS"], api_routes.opendata_index_v1) app.route("/api/v1/opendata", ["GET", "OPTIONS"], api_routes.opendata_index_v1)
app.route("/api/v1/opendata/postal_codes", ["GET", "OPTIONS"], app.route(
api_routes.opendata_postal_codes_v1) "/api/v1/opendata/postal_codes",
["GET", "OPTIONS"],
api_routes.opendata_postal_codes_v1,
)
app.route("/api/v1/metadata", ["GET", "OPTIONS"], api_routes.metadata_v1) app.route("/api/v1/metadata", ["GET", "OPTIONS"], api_routes.metadata_v1)
app.route("/api/v1/import", ["GET", "OPTIONS"], api_routes.import_v1)
# Index # Index
app.route("/", "GET", lambda: _serve_static_file("index.html")) app.route("/", "GET", lambda: _serve_static_file("index.html"))
# Static files # Static files
app.route("/favicon.ico", "GET", app.route("/favicon.ico", "GET", lambda: _serve_static_file("favicon.ico"))
lambda: _serve_static_file("favicon.ico"))
app.route( app.route(
"/assets/<filename:path>", "GET", "/assets/<filename:path>",
lambda filename: _serve_static_file("/assets/{}".format(filename)) "GET",
lambda filename: _serve_static_file("/assets/{}".format(filename)),
) )
app.route( app.route(
"/img/<filename:path>", "GET", "/img/<filename:path>",
lambda filename: _serve_static_file("/img/{}".format(filename)) "GET",
lambda filename: _serve_static_file("/img/{}".format(filename)),
) )
app.route( app.route(
"/.well-known/<filename:path>", "GET", "/.well-known/<filename:path>",
lambda filename: _serve_static_file("/.well-known/{}".format(filename)) "GET",
lambda filename: _serve_static_file("/.well-known/{}".format(filename)),
) )
app.route( app.route(
"/data/img/<filename:path>", "GET", "/data/img/<filename:path>",
lambda filename: bottle.static_file( "GET",
filename, lambda filename: bottle.static_file(filename, root=os.path.join(config["data_directory"], "images")),
root=os.path.join(
config["data_directory"],
"images"
)
)
) )
return app return app
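Since `get_app` returns a plain Bottle application, it can be served by any WSGI server. The static-file pattern used above boils down to the following sketch (the route, directory layout and port are illustrative):

```python
import os

import bottle

app = bottle.Bottle()


# Same idea as _serve_static_file above: resolve files relative to a
# "static" directory sitting next to this module.
@app.route("/assets/<filename:path>")
def serve_asset(filename):
    root = os.path.join(os.path.dirname(os.path.realpath(__file__)), "static")
    return bottle.static_file(filename, root=root)


if __name__ == "__main__":
    bottle.run(app, host="localhost", port=8080)
```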


@ -7,9 +7,7 @@ This module is heavily based on code from
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is [Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
licensed under the MIT license. licensed under the MIT license.
""" """
from __future__ import ( from __future__ import absolute_import, division, print_function, unicode_literals
absolute_import, division, print_function, unicode_literals
)
import functools import functools
import inspect import inspect
@ -22,7 +20,8 @@ class ConfigPlugin(object):
A Bottle plugin to automatically pass the config object to the routes A Bottle plugin to automatically pass the config object to the routes
specifying they need it. specifying they need it.
""" """
name = 'config'
name = "config"
api = 2 api = 2
KEYWORD = "config" KEYWORD = "config"
@ -41,9 +40,7 @@ class ConfigPlugin(object):
if not isinstance(other, ConfigPlugin): if not isinstance(other, ConfigPlugin):
continue continue
else: else:
raise bottle.PluginError( raise bottle.PluginError("Found another conflicting Config plugin.")
"Found another conflicting Config plugin."
)
def apply(self, callback, route): def apply(self, callback, route):
""" """


@ -7,9 +7,7 @@ This module is heavily based on code from
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is [Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
licensed under the MIT license. licensed under the MIT license.
""" """
from __future__ import ( from __future__ import absolute_import, division, print_function, unicode_literals
absolute_import, division, print_function, unicode_literals
)
import inspect import inspect
@ -21,7 +19,8 @@ class DatabasePlugin(object):
A Bottle plugin to automatically pass an SQLAlchemy database session object A Bottle plugin to automatically pass an SQLAlchemy database session object
to the routes specifying they need it. to the routes specifying they need it.
""" """
name = 'database'
name = "database"
api = 2 api = 2
KEYWORD = "db" KEYWORD = "db"
@ -41,9 +40,7 @@ class DatabasePlugin(object):
if not isinstance(other, DatabasePlugin): if not isinstance(other, DatabasePlugin):
continue continue
else: else:
raise bottle.PluginError( raise bottle.PluginError("Found another conflicting Database plugin.")
"Found another conflicting Database plugin."
)
def apply(self, callback, route): def apply(self, callback, route):
""" """
@ -64,6 +61,7 @@ class DatabasePlugin(object):
if self.KEYWORD not in callback_args: if self.KEYWORD not in callback_args:
# If no need for a db session, call the route callback # If no need for a db session, call the route callback
return callback return callback
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
""" """
Wrap the callback in a call to get_session. Wrap the callback in a call to get_session.
@ -72,6 +70,7 @@ class DatabasePlugin(object):
# Get a db session and pass it to the callback # Get a db session and pass it to the callback
kwargs[self.KEYWORD] = session kwargs[self.KEYWORD] = session
return callback(*args, **kwargs) return callback(*args, **kwargs)
return wrapper return wrapper
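Both plugins hinge on the same trick: inspect the route callback's signature and only wrap it when the magic keyword (`config` or `db`) is present. A tiny self-contained demonstration of that check:

```python
import inspect


def flat_v1(flat_id, db):
    """A route callback that asks for a database session via `db`."""
    return flat_id, db


# This mirrors the test in DatabasePlugin.apply(): wrap only when needed.
callback_args = inspect.signature(flat_v1).parameters
print("db" in callback_args)  # True -> the plugin injects a session
```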


@ -16,7 +16,7 @@
<style> <style>
body { body {
margin: 0 auto; margin: 0 auto;
max-width: 75em; /* max-width: 75em; */
font-family: "Helvetica", "Arial", sans-serif; font-family: "Helvetica", "Arial", sans-serif;
line-height: 1.5; line-height: 1.5;
padding: 4em 1em; padding: 4em 1em;


@ -0,0 +1,398 @@
<template>
<div>
<template v-if="isLoading">
<p>{{ $t("common.loading") }}</p>
</template>
<div class="grid" v-else-if="flat && timeToPlaces">
<div class="left-panel">
<h2>
(<!--
--><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
-->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
--></router-link><!--
-->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
<span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
</h2>
<div class="grid">
<div class="left-panel">
<span>
{{ flat.cost | cost(flat.currency) }}
<template v-if="flat.utilities === 'included'">
{{ $t("flatsDetails.utilities_included") }}
</template>
<template v-else-if="flat.utilities === 'excluded'">
{{ $t("flatsDetails.utilities_excluded") }}
</template>
</span>
<span v-if="flat.flatisfy_postal_code.postal_code">
à {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
</span>
</div>
<span class="right-panel right">
<template v-if="flat.area"><span>{{flat.area}} m<sup>2</sup></span></template><template v-if="flat.rooms"><span>, {{flat.rooms}} {{ $tc("flatsDetails.rooms", flat.rooms) }}</span></template><template v-if="flat.bedrooms"><span>/ {{flat.bedrooms}} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}</span></template>
</span>
</div>
<div>
<template v-if="flat.photos && flat.photos.length > 0">
<Slider :photos="flat.photos"></Slider>
</template>
</div>
<div>
<h3>{{ $t("flatsDetails.Description") }}</h3>
<p>{{ flat.text }}</p>
<p class="right">{{ flat.location }}</p>
<p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
</div>
<div>
<h3>{{ $t("flatsDetails.Details") }}</h3>
<table>
<tr v-for="(value, key) in flat.details">
<th>{{ key }}</th>
<td>{{ value }}</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Metadata") }}</h3>
<table>
<tr>
<th>
{{ $t("flatsDetails.postal_code") }}
</th>
<td>
<template v-if="flat.flatisfy_postal_code.postal_code">
{{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
</template>
<template v-else>
?
</template>
</td>
</tr>
<tr v-if="displayedStations">
<th>
{{ $t("flatsDetails.nearby_stations") }}
</th>
<td>
{{ displayedStations }}
</td>
</tr>
<tr v-if="Object.keys(flat.flatisfy_time_to).length">
<th>
{{ $t("flatsDetails.Times_to") }}
</th>
<td>
<ul class="time_to_list">
<li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
{{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
</li>
</ul>
</td>
</tr>
<tr>
<th>
{{ $t("flatsDetails.SquareMeterCost") }}
</th>
<td>
{{ flat.sqCost }} {{ flat.currency }}
</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Location") }}</h3>
<FlatsMap :flats="flatMarker" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
</div>
<div>
<h3>{{ $t("flatsDetails.Notes") }}</h3>
<form v-on:submit="updateFlatNotes">
<textarea ref="notesTextarea" rows="10" :v-model="flat.notes"></textarea>
<p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
</form>
</div>
</div>
<div class="right-panel">
<h3>{{ $t("flatsDetails.Contact") }}</h3>
<div class="contact">
<template v-if="flat.phone">
<p v-for="phoneNumber in flat.phone.split(',')">
<a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a>
</p>
</template>
<template v-if="flat.urls.length == 1">
<a :href="flat.urls[0]" target="_blank">
{{ $tc("common.Original_post", 1) }}
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
</template>
<template v-else-if="flat.urls.length > 1">
<p>{{ $tc("common.Original_post", flat.urls.length) }}
<ul>
<li v-for="(url, index) in flat.urls">
<a :href="url" target="_blank">
{{ $tc("common.Original_post", 1) }} {{ index + 1 }}
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
</li>
</ul>
</p>
</template>
</div>
<h3>{{ $t("flatsDetails.Visit") }}</h3>
<div class="visit">
<flat-pickr
:value="flatpickrValue"
:config="flatpickrConfig"
:placeholder="$t('flatsDetails.setDateOfVisit')"
/>
</div>
<h3>{{ $t("common.Actions") }}</h3>
<nav>
<ul>
<template v-if="flat.status !== 'user_deleted'">
<Notation :flat="flat"></Notation>
<li>
<button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
<i class="fa fa-trash" aria-hidden="true"></i>
{{ $t("common.Remove") }}
</button>
</li>
</template>
<template v-else>
<li>
<button v-on:click="updateFlatStatus('new')" class="fullButton">
<i class="fa fa-undo" aria-hidden="true"></i>
{{ $t("common.Restore") }}
</button>
</li>
</template>
</ul>
</nav>
</div>
</div>
</div>
</template>
<script>
import flatPickr from 'vue-flatpickr-component'
import moment from 'moment'
import 'font-awesome-webpack'
import 'flatpickr/dist/flatpickr.css'
import FlatsMap from '../components/flatsmap.vue'
import Slider from '../components/slider.vue'
import Notation from '../components/notation.vue'
import { capitalize } from '../tools'
export default {
components: {
FlatsMap,
Slider,
flatPickr,
Notation
},
created () {
this.fetchData()
},
data () {
return {
// TODO: Flatpickr locale
'overloadNotation': null,
'flatpickrConfig': {
static: true,
altFormat: 'h:i K, M j, Y',
altInput: true,
enableTime: true,
onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
}
}
},
props: ['flat'],
computed: {
isLoading () {
return this.$store.getters.isLoading
},
flatMarker () {
return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.flat.id)
},
'flatpickrValue' () {
if (this.flat && this.flat.visit_date) {
return this.flat.visit_date.local().format()
}
return null
},
timeToPlaces () {
return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
},
notation () {
if (this.overloadNotation) {
return this.overloadNotation
}
return this.flat.notation
},
journeys () {
if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
const journeys = []
for (const place in this.flat.flatisfy_time_to) {
this.flat.flatisfy_time_to[place].sections.forEach(
section => journeys.push({
geojson: section.geojson,
options: {
color: section.color ? ('#' + section.color) : '#2196f3',
dashArray: section.color ? 'none' : '2, 10'
}
})
)
}
return journeys
}
return []
},
displayedStations () {
if (this.flat.flatisfy_stations.length > 0) {
const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
return stationsNames.join(', ')
} else {
return null
}
}
},
watch: {
flat: 'fetchData'
},
methods: {
fetchData () {
this.$store.dispatch('getAllTimeToPlaces')
},
updateFlatStatus (status) {
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: status })
},
updateFlatNotes () {
const notes = this.$refs.notesTextarea.value
this.$store.dispatch(
'updateFlatNotes',
{ flatId: this.flat.id, newNotes: notes }
)
},
updateFlatVisitDate (date) {
if (date) {
date = moment(date).utc().format()
}
this.$store.dispatch(
'updateFlatVisitDate',
{ flatId: this.flat.id, newVisitDate: date }
)
},
humanizeTimeTo (time) {
const minutes = Math.floor(time.as('minutes'))
return minutes + ' ' + this.$tc('common.mins', minutes)
},
normalizePhoneNumber (phoneNumber) {
phoneNumber = phoneNumber.replace(/ /g, '')
phoneNumber = phoneNumber.replace(/\./g, '')
return phoneNumber
},
capitalize: capitalize
}
}
</script>
<style scoped>
.expired {
font-weight: bold;
text-transform: uppercase;
}
@media screen and (min-width: 768px) {
.grid {
display: grid;
grid-gap: 50px;
grid-template-columns: 75fr 25fr;
}
.left-panel {
grid-column: 1;
grid-row: 1;
}
.right-panel {
grid-column: 2;
grid-row: 1;
}
}
.left-panel textarea {
width: 100%;
}
.right {
text-align: right;
}
nav ul {
list-style-type: none;
padding-left: 1em;
}
.contact {
padding-left: 1em;
}
.right-panel li {
margin-bottom: 1em;
margin-top: 1em;
}
button {
cursor: pointer;
width: 75%;
padding: 0.3em;
font-size: 0.9em;
}
table {
table-layout: fixed;
}
td {
word-wrap: break-word;
word-break: break-all;
white-space: normal;
}
.time_to_list {
margin: 0;
padding-left: 0;
list-style-position: outside;
list-style-type: none;
}
@media screen and (max-width: 767px) {
.right-panel nav {
text-align: center;
}
.fullButton {
width: 100%;
}
}
</style>


@ -1,21 +1,26 @@
<template lang="html"> <template lang="html">
<div class="full"> <div class="full">
<v-map :zoom="zoom.defaultZoom" :center="center" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom"> <v-map v-if="bounds" :zoom="zoom.defaultZoom" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom" v-on:click="$emit('select-flat', null)" @update:bounds="bounds = $event">
<v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer> <v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer>
<template v-for="marker in flats"> <v-marker-cluster>
<v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat"> <template v-for="marker in flats">
<v-popup :content="marker.content"></v-popup> <v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat" v-on:click="$emit('select-flat', marker.flatId)">
</v-marker> <!-- <v-popup :content="marker.content"></v-popup> -->
</template> </v-marker>
<template v-for="(place_gps, place_name) in places"> </template>
<v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place"> </v-marker-cluster>
<v-tooltip :content="place_name"></v-tooltip> <v-marker-cluster>
</v-marker> <template v-for="(place_gps, place_name) in places">
</template> <v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
<v-tooltip :content="place_name"></v-tooltip>
</v-marker>
</template>
</v-marker-cluster>
<template v-for="journey in journeys"> <template v-for="journey in journeys">
<v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer> <v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer>
</template> </template>
</v-map> </v-map>
<div v-else>Nothing to display yet</div>
</div> </div>
</template> </template>
@ -31,10 +36,13 @@ L.Icon.Default.mergeOptions({
}) })
import 'leaflet/dist/leaflet.css' import 'leaflet/dist/leaflet.css'
import 'leaflet.markercluster/dist/MarkerCluster.css'
import 'leaflet.markercluster/dist/MarkerCluster.Default.css'
require('leaflet.icon.glyph') require('leaflet.icon.glyph')
import Vue2Leaflet from 'vue2-leaflet' import { LMap, LTileLayer, LMarker, LTooltip, LPopup, LGeoJson } from 'vue2-leaflet'
import Vue2LeafletMarkerCluster from 'vue2-leaflet-markercluster'
export default { export default {
data () { data () {
@ -46,11 +54,11 @@ export default {
fillColor: '#e4ce7f', fillColor: '#e4ce7f',
fillOpacity: 1 fillOpacity: 1
}, },
center: null, bounds: [[40.91351257612758, -7.580566406250001], [51.65892664880053, 12.0849609375]],
zoom: { zoom: {
defaultZoom: 13, defaultZoom: 6,
minZoom: 5, minZoom: 5,
maxZoom: 17 maxZoom: 20
}, },
tiles: { tiles: {
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
@ -67,25 +75,27 @@ export default {
}, },
components: { components: {
'v-map': Vue2Leaflet.Map, 'v-map': LMap,
'v-tilelayer': Vue2Leaflet.TileLayer, 'v-tilelayer': LTileLayer,
'v-marker': Vue2Leaflet.Marker, 'v-marker': LMarker,
'v-tooltip': Vue2Leaflet.Tooltip, 'v-marker-cluster': Vue2LeafletMarkerCluster,
'v-popup': Vue2Leaflet.Popup, 'v-tooltip': LTooltip,
'v-geojson-layer': Vue2Leaflet.GeoJSON 'v-popup': LPopup,
'v-geojson-layer': LGeoJson
}, },
computed: { watch: {
bounds () { flats: 'computeBounds',
let bounds = [] places: 'computeBounds'
this.flats.forEach(flat => bounds.push(flat.gps)) },
Object.keys(this.places).forEach(place => bounds.push(this.places[place]))
if (bounds.length > 0) { methods: {
bounds = L.latLngBounds(bounds) computeBounds (newData, oldData) {
return bounds if (this.flats.length && JSON.stringify(newData) !== JSON.stringify(oldData)) {
} else { const allBounds = []
return null this.flats.forEach(flat => allBounds.push(flat.gps))
Object.keys(this.places).forEach(place => allBounds.push(this.places[place]))
this.bounds = allBounds.length ? L.latLngBounds(allBounds) : undefined
} }
} }
}, },


@ -1,16 +1,11 @@
<template> <template>
<tr> <tr>
<td v-if="showNotationColumn"> <td v-if="showNotationColumn">
<template v-for="n in notationRange"> <Notation :flat="flat" :title="capitalizedStatus"></Notation>
<i class="fa fa-star" aria-hidden="true" :title="capitalizedStatus"></i>
</template>
</td> </td>
<td class="no-padding"> <td class="no-padding">
<Notation v-if="!showNotationColumn" :flat="flat" :title="capitalizedStatus"></Notation>
<router-link class="fill" :to="{name: 'details', params: {id: flat.id}}"> <router-link class="fill" :to="{name: 'details', params: {id: flat.id}}">
<template v-if="!showNotationColumn" v-for="n in notationRange">
<i class="fa fa-star" aria-hidden="true" :title="capitalizedStatus"></i>
</template>
[{{ flat.id.split("@")[1] }}] [{{ flat.id.split("@")[1] }}]
<span class="expired">{{ flat.is_expired ? "[" + $t("common.expired") + "]" : null }}</span> <span class="expired">{{ flat.is_expired ? "[" + $t("common.expired") + "]" : null }}</span>
{{ flat.title }} {{ flat.title }}
@ -31,7 +26,7 @@
{{ flat.rooms ? flat.rooms : '?'}} {{ flat.rooms ? flat.rooms : '?'}}
</td> </td>
<td> <td>
{{ flat.cost }} {{ flat.currency }} {{ flat.cost | cost(flat.currency) }}
<template v-if="flat.utilities == 'included'"> <template v-if="flat.utilities == 'included'">
{{ $t("flatsDetails.utilities_included") }} {{ $t("flatsDetails.utilities_included") }}
</template> </template>
@ -60,7 +55,8 @@
</template> </template>
<script> <script>
import { capitalize, range } from '../tools' import { capitalize } from '../tools'
import Notation from '../components/notation.vue'
export default { export default {
props: { props: {
@ -69,6 +65,10 @@ export default {
showNotes: Boolean showNotes: Boolean
}, },
components: {
Notation
},
computed: { computed: {
capitalizedStatus () { capitalizedStatus () {
return capitalize(this.$t('status.followed')) return capitalize(this.$t('status.followed'))
@ -81,9 +81,6 @@ export default {
return this.flat.photos[0].url return this.flat.photos[0].url
} }
return null return null
},
notationRange () {
return range(this.flat.notation)
} }
}, },


@ -0,0 +1,68 @@
<template>
<div>
<template v-for="n in range(5)">
<button v-bind:key="n" v-on:mouseover="handleHover(n)" v-on:mouseout="handleOut()" v-on:click="updateNotation(n)">
<i class="fa" v-bind:class="{'fa-star': n < notation, 'fa-star-o': n >= notation}" aria-hidden="true"></i>
</button>
</template>
</div>
</template>
<script>
import { range } from '../tools'
import 'flatpickr/dist/flatpickr.css'
export default {
data () {
return {
'overloadNotation': null
}
},
props: ['flat'],
computed: {
notation () {
if (this.overloadNotation) {
return this.overloadNotation
}
return this.flat.notation
}
},
methods: {
updateNotation (notation) {
notation = notation + 1
if (notation === this.flat.notation) {
this.flat.notation = 0
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: 0 })
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'new' })
} else {
this.flat.notation = notation
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: notation })
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'followed' })
}
},
handleHover (n) {
this.overloadNotation = n + 1
},
handleOut () {
this.overloadNotation = null
},
range: range
}
}
</script>
<style scoped>
button {
border: none;
width: auto;
background-color: transparent;
}
</style>


@ -3,9 +3,12 @@ import Vue from 'vue'
import i18n from './i18n' import i18n from './i18n'
import router from './router' import router from './router'
import store from './store' import store from './store'
import { costFilter } from './tools'
import App from './components/app.vue' import App from './components/app.vue'
Vue.filter('cost', costFilter)
new Vue({ new Vue({
i18n, i18n,
router, router,


@ -1,73 +1,88 @@
import { findFlatGPS } from '../tools' import { findFlatGPS, costFilter } from '../tools'
export default { export default {
allFlats: state => state.flats, allFlats: (state) => state.flats,
flat: (state, getters) => id => state.flats.find(flat => flat.id === id), flat: (state, getters) => (id) =>
state.flats.find((flat) => flat.id === id),
isLoading: state => state.loading > 0, isLoading: (state) => state.loading > 0,
postalCodesFlatsBuckets: (state, getters) => filter => { inseeCodesFlatsBuckets: (state, getters) => (filter) => {
const postalCodeBuckets = {} const buckets = {};
state.flats.forEach(flat => { state.flats.forEach((flat) => {
if (!filter || filter(flat)) { if (!filter || filter(flat)) {
const postalCode = flat.flatisfy_postal_code.postal_code const insee = flat.flatisfy_postal_code.insee_code;
if (!postalCodeBuckets[postalCode]) { if (!buckets[insee]) {
postalCodeBuckets[postalCode] = { buckets[insee] = {
'name': flat.flatisfy_postal_code.name, name: flat.flatisfy_postal_code.name,
'flats': [] flats: [],
} };
} }
postalCodeBuckets[postalCode].flats.push(flat) buckets[insee].flats.push(flat);
} }
}) });
return postalCodeBuckets return buckets;
}, },
flatsMarkers: (state, getters) => (router, filter) => { flatsMarkers: (state, getters) => (router, filter) => {
const markers = [] const markers = [];
state.flats.forEach(flat => { state.flats.forEach((flat) => {
if (filter && filter(flat)) { if (filter && filter(flat)) {
const gps = findFlatGPS(flat) const gps = findFlatGPS(flat);
if (gps) { if (gps) {
const previousMarkerIndex = markers.findIndex( const previousMarker = markers.find(
marker => marker.gps[0] === gps[0] && marker.gps[1] === gps[1] (marker) =>
) marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
);
const href = router.resolve({ name: 'details', params: { id: flat.id }}).href if (previousMarker) {
if (previousMarkerIndex !== -1) { // randomize position a bit
markers[previousMarkerIndex].content += '<br/><a href="' + href + '">' + flat.title + '</a>' // gps[0] += (Math.random() - 0.5) / 500
} else { // gps[1] += (Math.random() - 0.5) / 500
markers.push({
'title': '',
'content': '<a href="' + href + '">' + flat.title + '</a>',
'gps': gps
})
} }
const href = router.resolve({
name: "details",
params: { id: flat.id },
}).href;
const cost = flat.cost
? costFilter(flat.cost, flat.currency)
: "";
markers.push({
title: "",
content:
'<a href="' +
href +
'">' +
flat.title +
"</a>" +
cost,
gps: gps,
flatId: flat.id,
});
} }
} }
}) });
return markers return markers;
}, },
allTimeToPlaces: state => { allTimeToPlaces: (state) => {
const places = {} const places = {};
Object.keys(state.timeToPlaces).forEach(constraint => { Object.keys(state.timeToPlaces).forEach((constraint) => {
const constraintTimeToPlaces = state.timeToPlaces[constraint] const constraintTimeToPlaces = state.timeToPlaces[constraint];
Object.keys(constraintTimeToPlaces).forEach(name => { Object.keys(constraintTimeToPlaces).forEach((name) => {
places[name] = constraintTimeToPlaces[name] places[name] = constraintTimeToPlaces[name];
}) });
}) });
return places return places;
}, },
timeToPlaces: (state, getters) => (constraintName) => { timeToPlaces: (state, getters) => (constraintName) => {
return state.timeToPlaces[constraintName] return state.timeToPlaces[constraintName];
}, },
metadata: state => state.metadata metadata: (state) => state.metadata,
} };
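As a reading aid, here is a sketch of how the reworked getter is consumed from a component and the bucket shape it returns (the INSEE code and city name below are illustrative):

```js
// Buckets are now keyed by INSEE code instead of postal code.
const buckets = this.$store.getters.inseeCodesFlatsBuckets(
    (flat) => flat.status === 'new'
)
// buckets ≈ { "75056": { name: "Paris", flats: [/* flat objects */] } }
```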


@ -1,8 +1,10 @@
export function findFlatGPS (flat) { export function findFlatGPS (flat) {
let gps let gps
// Try to push a marker based on stations if (flat.flatisfy_position) {
if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) { gps = [flat.flatisfy_position.lat, flat.flatisfy_position.lng]
} else if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) {
// Try to push a marker based on stations
gps = [0.0, 0.0] gps = [0.0, 0.0]
flat.flatisfy_stations.forEach(station => { flat.flatisfy_stations.forEach(station => {
gps = [gps[0] + station.gps[0], gps[1] + station.gps[1]] gps = [gps[0] + station.gps[0], gps[1] + station.gps[1]]
@ -23,3 +25,18 @@ export function capitalize (string) {
export function range (n) { export function range (n) {
return [...Array(n).keys()] return [...Array(n).keys()]
} }
export function costFilter (value, currency) {
if (!value) {
return 'N/A'
}
if (currency === 'EUR') {
currency = ' €'
}
var valueStr = value.toString()
// Left-pad so the length becomes a multiple of 3, then group by thousands.
valueStr = ' '.repeat((3 - (valueStr.length % 3)) % 3) + valueStr
return valueStr.match(/.{1,3}/g).join('.') + currency
}
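A quick sanity check of `costFilter` (a sketch, assuming an ES-module-aware runner and the corrected padding expression above):

```js
import { costFilter } from './flatisfy/web/js_src/tools'

console.log(costFilter(980000, 'EUR')) // "980.000 €" (dot-grouped thousands)
console.log(costFilter(500, 'USD'))    // "500USD" (non-EUR currency appended as-is)
console.log(costFilter(null, 'EUR'))   // "N/A"
```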


@ -3,217 +3,18 @@
<template v-if="isLoading"> <template v-if="isLoading">
<p>{{ $t("common.loading") }}</p> <p>{{ $t("common.loading") }}</p>
</template> </template>
<div class="grid" v-else-if="flat && timeToPlaces"> <Flat :flat="flat"></Flat>
<div class="left-panel">
<h2>
(<!--
--><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
-->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
--></router-link><!--
-->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
<span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
</h2>
<div class="grid">
<div class="left-panel">
<p>
{{ flat.cost }} {{ flat.currency }}
<template v-if="flat.utilities === 'included'">
{{ $t("flatsDetails.utilities_included") }}
</template>
<template v-else-if="flat.utilities === 'excluded'">
{{ $t("flatsDetails.utilities_excluded") }}
</template>
</p>
</div>
<p class="right-panel right">
{{ flat.area ? flat.area : '?' }} m<sup>2</sup>,
{{ flat.rooms ? flat.rooms : '?' }} {{ $tc("flatsDetails.rooms", flat.rooms) }} /
{{ flat.bedrooms ? flat.bedrooms : '?' }} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}
</p>
</div>
<div>
<template v-if="flat.photos && flat.photos.length > 0">
<Slider :photos="flat.photos"></Slider>
</template>
</div>
<div>
<h3>{{ $t("flatsDetails.Description") }}</h3>
<p>{{ flat.text }}</p>
<p class="right">{{ flat.location }}</p>
<p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
</div>
<div>
<h3>{{ $t("flatsDetails.Details") }}</h3>
<table>
<tr v-for="(value, key) in flat.details">
<th>{{ key }}</th>
<td>{{ value }}</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Metadata") }}</h3>
<table>
<tr>
<th>
{{ $t("flatsDetails.postal_code") }}
</th>
<td>
<template v-if="flat.flatisfy_postal_code.postal_code">
{{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
</template>
<template v-else>
?
</template>
</td>
</tr>
<tr>
<th>
{{ $t("flatsDetails.nearby_stations") }}
</th>
<td>
<template v-if="displayedStations">
{{ displayedStations }}
</template>
<template v-else>
?
</template>
</td>
</tr>
<tr>
<th>
{{ $t("flatsDetails.Times_to") }}
</th>
<td>
<template v-if="Object.keys(flat.flatisfy_time_to).length">
<ul class="time_to_list">
<li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
{{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
</li>
</ul>
</template>
<template v-else>
?
</template>
</td>
</tr>
<tr>
<th>
{{ $t("flatsDetails.SquareMeterCost") }}
</th>
<td>
{{ flat.sqCost }} {{ flat.currency }}
</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Location") }}</h3>
<FlatsMap :flats="flatMarkers" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
</div>
<div>
<h3>{{ $t("flatsDetails.Notes") }}</h3>
<form v-on:submit="updateFlatNotes">
<textarea ref="notesTextarea" rows="10">{{ flat.notes }}</textarea>
<p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
</form>
</div>
</div>
<div class="right-panel">
<h3>{{ $t("flatsDetails.Contact") }}</h3>
<div class="contact">
<p>
<template v-if="flat.phone">
<template v-for="phoneNumber in flat.phone.split(',')">
<a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a><br/>
</template>
</template>
<template v-else>
{{ $t("flatsDetails.no_phone_found") }}
</template>
</p>
<p>{{ $tc("common.Original_post", 42) }}
<ul>
<li v-for="(url, index) in flat.urls">
<a :href="url">
{{ $tc("common.Original_post", 1) }} {{ index + 1 }}
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
</li>
</ul>
</p>
</div>
<h3>{{ $t("flatsDetails.Visit") }}</h3>
<div class="visit">
<flat-pickr
:value="flatpickrValue"
:config="flatpickrConfig"
:placeholder="$t('flatsDetails.setDateOfVisit')"
/>
</div>
<h3>{{ $t("common.Actions") }}</h3>
<nav>
<ul>
<template v-if="flat.status !== 'user_deleted'">
<li ref="notationButton">
<template v-for="n in range(notation)">
<button class="btnIcon" v-on:mouseover="handleNotationHover(n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(n)">
<i class="fa fa-star" aria-hidden="true"></i>
</button>
</template>
<template v-for="n in range(5 - notation)">
<button class="btnIcon" v-on:mouseover="handleNotationHover(notation + n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(notation + n)">
<i class="fa fa-star-o" aria-hidden="true"></i>
</button>
</template>
</li>
<li>
<button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
<i class="fa fa-trash" aria-hidden="true"></i>
{{ $t("common.Remove") }}
</button>
</li>
</template>
<template v-else>
<li>
<button v-on:click="updateFlatStatus('new')" class="fullButton">
<i class="fa fa-undo" aria-hidden="true"></i>
{{ $t("common.Restore") }}
</button>
</li>
</template>
</ul>
</nav>
</div>
</div>
</div> </div>
</template> </template>
<script> <script>
import flatPickr from 'vue-flatpickr-component'
import moment from 'moment'
import 'font-awesome-webpack'
import 'flatpickr/dist/flatpickr.css'
import FlatsMap from '../components/flatsmap.vue' import Flat from '../components/flat.vue'
import Slider from '../components/slider.vue'
import { capitalize, range } from '../tools'
export default { export default {
components: { components: {
FlatsMap, Flat
Slider,
flatPickr
}, },
created () { created () {
document.title = this.title // Set title document.title = this.title // Set title
@ -232,20 +33,6 @@ export default {
} }
}, },
data () {
return {
// TODO: Flatpickr locale
'overloadNotation': null,
'flatpickrConfig': {
static: true,
altFormat: 'h:i K, M j, Y',
altInput: true,
enableTime: true,
onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
}
}
},
computed: { computed: {
isLoading () { isLoading () {
return this.$store.getters.isLoading return this.$store.getters.isLoading
@ -253,204 +40,16 @@ export default {
title () { title () {
return 'Flatisfy - ' + this.$route.params.id return 'Flatisfy - ' + this.$route.params.id
}, },
flatMarkers () {
return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.$route.params.id)
},
flat () { flat () {
return this.$store.getters.flat(this.$route.params.id) return this.$store.getters.flat(this.$route.params.id)
},
'flatpickrValue' () {
if (this.flat && this.flat.visit_date) {
return this.flat.visit_date.local().format()
}
return null
},
timeToPlaces () {
return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
},
notation () {
if (this.overloadNotation) {
return this.overloadNotation
}
return this.flat.notation
},
journeys () {
if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
const journeys = []
for (const place in this.flat.flatisfy_time_to) {
this.flat.flatisfy_time_to[place].sections.forEach(
section => journeys.push({
geojson: section.geojson,
options: {
color: section.color ? ('#' + section.color) : '#2196f3',
dashArray: section.color ? 'none' : '2, 10'
}
})
)
}
return journeys
}
return []
},
displayedStations () {
if (this.flat.flatisfy_stations.length > 0) {
const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
return stationsNames.join(', ')
} else {
return null
}
} }
}, },
methods: { methods: {
fetchData () { fetchData () {
this.$store.dispatch('getFlat', { flatId: this.$route.params.id }) this.$store.dispatch('getFlat', { flatId: this.$route.params.id })
this.$store.dispatch('getAllTimeToPlaces') }
},
updateFlatNotation (notation) {
notation = notation + 1
if (notation === this.flat.notation) {
this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: 0 })
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'new' })
} else {
this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: notation })
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'followed' })
}
},
updateFlatStatus (status) {
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: status })
},
updateFlatNotes () {
const notes = this.$refs.notesTextarea.value
this.$store.dispatch(
'updateFlatNotes',
{ flatId: this.$route.params.id, newNotes: notes }
)
},
updateFlatVisitDate (date) {
if (date) {
date = moment(date).utc().format()
}
this.$store.dispatch(
'updateFlatVisitDate',
{ flatId: this.$route.params.id, newVisitDate: date }
)
},
humanizeTimeTo (time) {
const minutes = Math.floor(time.as('minutes'))
return minutes + ' ' + this.$tc('common.mins', minutes)
},
handleNotationHover (n) {
this.overloadNotation = n + 1
},
handleNotationOut () {
this.overloadNotation = null
},
normalizePhoneNumber (phoneNumber) {
phoneNumber = phoneNumber.replace(/ /g, '')
phoneNumber = phoneNumber.replace(/\./g, '')
return phoneNumber
},
capitalize: capitalize,
range: range
} }
} }
</script> </script>
<style scoped>
.expired {
font-weight: bold;
text-transform: uppercase;
}
@media screen and (min-width: 768px) {
.grid {
display: grid;
grid-gap: 50px;
grid-template-columns: 75fr 25fr;
}
.left-panel {
grid-column: 1;
grid-row: 1;
}
.right-panel {
grid-column: 2;
grid-row: 1;
}
}
.left-panel textarea {
width: 100%;
}
.right {
text-align: right;
}
nav ul {
list-style-type: none;
padding-left: 1em;
}
.contact {
padding-left: 1em;
}
.right-panel li {
margin-bottom: 1em;
margin-top: 1em;
}
button {
cursor: pointer;
width: 75%;
padding: 0.3em;
font-size: 0.9em;
}
table {
table-layout: fixed;
}
td {
word-wrap: break-word;
word-break: break-all;
white-space: normal;
}
.time_to_list {
margin: 0;
padding-left: 0;
list-style-position: outside;
list-style-type: none;
}
.btnIcon {
border: none;
width: auto;
background-color: transparent;
}
@media screen and (max-width: 767px) {
.right-panel nav {
text-align: center;
}
.fullButton {
width: 100%;
}
}
</style>


@ -1,50 +1,56 @@
<template> <template>
<div> <div class="flex-row">
<FlatsMap :flats="flatsMarkers" :places="timeToPlaces"></FlatsMap> <div class="flex" style="overflow: auto;">
<FlatsMap :flats="flatsMarkers" :places="timeToPlaces" v-on:select-flat="selectFlat($event)"></FlatsMap>
<h2> <h2>
{{ $t("home.new_available_flats") }} {{ $t("home.new_available_flats") }}
<template v-if="lastUpdate"> <template v-if="lastUpdate">
<label class="show-last-update"> <label class="show-last-update">
{{ $t("home.Last_update") }} {{ lastUpdate.fromNow() }} {{ $t("home.Last_update") }} {{ lastUpdate.fromNow() }}
</label> </label>
</template>
<label class="show-expired-flats-label">
<input type="checkbox" class="show-expired-flats-checkbox" v-model="showExpiredFlats" />
{{ $t("home.show_expired_flats") }}
</label>
</h2>
<template v-if="Object.keys(postalCodesFlatsBuckets).length > 0">
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
<h3>
{{ postal_code_data.name || $t('common.Unknown') }}
<span v-if="postal_code !== 'undefined'">
({{ postal_code }})
</span>
- {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}
</h3>
<FlatsTable :flats="postal_code_data.flats" :key="postal_code"></FlatsTable>
</template> </template>
</template> <label class="show-expired-flats-label">
<template v-else-if="isLoading"> <input type="checkbox" class="show-expired-flats-checkbox" v-model="showExpiredFlats" />
<p>{{ $t("common.loading") }}</p> {{ $t("home.show_expired_flats") }}
</template> </label>
<template v-else> </h2>
<p>{{ $t("flatListing.no_available_flats") }}</p>
</template> <template v-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
<template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
<h3>
{{ insee_code_data.name || $t('common.Unknown') }}
<span v-if="insee_code !== 'undefined'">
({{ insee_code }})
</span>
- {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}
</h3>
<FlatsTable :flats="insee_code_data.flats" :key="insee_code"></FlatsTable>
</template>
</template>
<template v-else-if="isLoading">
<p>{{ $t("common.loading") }}</p>
</template>
<template v-else>
<p>{{ $t("flatListing.no_available_flats") }}</p>
</template>
</div>
<div v-if="selectedFlat" class="flex">
<Flat :flat="selectedFlat"></Flat>
</div>
</div> </div>
</template> </template>
<script> <script>
import FlatsMap from '../components/flatsmap.vue' import FlatsMap from '../components/flatsmap.vue'
import FlatsTable from '../components/flatstable.vue' import FlatsTable from '../components/flatstable.vue'
import Flat from '../components/flat.vue'
import moment from 'moment' import moment from 'moment'
export default { export default {
components: { components: {
FlatsMap, FlatsMap,
FlatsTable FlatsTable,
Flat
}, },
created () { created () {
@ -60,13 +66,25 @@ export default {
data () { data () {
return { return {
showExpiredFlats: false showExpiredFlats: false,
selectedFlat: undefined
}
},
methods: {
selectFlat: async function (flatId) {
if (flatId) {
await this.$store.dispatch('getFlat', { flatId })
this.selectedFlat = await this.$store.getters.flat(flatId)
} else {
this.selectedFlat = undefined
}
} }
}, },
computed: { computed: {
postalCodesFlatsBuckets () { inseeCodesFlatsBuckets () {
return this.$store.getters.postalCodesFlatsBuckets(flat => return this.$store.getters.inseeCodesFlatsBuckets(flat =>
flat.status === 'new' && flat.status === 'new' &&
(this.showExpiredFlats || !flat.is_expired) (this.showExpiredFlats || !flat.is_expired)
) )
@ -100,7 +118,12 @@ h2 {
display: flex; display: flex;
justify-content: space-between; justify-content: space-between;
} }
.flex-row {
display:flex;
}
.flex {
flex: 1;
}
table { table {
margin-left: 0; margin-left: 0;
margin-right: 0; margin-right: 0;

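The new `v-on:select-flat` listener implies that `flatsmap.vue` (not shown in this diff) emits the clicked marker's flat id, which the getters above now attach to each marker as `flatId`. A hedged sketch of the emitting side:

```js
export default {
    methods: {
        // Assumed wiring in flatsmap.vue: forward the clicked marker's flat id
        // so the v-on:select-flat handler in home.vue can fill the side panel.
        onMarkerClick (marker) {
            this.$emit('select-flat', marker.flatId)
        }
    }
}
```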

@ -12,10 +12,10 @@
<template v-if="isLoading"> <template v-if="isLoading">
<p>{{ $t("common.loading") }}</p> <p>{{ $t("common.loading") }}</p>
</template> </template>
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length > 0"> <template v-else-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets"> <template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
<h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3> <h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
<FlatsTable :flats="postal_code_data.flats"></FlatsTable> <FlatsTable :flats="insee_code_data.flats"></FlatsTable>
</template> </template>
</template> </template>
<template v-else> <template v-else>
@ -51,12 +51,12 @@ export default {
}, },
computed: { computed: {
postalCodesFlatsBuckets () { inseeCodesFlatsBuckets () {
if (!this.$route.query.query || this.loading) { if (!this.$route.query.query || this.loading) {
return {} return {}
} }
return this.$store.getters.postalCodesFlatsBuckets( return this.$store.getters.inseeCodesFlatsBuckets(
flat => flat.status !== 'duplicate' && flat.status !== 'ignored' && flat.status !== 'user_deleted' flat => flat.status !== 'duplicate' && flat.status !== 'ignored' && flat.status !== 'user_deleted'
) )
}, },


@ -17,11 +17,11 @@
<template v-if="isLoading"> <template v-if="isLoading">
<p>{{ $t("common.loading") }}</p> <p>{{ $t("common.loading") }}</p>
</template> </template>
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length"> <template v-else-if="Object.keys(inseeCodesFlatsBuckets).length">
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets"> <template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
<h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3> <h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
<FlatsTable <FlatsTable
:flats="postal_code_data.flats" :flats="insee_code_data.flats"
:showNotationColumn="$route.params.status === 'followed'" :showNotationColumn="$route.params.status === 'followed'"
:showNotes="$route.params.status === 'followed'" :showNotes="$route.params.status === 'followed'"
:initialSortBy="$route.params.status === 'followed' ? 'notation' : undefined" :initialSortBy="$route.params.status === 'followed' ? 'notation' : undefined"
@ -81,8 +81,8 @@ export default {
}, },
computed: { computed: {
postalCodesFlatsBuckets () { inseeCodesFlatsBuckets () {
return this.$store.getters.postalCodesFlatsBuckets(flat => flat.status === this.$route.params.status) return this.$store.getters.inseeCodesFlatsBuckets(flat => flat.status === this.$route.params.status)
}, },
title () { title () {
return 'Flatisfy - ' + capitalize(this.$t('status.' + this.$route.params.status)) return 'Flatisfy - ' + capitalize(this.$t('status.' + this.$route.params.status))


@ -2,13 +2,12 @@
""" """
This module contains the definition of the web app API routes. This module contains the definition of the web app API routes.
""" """
from __future__ import ( from __future__ import absolute_import, division, print_function, unicode_literals
absolute_import, division, print_function, unicode_literals
)
import datetime import datetime
import itertools import itertools
import json import json
import logging
import re import re
import os import os
@ -17,7 +16,9 @@ import vobject
import flatisfy.data import flatisfy.data
from flatisfy.models import flat as flat_model from flatisfy.models import flat as flat_model
from flatisfy.models import postal_code
from flatisfy.models.postal_code import PostalCode from flatisfy.models.postal_code import PostalCode
from flatisfy import cmds
FILTER_RE = re.compile(r"filter\[([A-z0-9_]+)\]") FILTER_RE = re.compile(r"filter\[([A-z0-9_]+)\]")
@ -60,26 +61,24 @@ def _JSONApiSpec(query, model, default_sorting=None):
# Handle pagination according to JSON API spec # Handle pagination according to JSON API spec
page_number, page_size = 0, None page_number, page_size = 0, None
try: try:
if 'page[size]' in query: if "page[size]" in query:
page_size = int(query['page[size]']) page_size = int(query["page[size]"])
assert page_size > 0 assert page_size > 0
if 'page[number]' in query: if "page[number]" in query:
page_number = int(query['page[number]']) page_number = int(query["page[number]"])
assert page_number >= 0 assert page_number >= 0
except (AssertionError, ValueError): except (AssertionError, ValueError):
raise ValueError("Invalid pagination provided.") raise ValueError("Invalid pagination provided.")
# Handle sorting according to JSON API spec # Handle sorting according to JSON API spec
sorting = [] sorting = []
if 'sort' in query: if "sort" in query:
for index in query['sort'].split(','): for index in query["sort"].split(","):
try: try:
sort_field = getattr(model, index.lstrip('-')) sort_field = getattr(model, index.lstrip("-"))
except AttributeError: except AttributeError:
raise ValueError( raise ValueError("Invalid sorting key provided: {}.".format(index))
"Invalid sorting key provided: {}.".format(index) if index.startswith("-"):
)
if index.startswith('-'):
sort_field = sort_field.desc() sort_field = sort_field.desc()
sorting.append(sort_field) sorting.append(sort_field)
# Default sorting options # Default sorting options
@ -87,11 +86,7 @@ def _JSONApiSpec(query, model, default_sorting=None):
try: try:
sorting.append(getattr(model, default_sorting)) sorting.append(getattr(model, default_sorting))
except AttributeError: except AttributeError:
raise ValueError( raise ValueError("Invalid default sorting key provided: {}.".format(default_sorting))
"Invalid default sorting key provided: {}.".format(
default_sorting
)
)
return filters, page_number, page_size, sorting return filters, page_number, page_size, sorting
@ -110,22 +105,22 @@ def _serialize_flat(flat, config):
postal_codes = {} postal_codes = {}
for constraint_name, constraint in config["constraints"].items(): for constraint_name, constraint in config["constraints"].items():
postal_codes[constraint_name] = flatisfy.data.load_data( postal_codes[constraint_name] = flatisfy.data.load_data(PostalCode, constraint, config)
PostalCode, constraint, config
)
try: try:
assert flat["flatisfy_postal_code"] assert flat["flatisfy_position"]
lat = flat["flatisfy_position"]["lat"]
lng = flat["flatisfy_position"]["lng"]
postal_code_data = next( postal_code_data = next(
x x for x in postal_codes.get(flat["flatisfy_constraint"], []) if x.lat == lat and x.lng == lng
for x in postal_codes.get(flat["flatisfy_constraint"], [])
if x.postal_code == flat["flatisfy_postal_code"]
) )
logging.warning(f"{postal_code_data.name}, {lat}, {lng}")
flat["flatisfy_postal_code"] = { flat["flatisfy_postal_code"] = {
"postal_code": flat["flatisfy_postal_code"], "postal_code": postal_code_data.postal_code,
"insee_code": postal_code_data.insee_code,
"name": postal_code_data.name, "name": postal_code_data.name,
"gps": (postal_code_data.lat, postal_code_data.lng) "gps": (postal_code_data.lat, postal_code_data.lng),
} }
except (AssertionError, StopIteration): except (AssertionError, StopIteration):
flat["flatisfy_postal_code"] = {} flat["flatisfy_postal_code"] = {}
@ -148,7 +143,7 @@ def index_v1():
"search": "/api/v1/search", "search": "/api/v1/search",
"ics": "/api/v1/ics/visits.ics", "ics": "/api/v1/ics/visits.ics",
"time_to_places": "/api/v1/time_to_places", "time_to_places": "/api/v1/time_to_places",
"metadata": "/api/v1/metadata" "metadata": "/api/v1/metadata",
} }
@ -179,36 +174,32 @@ def flats_v1(config, db):
:return: The available flats objects in a JSON ``data`` dict. :return: The available flats objects in a JSON ``data`` dict.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return '' return ""
try: try:
try: try:
filters, page_number, page_size, sorting = _JSONApiSpec( filters, page_number, page_size, sorting = _JSONApiSpec(
bottle.request.query, bottle.request.query, flat_model.Flat, default_sorting="cost"
flat_model.Flat,
default_sorting='cost'
) )
except ValueError as exc: except ValueError as exc:
return JSONError(400, str(exc)) return JSONError(400, str(exc))
# Build flat list # Build flat list
db_query = ( db_query = db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
)
flats = [ flats = [
_serialize_flat(flat, config) _serialize_flat(flat, config)
for flat in itertools.islice( for flat in itertools.islice(
db_query, db_query,
page_number * page_size if page_size else None, page_number * page_size if page_size else None,
page_number * page_size + page_size if page_size else None page_number * page_size + page_size if page_size else None,
) )
] ]
return { return {
"data": flats, "data": flats,
"page": page_number, "page": page_number,
"items_per_page": page_size if page_size else len(flats) "items_per_page": page_size if page_size else len(flats),
} }
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
@ -224,7 +215,7 @@ def flat_v1(flat_id, config, db):
:return: The flat object in a JSON ``data`` dict. :return: The flat object in a JSON ``data`` dict.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
@ -234,9 +225,7 @@ def flat_v1(flat_id, config, db):
if not flat: if not flat:
return JSONError(404, "No flat with id {}.".format(flat_id)) return JSONError(404, "No flat with id {}.".format(flat_id))
return { return {"data": _serialize_flat(flat, config)}
"data": _serialize_flat(flat, config)
}
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
@ -260,7 +249,7 @@ def update_flat_v1(flat_id, config, db):
:return: The new flat object in a JSON ``data`` dict. :return: The new flat object in a JSON ``data`` dict.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
@ -274,14 +263,9 @@ def update_flat_v1(flat_id, config, db):
for key, value in json_body.items(): for key, value in json_body.items():
setattr(flat, key, value) setattr(flat, key, value)
except ValueError as exc: except ValueError as exc:
return JSONError( return JSONError(400, "Invalid payload provided: {}.".format(str(exc)))
400,
"Invalid payload provided: {}.".format(str(exc))
)
return { return {"data": _serialize_flat(flat, config)}
"data": _serialize_flat(flat, config)
}
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
@ -297,20 +281,15 @@ def time_to_places_v1(config):
:return: The JSON dump of the places to compute time to (dict of places :return: The JSON dump of the places to compute time to (dict of places
names mapped to GPS coordinates). names mapped to GPS coordinates).
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
try: try:
places = {} places = {}
for constraint_name, constraint in config["constraints"].items(): for constraint_name, constraint in config["constraints"].items():
places[constraint_name] = { places[constraint_name] = {k: v["gps"] for k, v in constraint["time_to"].items()}
k: v["gps"] return {"data": places}
for k, v in constraint["time_to"].items()
}
return {
"data": places
}
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
@ -345,7 +324,7 @@ def search_v1(db, config):
:return: The matching flat objects in a JSON ``data`` dict. :return: The matching flat objects in a JSON ``data`` dict.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
@ -357,30 +336,25 @@ def search_v1(db, config):
try: try:
filters, page_number, page_size, sorting = _JSONApiSpec( filters, page_number, page_size, sorting = _JSONApiSpec(
bottle.request.query, bottle.request.query, flat_model.Flat, default_sorting="cost"
flat_model.Flat,
default_sorting='cost'
) )
except ValueError as exc: except ValueError as exc:
return JSONError(400, str(exc)) return JSONError(400, str(exc))
flats_db_query = (flat_model.Flat flats_db_query = flat_model.Flat.search_query(db, query).filter_by(**filters).order_by(*sorting)
.search_query(db, query)
.filter_by(**filters)
.order_by(*sorting))
flats = [ flats = [
_serialize_flat(flat, config) _serialize_flat(flat, config)
for flat in itertools.islice( for flat in itertools.islice(
flats_db_query, flats_db_query,
page_number * page_size if page_size else None, page_number * page_size if page_size else None,
page_number * page_size + page_size if page_size else None page_number * page_size + page_size if page_size else None,
) )
] ]
return { return {
"data": flats, "data": flats,
"page": page_number, "page": page_number,
"items_per_page": page_size if page_size else len(flats) "items_per_page": page_size if page_size else len(flats),
} }
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
@ -396,35 +370,33 @@ def ics_feed_v1(config, db):
:return: The ICS feed for the visits. :return: The ICS feed for the visits.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
cal = vobject.iCalendar() cal = vobject.iCalendar()
try: try:
flats_with_visits = db.query(flat_model.Flat).filter( flats_with_visits = db.query(flat_model.Flat).filter(flat_model.Flat.visit_date.isnot(None))
flat_model.Flat.visit_date.isnot(None)
)
for flat in flats_with_visits: for flat in flats_with_visits:
vevent = cal.add('vevent') vevent = cal.add("vevent")
vevent.add('dtstart').value = flat.visit_date vevent.add("dtstart").value = flat.visit_date
vevent.add('dtend').value = ( vevent.add("dtend").value = flat.visit_date + datetime.timedelta(hours=1)
flat.visit_date + datetime.timedelta(hours=1) vevent.add("summary").value = "Visit - {}".format(flat.title)
)
vevent.add('summary').value = 'Visit - {}'.format(flat.title)
description = ( description = "{} (area: {}, cost: {} {})\n{}#/flat/{}\n".format(
'{} (area: {}, cost: {} {})\n{}#/flat/{}\n'.format( flat.title,
flat.title, flat.area, flat.cost, flat.currency, flat.area,
config['website_url'], flat.id flat.cost,
) flat.currency,
config["website_url"],
flat.id,
) )
description += '\n{}\n'.format(flat.text) description += "\n{}\n".format(flat.text)
if flat.notes: if flat.notes:
description += '\n{}\n'.format(flat.notes) description += "\n{}\n".format(flat.notes)
vevent.add('description').value = description vevent.add("description").value = description
except Exception: # pylint: disable= broad-except except Exception: # pylint: disable= broad-except
pass pass
@ -439,13 +411,11 @@ def opendata_index_v1():
GET /api/v1/opendata GET /api/v1/opendata
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
return { return {"postal_codes": "/api/v1/opendata/postal_codes"}
"postal_codes": "/api/v1/opendata/postal_codes"
}
def opendata_postal_codes_v1(db): def opendata_postal_codes_v1(db):
@ -476,36 +446,36 @@ def opendata_postal_codes_v1(db):
:return: The postal codes data from opendata. :return: The postal codes data from opendata.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
try: try:
try: try:
filters, page_number, page_size, sorting = _JSONApiSpec( filters, page_number, page_size, sorting = _JSONApiSpec(
bottle.request.query, bottle.request.query, PostalCode, default_sorting="postal_code"
PostalCode,
default_sorting='postal_code'
) )
except ValueError as exc: except ValueError as exc:
return JSONError(400, str(exc)) return JSONError(400, str(exc))
db_query = db.query(PostalCode).filter_by(**filters).order_by(*sorting) db_query = db.query(PostalCode).filter_by(**filters).order_by(*sorting)
postal_codes = [ postal_codes = [
x.json_api_repr() for x in itertools.islice( x.json_api_repr()
for x in itertools.islice(
db_query, db_query,
page_number * page_size if page_size else None, page_number * page_size if page_size else None,
page_number * page_size + page_size if page_size else None page_number * page_size + page_size if page_size else None,
) )
] ]
return { return {
"data": postal_codes, "data": postal_codes,
"page": page_number, "page": page_number,
"items_per_page": page_size if page_size else len(postal_codes) "items_per_page": page_size if page_size else len(postal_codes),
} }
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
def metadata_v1(config): def metadata_v1(config):
""" """
API v1 metadata of the application. API v1 metadata of the application.
@ -516,25 +486,39 @@ def metadata_v1(config):
:return: The application metadata. :return: The application metadata.
""" """
if bottle.request.method == 'OPTIONS': if bottle.request.method == "OPTIONS":
# CORS # CORS
return {} return {}
try: try:
last_update = None last_update = None
try: try:
ts_file = os.path.join( ts_file = os.path.join(config["data_directory"], "timestamp")
config['data_directory'],
'timestamp'
)
last_update = os.path.getmtime(ts_file) last_update = os.path.getmtime(ts_file)
except OSError: except OSError:
pass pass
return { return {"data": {"last_update": last_update}}
'data': { except Exception as exc: # pylint: disable= broad-except
'last_update': last_update return JSONError(500, str(exc))
}
}
def import_v1(config):
"""
API v1 route to import new flats.
Example::
GET /api/v1/import
:return: The ids of the newly imported flats.
"""
if bottle.request.method == "OPTIONS":
# CORS
return {}
try:
flats_id = cmds.import_and_filter(config, False, True)
return {"flats": flats_id}
except Exception as exc: # pylint: disable= broad-except except Exception as exc: # pylint: disable= broad-except
return JSONError(500, str(exc)) return JSONError(500, str(exc))
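Since `import_v1` returns the imported ids under a `flats` key, the new endpoint can be exercised directly from the browser or a script; a minimal sketch (the path is taken from the docstring, error handling omitted):

```js
fetch('/api/v1/import')
    .then((response) => response.json())
    .then((json) => console.log('imported flat ids:', json.flats))
```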

import.sh Executable file

@ -0,0 +1,2 @@
#!/bin/sh -ev
python -m flatisfy import --config config.json --new-only -v "$@"


@ -36,8 +36,7 @@ def run_migrations_offline():
""" """
url = config.get_main_option("sqlalchemy.url") url = config.get_main_option("sqlalchemy.url")
context.configure( context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction(): with context.begin_transaction():
context.run_migrations() context.run_migrations()
@ -52,18 +51,17 @@ def run_migrations_online():
""" """
connectable = engine_from_config( connectable = engine_from_config(
config.get_section(config.config_ini_section), config.get_section(config.config_ini_section),
prefix='sqlalchemy.', prefix="sqlalchemy.",
poolclass=pool.NullPool) poolclass=pool.NullPool,
)
with connectable.connect() as connection: with connectable.connect() as connection:
context.configure( context.configure(connection=connection, target_metadata=target_metadata)
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction(): with context.begin_transaction():
context.run_migrations() context.run_migrations()
if context.is_offline_mode(): if context.is_offline_mode():
run_migrations_offline() run_migrations_offline()
else: else:


@ -10,21 +10,15 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '8155b83242eb' revision = "8155b83242eb"
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
def upgrade(): def upgrade():
op.add_column( op.add_column("flats", sa.Column("is_expired", sa.Boolean(), default=False))
'flats',
sa.Column('is_expired', sa.Boolean(), default=False)
)
def downgrade(): def downgrade():
op.drop_column( op.drop_column("flats", "is_expired")
'flats',
'is_expired'
)


@ -0,0 +1,24 @@
"""Add flat INSEE column
Revision ID: 9e58c66f1ac1
Revises: d21933db9ad8
Create Date: 2021-02-08 16:31:18.961186
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "9e58c66f1ac1"
down_revision = "d21933db9ad8"
branch_labels = None
depends_on = None
def upgrade():
op.add_column("postal_codes", sa.Column("insee_code", sa.String()))
def downgrade():
op.drop_column("postal_codes", "insee_code")


@ -0,0 +1,69 @@
"""Add flat position column
Revision ID: d21933db9ad8
Revises: 8155b83242eb
Create Date: 2021-02-08 16:26:37.190842
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy.types as types
import json
class StringyJSON(types.TypeDecorator):
"""
Stores and retrieves JSON as TEXT for SQLite.
From
https://avacariu.me/articles/2016/compiling-json-as-text-for-sqlite-with-sqlalchemy.
.. note ::
The associated field is immutable. That is, changes to the data
(typically, changing the value of a dict field) will not trigger an
update on the SQL side upon ``commit`` as the reference to the object
will not have been updated. One should force the update by forcing an
update of the reference (by performing a ``copy`` operation on the dict
for instance).
"""
impl = types.TEXT
def process_bind_param(self, value, dialect):
"""
Process the bound param, serialize the object to JSON before saving
into database.
"""
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
"""
Process the value fetched from the database, deserialize the JSON
string before returning the object.
"""
if value is not None:
value = json.loads(value)
return value
# TypeEngine.with_variant says "use StringyJSON instead when
# connecting to 'sqlite'"
# pylint: disable=locally-disabled,invalid-name
MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")
# revision identifiers, used by Alembic.
revision = "d21933db9ad8"
down_revision = "8155b83242eb"
branch_labels = None
depends_on = None
def upgrade():
op.add_column("flats", sa.Column("flatisfy_position", MagicJSON, default=False))
def downgrade():
op.drop_column("flats", "flatisfy_position")


@ -14,7 +14,8 @@
"watch:dev": "webpack --colors --progress --watch", "watch:dev": "webpack --colors --progress --watch",
"build:prod": "NODE_ENV=production webpack --colors --progress -p", "build:prod": "NODE_ENV=production webpack --colors --progress -p",
"watch:prod": "NODE_ENV=production webpack --colors --progress --watch -p", "watch:prod": "NODE_ENV=production webpack --colors --progress --watch -p",
"lint": "eslint --ext .js,.vue ./flatisfy/web/js_src/**" "lint": "eslint --fix --ext .js,.vue ./flatisfy/web/js_src/**",
"ziparound": "cp flatisfy/data_files/laposte.json node_modules/ziparound/laposte.json && node node_modules/ziparound"
}, },
"dependencies": { "dependencies": {
"es6-promise": "^4.1.0", "es6-promise": "^4.1.0",
@ -23,6 +24,7 @@
"imagesloaded": "^4.1.1", "imagesloaded": "^4.1.1",
"isomorphic-fetch": "^2.2.1", "isomorphic-fetch": "^2.2.1",
"isotope-layout": "^3.0.3", "isotope-layout": "^3.0.3",
"leaflet": "^1.7.1",
"leaflet.icon.glyph": "^0.2.0", "leaflet.icon.glyph": "^0.2.0",
"masonry": "0.0.2", "masonry": "0.0.2",
"moment": "^2.18.1", "moment": "^2.18.1",
@ -31,7 +33,8 @@
"vue-i18n": "^6.1.1", "vue-i18n": "^6.1.1",
"vue-images-loaded": "^1.1.2", "vue-images-loaded": "^1.1.2",
"vue-router": "^2.4.0", "vue-router": "^2.4.0",
"vue2-leaflet": "0.0.44", "vue2-leaflet": "2.6.0",
"vue2-leaflet-markercluster": "^3.1.0",
"vueisotope": "^3.0.0-rc", "vueisotope": "^3.0.0-rc",
"vuex": "^2.3.0" "vuex": "^2.3.0"
}, },
@ -53,6 +56,7 @@
"vue-html-loader": "^1.2.4", "vue-html-loader": "^1.2.4",
"vue-loader": "^11.3.4", "vue-loader": "^11.3.4",
"vue-template-compiler": "^2.2.6", "vue-template-compiler": "^2.2.6",
"webpack": "^2.3.3" "webpack": "^2.3.3",
"ziparound": "1.0.0"
} }
} }


@ -8,6 +8,7 @@ future
imagehash imagehash
mapbox mapbox
pillow pillow
ratelimit
requests requests
requests_mock requests_mock
sqlalchemy sqlalchemy
@ -15,5 +16,6 @@ titlecase
unidecode unidecode
vobject vobject
whoosh whoosh
https://git.weboob.org/weboob/devel/repository/archive.zip?ref=master git+https://git.weboob.org/weboob/weboob/
https://git.weboob.org/weboob/modules/repository/archive.zip?ref=master git+https://git.weboob.org/weboob/modules/
money

start.sh Executable file

@ -0,0 +1,18 @@
#!/bin/sh -ev
clean_up() {
# Perform program exit housekeeping
kill $SERVE_PID $YARN_PID
exit
}
python -m flatisfy serve --config config.json -v &
SERVE_PID=$!
yarn watch:dev &
YARN_PID=$!
trap clean_up SIGHUP SIGINT SIGTERM
wait $SERVE_PID $YARN_PID

wsgi.py

@ -12,11 +12,8 @@ import flatisfy.config
from flatisfy.web import app as web_app from flatisfy.web import app as web_app
class Args(): class Args:
config = os.path.join( config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config/config.json")
os.path.dirname(os.path.realpath(__file__)),
"config/config.json"
)
LOGGER = logging.getLogger("flatisfy") LOGGER = logging.getLogger("flatisfy")
@ -24,9 +21,7 @@ LOGGER = logging.getLogger("flatisfy")
CONFIG = flatisfy.config.load_config(Args()) CONFIG = flatisfy.config.load_config(Args())
if CONFIG is None: if CONFIG is None:
LOGGER.error("Invalid configuration. Exiting. " LOGGER.error("Invalid configuration. Exiting. Run init-config before if this is the first time you run Flatisfy.")
"Run init-config before if this is the first time "
"you run Flatisfy.")
sys.exit(1) sys.exit(1)

yarn.lock Normal file (7985 lines)
File diff suppressed because it is too large