Compare commits: 8f24ed48a3...1a95495c30 (44 commits)

1a95495c30, 3201c3ddd3, e32db2648a, 764a5c68ec, 6862648d50, 67da9a055e,
4d11726332, 1bd855dbd8, bd07988549, e4aef0bfaf, 7790eb0a32, 9f328259a7,
b3e316cf5b, 4e3b0055cf, f6f1593384, 62da67332f, 48f249bae2, fcb20d2878,
711590b809, a92db5e8ee, 582a868a1d, 0f2a14b024, 36e98bc5b3, 713912cfbc,
42909bd46f, 9c5afac91c, c6f711030a, cc4c1ccb18, c659dc6b76, 5a3a82ca8d,
2374763d3b, e6b71c6ed5, da6cb83f93, caa4961679, 89234b5c5b, 395a8ba72b,
fefab6ea36, 361725ea4d, 76a06cf795, 9698a889ad, b19459b97f, 0da04a4b6e,
31f08cb36b, af8d864c38
.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
+data

.editorconfig (new file, 10 lines)
@@ -0,0 +1,10 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+end_of_line = lf
+insert_final_newline = true
+
+[*.py]
+max_line_length=120

@@ -4,6 +4,9 @@
     "env": {
         "browser": true
    },
+    "parserOptions": {
+        "ecmaVersion": 8
+    },
    rules: {
        'indent': ["error", 4, { 'SwitchCase': 1 }],
    }

.gitignore (vendored, 2 changes)
@@ -9,9 +9,7 @@ flatisfy/web/static/assets
 data/
 package-lock.json
 doc/_build
 yarn.lock
 data_rework/
 .env
 weboob
 .htpasswd
 .vscode

.vscode/extensions.json (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
+{
+    "recommendations": [
+        "mtxr.sqltools",
+        "mtxr.sqltools-driver-sqlite"
+    ]
+}

.vscode/settings.json (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
+{
+    "cSpell.words": [
+        "Weboob",
+        "flatisfy"
+    ],
+    "sqltools.useNodeRuntime": true,
+    "sqltools.connections": [
+        {
+            "previewLimit": 50,
+            "driver": "SQLite",
+            "name": "flatisfy",
+            "database": "${workspaceFolder:flatisfy}/data/flatisfy.db"
+        }
+    ]
+}

@@ -227,4 +227,10 @@ schema might change from time to time. Here is how to update it automatically:
 
 ### Other tools more or less connected with Flatisfy
 
-+ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only)
++ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only). You can invoke it with:
+
+```sh
+yarn ziparound
+# or alternatively
+yarn ziparound --code 75001 --distance 3
+```

doc/conf.py (88 changes)
@@ -18,7 +18,8 @@
 
 import os
 import sys
-sys.path.insert(0, os.path.abspath('..'))
+
+sys.path.insert(0, os.path.abspath(".."))
 
 # -- General configuration ------------------------------------------------
 
@@ -30,19 +31,19 @@ sys.path.insert(0, os.path.abspath('..'))
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.viewcode',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.viewcode",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
-source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
 source_parsers = {
-    '.md': 'recommonmark.parser.CommonMarkParser',
+    ".md": "recommonmark.parser.CommonMarkParser",
 }
 
 # The encoding of source files.
@@ -50,21 +51,21 @@ source_parsers = {
 # source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'Flatisfy'
-copyright = u'2017, Phyks (Lucas Verney)'
-author = u'Phyks (Lucas Verney)'
+project = u"Flatisfy"
+copyright = u"2017, Phyks (Lucas Verney)"
+author = u"Phyks (Lucas Verney)"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u'0.1'
+version = u"0.1"
 # The full version, including alpha/beta/rc tags.
-release = u'0.1'
+release = u"0.1"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -85,7 +86,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -107,7 +108,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -124,7 +125,7 @@ todo_include_todos = False
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'classic'
+html_theme = "classic"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -158,7 +159,7 @@ html_theme = 'classic'
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
@@ -238,34 +239,36 @@ html_static_path = ['_static']
 # html_search_scorer = 'scorer.js'
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Flatisfydoc'
+htmlhelp_basename = "Flatisfydoc"
 
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #
     # 'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #
     # 'preamble': '',
-
     # Latex figure (float) alignment
     #
     # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'Flatisfy.tex', u'Flatisfy Documentation',
-     u'Phyks (Lucas Verney)', 'manual'),
+    (
+        master_doc,
+        "Flatisfy.tex",
+        u"Flatisfy Documentation",
+        u"Phyks (Lucas Verney)",
+        "manual",
+    ),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -305,10 +308,7 @@ latex_documents = [
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'flatisfy', u'Flatisfy Documentation',
-     [author], 1)
-]
+man_pages = [(master_doc, "flatisfy", u"Flatisfy Documentation", [author], 1)]
 
 # If true, show URL addresses after external links.
 #
@@ -321,9 +321,15 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'Flatisfy', u'Flatisfy Documentation',
-     author, 'Flatisfy', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "Flatisfy",
+        u"Flatisfy Documentation",
+        author,
+        "Flatisfy",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.

@@ -19,7 +19,7 @@ RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - \
     && apt-get install -y nodejs
 
 # Install weboob's code itself.
-RUN git clone --depth 1 https://git.weboob.org/weboob/devel /home/user/weboob \
+RUN git clone --depth 1 https://git.weboob.org/weboob/weboob /home/user/weboob \
     && cd /home/user/weboob \
     && pip install .
 

@@ -9,3 +9,4 @@ services:
       - ./data:/flatisfy
     ports:
       - "8080:8080"
+    working_dir: /home/user/app

@@ -17,6 +17,7 @@ from flatisfy import data
 from flatisfy import fetch
 from flatisfy import tools
+from flatisfy import tests
 
 # pylint: enable=locally-disabled,wrong-import-position
 
 
@@ -27,68 +28,47 @@ def parse_args(argv=None):
     """
     Create parser and parse arguments.
     """
-    parser = argparse.ArgumentParser(prog="Flatisfy",
-                                     description="Find the perfect flat.")
+    parser = argparse.ArgumentParser(prog="Flatisfy", description="Find the perfect flat.")
 
     # Parent parser containing arguments common to any subcommand
     parent_parser = argparse.ArgumentParser(add_help=False)
-    parent_parser.add_argument(
-        "--data-dir",
-        help="Location of Flatisfy data directory."
-    )
-    parent_parser.add_argument(
-        "--config",
-        help="Configuration file to use."
-    )
+    parent_parser.add_argument("--data-dir", help="Location of Flatisfy data directory.")
+    parent_parser.add_argument("--config", help="Configuration file to use.")
     parent_parser.add_argument(
-        "--passes", choices=[0, 1, 2, 3], type=int,
-        help="Number of passes to do on the filtered data."
+        "--passes",
+        choices=[0, 1, 2, 3],
+        type=int,
+        help="Number of passes to do on the filtered data.",
     )
-    parent_parser.add_argument(
-        "--max-entries", type=int,
-        help="Maximum number of entries to fetch."
-    )
-    parent_parser.add_argument(
-        "-v", "--verbose", action="store_true",
-        help="Verbose logging output."
-    )
-    parent_parser.add_argument(
-        "-vv", action="store_true",
-        help="Debug logging output."
-    )
+    parent_parser.add_argument("--max-entries", type=int, help="Maximum number of entries to fetch.")
+    parent_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose logging output.")
+    parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
     parent_parser.add_argument(
-        "--constraints", type=str,
-        help="Comma-separated list of constraints to consider."
+        "--constraints",
+        type=str,
+        help="Comma-separated list of constraints to consider.",
     )
 
     # Subcommands
-    subparsers = parser.add_subparsers(
-        dest="cmd", help="Available subcommands"
-    )
+    subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
 
     # Build data subcommand
-    subparsers.add_parser(
-        "build-data", parents=[parent_parser],
-        help="Build necessary data"
-    )
+    subparsers.add_parser("build-data", parents=[parent_parser], help="Build necessary data")
 
     # Init config subcommand
     parser_init_config = subparsers.add_parser(
-        "init-config", parents=[parent_parser],
-        help="Initialize empty configuration."
-    )
-    parser_init_config.add_argument(
-        "output", nargs="?", help="Output config file. Use '-' for stdout."
+        "init-config", parents=[parent_parser], help="Initialize empty configuration."
     )
+    parser_init_config.add_argument("output", nargs="?", help="Output config file. Use '-' for stdout.")
 
     # Fetch subcommand parser
-    subparsers.add_parser("fetch", parents=[parent_parser],
-                          help="Fetch housings posts")
+    subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
 
     # Filter subcommand parser
     parser_filter = subparsers.add_parser(
-        "filter", parents=[parent_parser],
-        help="Filter housings posts according to constraints in config."
+        "filter",
+        parents=[parent_parser],
+        help="Filter housings posts according to constraints in config.",
     )
     parser_filter.add_argument(
         "--input",
@@ -97,28 +77,29 @@ def parse_args(argv=None):
             "no additional fetching of infos is done, and the script outputs "
             "a filtered JSON dump on stdout. If not provided, update status "
             "of the flats in the database."
-        )
+        ),
     )
 
     # Import subcommand parser
-    subparsers.add_parser("import", parents=[parent_parser],
-                          help="Import housing posts in database.")
+    import_filter = subparsers.add_parser("import", parents=[parent_parser], help="Import housing posts in database.")
+    import_filter.add_argument(
+        "--new-only",
+        action="store_true",
+        help=("Download new housing posts only but do not refresh existing ones"),
+    )
 
     # Purge subcommand parser
-    subparsers.add_parser("purge", parents=[parent_parser],
-                          help="Purge database.")
+    subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
 
     # Serve subcommand parser
-    parser_serve = subparsers.add_parser("serve", parents=[parent_parser],
-                                         help="Serve the web app.")
+    parser_serve = subparsers.add_parser("serve", parents=[parent_parser], help="Serve the web app.")
     parser_serve.add_argument("--port", type=int, help="Port to bind to.")
     parser_serve.add_argument("--host", help="Host to listen on.")
 
     # Test subcommand parser
-    subparsers.add_parser("test", parents=[parent_parser],
-                          help="Unit testing.")
+    subparsers.add_parser("test", parents=[parent_parser], help="Unit testing.")
 
-    return parser.parse_args(argv)
+    return parser, parser.parse_args(argv)
 
 
 def main():
@@ -127,25 +108,30 @@ def main():
     """
     # pylint: disable=locally-disabled,too-many-branches
     # Parse arguments
-    args = parse_args()
+    parser, args = parse_args()
 
     # Set logger
-    if args.vv:
-        logging.getLogger('').setLevel(logging.DEBUG)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-    elif args.verbose:
-        logging.getLogger('').setLevel(logging.INFO)
+    if getattr(args, 'vv', False):
+        logging.getLogger("").setLevel(logging.DEBUG)
+        logging.getLogger("titlecase").setLevel(logging.INFO)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
+    elif getattr(args, 'verbose', False):
+        logging.getLogger("").setLevel(logging.INFO)
         # sqlalchemy INFO level is way too loud, just stick with WARNING
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
     else:
-        logging.getLogger('').setLevel(logging.WARNING)
-        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
+        logging.getLogger("").setLevel(logging.WARNING)
+        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
 
     # Init-config command
     if args.cmd == "init-config":
         flatisfy.config.init_config(args.output)
         sys.exit(0)
     else:
+        if not args.cmd:
+            parser.print_help()
+            sys.exit(0)
+
         # Load config
         if args.cmd == "build-data":
             # Data not yet built, do not use it in config checks
@@ -153,9 +139,11 @@
         else:
            config = flatisfy.config.load_config(args, check_with_data=True)
        if config is None:
-            LOGGER.error("Invalid configuration. Exiting. "
-                         "Run init-config before if this is the first time "
-                         "you run Flatisfy.")
+            LOGGER.error(
+                "Invalid configuration. Exiting. "
+                "Run init-config before if this is the first time "
+                "you run Flatisfy."
+            )
            sys.exit(1)
 
     # Purge command
@@ -171,18 +159,11 @@
     if args.cmd == "fetch":
         # Fetch and filter flats list
         fetched_flats = fetch.fetch_flats(config)
-        fetched_flats = cmds.filter_fetched_flats(config,
-                                                  fetched_flats=fetched_flats,
-                                                  fetch_details=True)
+        fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=True)
         # Sort by cost
-        fetched_flats = {
-            k: tools.sort_list_of_dicts_by(v["new"], "cost")
-            for k, v in fetched_flats.items()
-        }
+        fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
 
-        print(
-            tools.pretty_json(fetched_flats)
-        )
+        print(tools.pretty_json(fetched_flats))
         return
     # Filter command
     elif args.cmd == "filter":
@@ -190,28 +171,19 @@
         if args.input:
             fetched_flats = fetch.load_flats_from_file(args.input, config)
 
-            fetched_flats = cmds.filter_fetched_flats(
-                config,
-                fetched_flats=fetched_flats,
-                fetch_details=False
-            )
+            fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=False)
 
             # Sort by cost
-            fetched_flats = {
-                k: tools.sort_list_of_dicts_by(v["new"], "cost")
-                for k, v in fetched_flats.items()
-            }
+            fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
 
             # Output to stdout
-            print(
-                tools.pretty_json(fetched_flats)
-            )
+            print(tools.pretty_json(fetched_flats))
         else:
             cmds.import_and_filter(config, load_from_db=True)
         return
     # Import command
     elif args.cmd == "import":
-        cmds.import_and_filter(config, load_from_db=False)
+        cmds.import_and_filter(config, load_from_db=False, new_only=args.new_only)
         return
     # Serve command
     elif args.cmd == "serve":

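One behavioural change in this hunk is easy to miss: `parse_args` now returns the parser alongside the parsed arguments, so `main()` can fall back to printing usage when no subcommand is given instead of failing. A minimal sketch of the resulting control flow (the empty argument list is purely illustrative):

```python
# Sketch only: mirrors the new (parser, args) return value shown in the diff.
parser, args = parse_args([])  # no subcommand on the command line
if not args.cmd:
    parser.print_help()  # main() now prints usage instead of crashing on args.vv
```
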
flatisfy/cmds.py (100 changes)
@@ -18,21 +18,23 @@ from flatisfy import fetch
 from flatisfy import tools
 from flatisfy.filters import metadata
 from flatisfy.web import app as web_app
 
+import time
+from ratelimit.exception import RateLimitException
+
 LOGGER = logging.getLogger(__name__)
 
 
-def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
+def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
     """
     Filter the available flats list. Then, filter it according to criteria.
 
     :param config: A config dict.
     :param constraint_name: The constraint name that the ``flats_list`` should
         satisfy.
-    :param flats_list: The initial list of flat objects to filter.
     :param fetch_details: Whether additional details should be fetched between
         the two passes.
+    :param flats_list: The initial list of flat objects to filter.
+    :param past_flats: The list of already fetched flats
     :return: A dict mapping flat status and list of flat objects.
     """
     # Add the flatisfy metadata entry and prepare the flat objects
@@ -44,13 +46,9 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
     except KeyError:
         LOGGER.error(
             "Missing constraint %s. Skipping filtering for these posts.",
-            constraint_name
+            constraint_name,
         )
-        return {
-            "new": [],
-            "duplicate": [],
-            "ignored": []
-        }
+        return {"new": [], "duplicate": [], "ignored": []}
 
     first_pass_result = collections.defaultdict(list)
     second_pass_result = collections.defaultdict(list)
@@ -58,52 +56,55 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
     # Do a first pass with the available infos to try to remove as much
     # unwanted postings as possible
     if config["passes"] > 0:
-        first_pass_result = flatisfy.filters.first_pass(flats_list,
-                                                        constraint,
-                                                        config)
+        first_pass_result = flatisfy.filters.first_pass(flats_list, constraint, config)
     else:
         first_pass_result["new"] = flats_list
 
     # Load additional infos
     if fetch_details:
+        past_ids = {x["id"]: x for x in past_flats} if past_flats else {}
         for i, flat in enumerate(first_pass_result["new"]):
-            details = fetch.fetch_details(config, flat["id"])
+            details = None
+
+            use_cache = past_ids.get(flat["id"])
+            if use_cache:
+                LOGGER.debug("Skipping details download for %s.", flat["id"])
+                details = use_cache
+            else:
+                if flat["id"].split("@")[1] in ["seloger", "leboncoin"]:
+                    try:
+                        details = fetch.fetch_details_rate_limited(config, flat["id"])
+                    except RateLimitException:
+                        time.sleep(60)
+                        details = fetch.fetch_details_rate_limited(config, flat["id"])
+                else:
+                    details = fetch.fetch_details(config, flat["id"])
+
             first_pass_result["new"][i] = tools.merge_dicts(flat, details)
 
     # Do a second pass to consolidate all the infos we found and make use of
     # additional infos
     if config["passes"] > 1:
-        second_pass_result = flatisfy.filters.second_pass(
-            first_pass_result["new"], constraint, config
-        )
+        second_pass_result = flatisfy.filters.second_pass(first_pass_result["new"], constraint, config)
     else:
         second_pass_result["new"] = first_pass_result["new"]
 
     # Do a third pass to deduplicate better
     if config["passes"] > 2:
-        third_pass_result = flatisfy.filters.third_pass(
-            second_pass_result["new"],
-            config
-        )
+        third_pass_result = flatisfy.filters.third_pass(second_pass_result["new"], config)
     else:
         third_pass_result["new"] = second_pass_result["new"]
 
     return {
         "new": third_pass_result["new"],
         "duplicate": (
-            first_pass_result["duplicate"] +
-            second_pass_result["duplicate"] +
-            third_pass_result["duplicate"]
+            first_pass_result["duplicate"] + second_pass_result["duplicate"] + third_pass_result["duplicate"]
         ),
-        "ignored": (
-            first_pass_result["ignored"] +
-            second_pass_result["ignored"] +
-            third_pass_result["ignored"]
-        )
+        "ignored": (first_pass_result["ignored"] + second_pass_result["ignored"] + third_pass_result["ignored"]),
     }
 
 
-def filter_fetched_flats(config, fetched_flats, fetch_details=True):
+def filter_fetched_flats(config, fetched_flats, fetch_details=True, past_flats={}):
     """
     Filter the available flats list. Then, filter it according to criteria.
 
@@ -120,12 +121,13 @@ def filter_fetched_flats(config, fetched_flats, fetch_details=True):
             config,
             constraint_name,
             flats_list,
-            fetch_details
+            fetch_details,
+            past_flats.get(constraint_name, None),
         )
     return fetched_flats
 
 
-def import_and_filter(config, load_from_db=False):
+def import_and_filter(config, load_from_db=False, new_only=False):
     """
     Fetch the available flats list. Then, filter it according to criteria.
     Finally, store it in the database.
@@ -136,17 +138,23 @@ def import_and_filter(config, load_from_db=False):
     :return: ``None``.
     """
     # Fetch and filter flats list
+    past_flats = fetch.load_flats_from_db(config)
     if load_from_db:
-        fetched_flats = fetch.load_flats_from_db(config)
+        fetched_flats = past_flats
     else:
         fetched_flats = fetch.fetch_flats(config)
     # Do not fetch additional details if we loaded data from the db.
-    flats_by_status = filter_fetched_flats(config, fetched_flats=fetched_flats,
-                                           fetch_details=(not load_from_db))
+    flats_by_status = filter_fetched_flats(
+        config,
+        fetched_flats=fetched_flats,
+        fetch_details=(not load_from_db),
+        past_flats=past_flats if new_only else {},
+    )
     # Create database connection
     get_session = database.init_db(config["database"], config["search_index"])
 
     new_flats = []
+    result = []
 
     LOGGER.info("Merging fetched flats in database...")
     # Flatten the flats_by_status dict
@@ -159,14 +167,11 @@
         # Set is_expired to true for all existing flats.
         # This will be set back to false if we find them during importing.
         for flat in session.query(flat_model.Flat).all():
-            flat.is_expired = True;
+            flat.is_expired = True
 
         for status, flats_list in flatten_flats_by_status.items():
             # Build SQLAlchemy Flat model objects for every available flat
-            flats_objects = {
-                flat_dict["id"]: flat_model.Flat.from_dict(flat_dict)
-                for flat_dict in flats_list
-            }
+            flats_objects = {flat_dict["id"]: flat_model.Flat.from_dict(flat_dict) for flat_dict in flats_list}
 
             if flats_objects:
                 # If there are some flats, try to merge them with the ones in
@@ -179,9 +184,7 @@
                     # status if the user defined it
                     flat_object = flats_objects[each.id]
                     if each.status in flat_model.AUTOMATED_STATUSES:
-                        flat_object.status = getattr(
-                            flat_model.FlatStatus, status
-                        )
+                        flat_object.status = getattr(flat_model.FlatStatus, status)
                     else:
                         flat_object.status = each.status
 
@@ -198,21 +201,22 @@
                 flat.status = getattr(flat_model.FlatStatus, status)
                 if flat.status == flat_model.FlatStatus.new:
                     new_flats.append(flat)
+                    result.append(flat.id)
 
             session.add_all(flats_objects.values())
 
    if config["send_email"]:
        email.send_notification(config, new_flats)
 
+    LOGGER.info(f"Found {len(result)} new flats.")
+
    # Touch a file to indicate last update timestamp
-    ts_file = os.path.join(
-        config["data_directory"],
-        "timestamp"
-    )
-    with open(ts_file, 'w'):
+    ts_file = os.path.join(config["data_directory"], "timestamp")
+    with open(ts_file, "w"):
        os.utime(ts_file, None)
 
    LOGGER.info("Done!")
+    return result
 
 
 def purge_db(config):
@@ -253,4 +257,4 @@ def serve(config):
         server = web_app.QuietWSGIRefServer
 
+    print("Launching web viewer running on http://%s:%s" % (config["host"], config["port"]))
     app.run(host=config["host"], port=config["port"], server=server)

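The new detail-fetching loop above consults a cache of already-fetched flats (`past_flats`), and routes the seloger and leboncoin backends through `fetch.fetch_details_rate_limited`, sleeping 60 seconds and retrying once on `RateLimitException`. The diff does not show how that helper is declared; a plausible sketch using the `ratelimit` package (whose `limits` decorator is imported in flatisfy/fetch.py further down) might look like the following, where the call budget is purely an assumption:

```python
from ratelimit import limits
from ratelimit.exception import RateLimitException  # raised once the budget is spent

@limits(calls=10, period=60)  # hypothetical budget: 10 detail fetches per minute
def fetch_details_rate_limited(config, flat_id):
    # Delegate to the unthrottled fetcher; callers catch RateLimitException,
    # sleep, and retry, exactly as filter_flats_list does above.
    return fetch_details(config, flat_id)
```

With `@limits`, the decorated function raises instead of blocking when the budget is exceeded, which is why the caller owns the sleep-and-retry policy.
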
@@ -30,24 +30,25 @@ DEFAULT_CONFIG = {
         "default": {
             "type": None,  # RENT, SALE, SHARING
             "house_types": [],  # List of house types, must be in APART, HOUSE,
-                                # PARKING, LAND, OTHER or UNKNOWN
+            # PARKING, LAND, OTHER or UNKNOWN
             "postal_codes": [],  # List of postal codes
+            "insees": [],  # List of postal codes
             "area": (None, None),  # (min, max) in m^2
             "cost": (None, None),  # (min, max) in currency unit
             "rooms": (None, None),  # (min, max)
             "bedrooms": (None, None),  # (min, max)
             "minimum_nb_photos": None,  # min number of photos
             "description_should_contain": [],  # list of terms
-            "description_should_not_contain": [  # list of terms
+            "description_should_not_contain": [
                 "vendu",
                 "Vendu",
                 "VENDU",
-                "recherche"
+                "recherche",
             ],
             "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
-                           #                        "time": (min, max),
-                           #                        "mode": Valid mode }
-                           # Time is in seconds
+            #                        "time": (min, max),
+            #                        "mode": Valid mode }
+            # Time is in seconds
         }
     },
     # Whether or not to store personal data from housing posts (phone number
@@ -91,15 +92,17 @@
     "backends": None,
     # Should email notifications be sent?
     "send_email": False,
-    "smtp_server": 'localhost',
+    "smtp_server": "localhost",
     "smtp_port": 25,
     "smtp_username": None,
     "smtp_password": None,
     "smtp_from": "noreply@flatisfy.org",
     "smtp_to": [],
+    "notification_lang": "en",
     # The web site url, to be used in email notifications. (doesn't matter
     # whether the trailing slash is present or not)
-    "website_url": "http://127.0.0.1:8080"
+    "website_url": "http://127.0.0.1:8080",
+    "ignore_station": False,
 }
 
 LOGGER = logging.getLogger(__name__)
@@ -114,20 +117,14 @@ def validate_config(config, check_with_data):
         check the config values.
     :return: ``True`` if the configuration is valid, ``False`` otherwise.
     """
+
     def _check_constraints_bounds(bounds):
         """
         Check the bounds for numeric constraints.
         """
         assert isinstance(bounds, list)
         assert len(bounds) == 2
-        assert all(
-            x is None or
-            (
-                isinstance(x, (float, int)) and
-                x >= 0
-            )
-            for x in bounds
-        )
+        assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
         if bounds[0] is not None and bounds[1] is not None:
             assert bounds[1] > bounds[0]
 
@@ -139,7 +136,9 @@ def validate_config(config, check_with_data):
     # pylint: disable=locally-disabled,line-too-long
 
     assert config["passes"] in [0, 1, 2, 3]
-    assert config["max_entries"] is None or (isinstance(config["max_entries"], int) and config["max_entries"] > 0)  # noqa: E501
+    assert config["max_entries"] is None or (
+        isinstance(config["max_entries"], int) and config["max_entries"] > 0
+    )  # noqa: E501
 
     assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
     assert os.path.isdir(config["data_directory"])
@@ -159,6 +158,7 @@ def validate_config(config, check_with_data):
     assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
     assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
     assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
+    assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)
 
     assert isinstance(config["store_personal_data"], bool)
     assert isinstance(config["max_distance_housing_station"], (int, float))
@@ -169,6 +169,8 @@ def validate_config(config, check_with_data):
     assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
     assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501
 
+    assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501
+
     # Ensure constraints are ok
     assert config["constraints"]
     for constraint in config["constraints"].values():
@@ -188,8 +190,7 @@ def validate_config(config, check_with_data):
                 assert isinstance(term, str)
 
         assert "description_should_not_contain" in constraint
-        assert isinstance(constraint["description_should_not_contain"],
-                          list)
+        assert isinstance(constraint["description_should_not_contain"], list)
         if constraint["description_should_not_contain"]:
             for term in constraint["description_should_not_contain"]:
                 assert isinstance(term, str)
@@ -202,16 +203,22 @@ def validate_config(config, check_with_data):
         assert "postal_codes" in constraint
         assert constraint["postal_codes"]
         assert all(isinstance(x, str) for x in constraint["postal_codes"])
+        if "insee_codes" in constraint:
+            assert constraint["insee_codes"]
+            assert all(isinstance(x, str) for x in constraint["insee_codes"])
 
         if check_with_data:
             # Ensure data is built into db
             data.preprocess_data(config, force=False)
             # Check postal codes
-            opendata_postal_codes = [
-                x.postal_code
-                for x in data.load_data(PostalCode, constraint, config)
-            ]
+            opendata = data.load_data(PostalCode, constraint, config)
+            opendata_postal_codes = [x.postal_code for x in opendata]
+            opendata_insee_codes = [x.insee_code for x in opendata]
             for postal_code in constraint["postal_codes"]:
                 assert postal_code in opendata_postal_codes  # noqa: E501
+            if "insee_codes" in constraint:
+                for insee in constraint["insee_codes"]:
+                    assert insee in opendata_insee_codes  # noqa: E501
 
         assert "area" in constraint
         _check_constraints_bounds(constraint["area"])
@@ -264,22 +271,18 @@ def load_config(args=None, check_with_data=True):
             config_data.update(json.load(fh))
         except (IOError, ValueError) as exc:
             LOGGER.error(
-                "Unable to load configuration from file, "
-                "using default configuration: %s.",
-                exc
+                "Unable to load configuration from file, using default configuration: %s.",
+                exc,
             )
 
     # Overload config with arguments
     if args and getattr(args, "passes", None) is not None:
-        LOGGER.debug(
-            "Overloading number of passes from CLI arguments: %d.",
-            args.passes
-        )
+        LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
         config_data["passes"] = args.passes
     if args and getattr(args, "max_entries", None) is not None:
         LOGGER.debug(
             "Overloading maximum number of entries from CLI arguments: %d.",
-            args.max_entries
+            args.max_entries,
         )
         config_data["max_entries"] = args.max_entries
     if args and getattr(args, "port", None) is not None:
@@ -294,49 +297,39 @@ def load_config(args=None, check_with_data=True):
         LOGGER.debug("Overloading data directory from CLI arguments.")
         config_data["data_directory"] = args.data_dir
     elif config_data["data_directory"] is None:
-        config_data["data_directory"] = appdirs.user_data_dir(
-            "flatisfy",
-            "flatisfy"
-        )
-        LOGGER.debug("Using default XDG data directory: %s.",
-                     config_data["data_directory"])
+        config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
+        LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])
 
     if not os.path.isdir(config_data["data_directory"]):
-        LOGGER.info("Creating data directory according to config: %s",
-                    config_data["data_directory"])
+        LOGGER.info(
+            "Creating data directory according to config: %s",
+            config_data["data_directory"],
+        )
         os.makedirs(config_data["data_directory"])
         os.makedirs(os.path.join(config_data["data_directory"], "images"))
 
     if config_data["database"] is None:
-        config_data["database"] = "sqlite:///" + os.path.join(
-            config_data["data_directory"],
-            "flatisfy.db"
-        )
+        config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")
 
     if config_data["search_index"] is None:
-        config_data["search_index"] = os.path.join(
-            config_data["data_directory"],
-            "search_index"
-        )
+        config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")
 
     # Handle constraints filtering
     if args and getattr(args, "constraints", None) is not None:
         LOGGER.info(
-            ("Filtering constraints from config according to CLI argument. "
-             "Using only the following constraints: %s."),
-            args.constraints.replace(",", ", ")
+            (
+                "Filtering constraints from config according to CLI argument. "
+                "Using only the following constraints: %s."
+            ),
+            args.constraints.replace(",", ", "),
         )
         constraints_filter = args.constraints.split(",")
-        config_data["constraints"] = {
-            k: v
-            for k, v in config_data["constraints"].items()
-            if k in constraints_filter
-        }
+        config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}
 
     # Sanitize website url
     if config_data["website_url"] is not None:
-        if config_data["website_url"][-1] != '/':
-            config_data["website_url"] += '/'
+        if config_data["website_url"][-1] != "/":
+            config_data["website_url"] += "/"
 
     config_validation = validate_config(config_data, check_with_data)
     if config_validation is True:

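Taken together, the validation changes above mean a constraint may now optionally carry `insee_codes` next to the still-mandatory `postal_codes`, with both checked against the LaPoste opendata when `check_with_data` is set. A hypothetical constraint fragment exercising it (the code values are illustrative, not taken from this diff):

```python
# Illustrative constraint dict; "75001"/"75101" are example codes only.
constraint = {
    "postal_codes": ["75001"],  # required, validated against opendata postal codes
    "insee_codes": ["75101"],   # new and optional, validated against INSEE codes
}
```
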
@@ -16,7 +16,7 @@ BACKENDS_BY_PRECEDENCE = [
     "pap",
     "leboncoin",
     "explorimmo",
-    "logicimmo"
+    "logicimmo",
 ]
 
 

@@ -24,11 +24,13 @@ except ImportError:
     try:
         from functools32 import lru_cache
     except ImportError:
+
         def lru_cache(maxsize=None):  # pylint: disable=unused-argument
             """
             Identity implementation of ``lru_cache`` for fallback.
             """
             return lambda func: func
+
         LOGGER.warning(
             "`functools.lru_cache` is not available on your system. Consider "
             "installing `functools32` Python module if using Python2 for "
@@ -48,10 +50,7 @@ def preprocess_data(config, force=False):
     # Check if a build is required
     get_session = database.init_db(config["database"], config["search_index"])
     with get_session() as session:
-        is_built = (
-            session.query(PublicTransport).count() > 0 and
-            session.query(PostalCode).count() > 0
-        )
+        is_built = session.query(PublicTransport).count() > 0 and session.query(PostalCode).count() > 0
         if is_built and not force:
             # No need to rebuild the database, skip
             return False
@@ -64,9 +63,7 @@ def preprocess_data(config, force=False):
     for preprocess in data_files.PREPROCESSING_FUNCTIONS:
         data_objects = preprocess()
         if not data_objects:
-            raise flatisfy.exceptions.DataBuildError(
-                "Error with %s." % preprocess.__name__
-            )
+            raise flatisfy.exceptions.DataBuildError("Error with %s." % preprocess.__name__)
         with get_session() as session:
             session.add_all(data_objects)
     LOGGER.info("Done building data!")
@@ -96,10 +93,7 @@ def load_data(model, constraint, config):
         # Load data for each area
         areas = list(set(areas))
         for area in areas:
-            results.extend(
-                session.query(model)
-                .filter(model.area == area).all()
-            )
+            results.extend(session.query(model).filter(model.area == area).all())
         # Expunge loaded data from the session to be able to use them
         # afterwards
         session.expunge_all()

@@ -24,8 +24,8 @@ MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
 
 titlecase.set_small_word_list(
     # Add French small words
-    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|" +
-    titlecase.SMALL
+    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|"
+    + titlecase.SMALL
 )
 
 TRANSPORT_DATA_FILES = {
@@ -33,7 +33,7 @@ TRANSPORT_DATA_FILES = {
     "FR-NW": "stops_fr-nw.txt",
     "FR-NE": "stops_fr-ne.txt",
     "FR-SW": "stops_fr-sw.txt",
-    "FR-SE": "stops_fr-se.txt"
+    "FR-SE": "stops_fr-se.txt",
 }
 
 
@@ -51,8 +51,20 @@ def french_postal_codes_to_quarter(postal_code):
     # French departements
     # Taken from Wikipedia data.
     department_to_subdivision = {
-        "FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43", "63", "69",
-                   "73", "74"],
+        "FR-ARA": [
+            "01",
+            "03",
+            "07",
+            "15",
+            "26",
+            "38",
+            "42",
+            "43",
+            "63",
+            "69",
+            "73",
+            "74",
+        ],
         "FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
         "FR-BRE": ["22", "29", "35", "44", "56"],
         "FR-CVL": ["18", "28", "36", "37", "41", "45"],
@@ -61,36 +73,53 @@ def french_postal_codes_to_quarter(postal_code):
         "FR-HDF": ["02", "59", "60", "62", "80"],
         "FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
         "FR-NOR": ["14", "27", "50", "61", "76"],
-        "FR-NAQ": ["16", "17", "19", "23", "24", "33", "40", "47", "64", "79",
-                   "86", "87"],
-        "FR-OCC": ["09", "11", "12", "30", "31", "32", "34", "46", "48", "65",
-                   "66", "81", "82"],
+        "FR-NAQ": [
+            "16",
+            "17",
+            "19",
+            "23",
+            "24",
+            "33",
+            "40",
+            "47",
+            "64",
+            "79",
+            "86",
+            "87",
+        ],
+        "FR-OCC": [
+            "09",
+            "11",
+            "12",
+            "30",
+            "31",
+            "32",
+            "34",
+            "46",
+            "48",
+            "65",
+            "66",
+            "81",
+            "82",
+        ],
         "FR-PDL": ["44", "49", "53", "72", "85"],
-        "FR-PAC": ["04", "05", "06", "13", "83", "84"]
+        "FR-PAC": ["04", "05", "06", "13", "83", "84"],
     }
     subdivision_to_quarters = {
-        'FR-IDF': ['FR-IDF'],
-        'FR-NW': ['FR-BRE', 'FR-CVL', 'FR-NOR', 'FR-PDL'],
-        'FR-NE': ['FR-BFC', 'FR-GES', 'FR-HDF'],
-        'FR-SE': ['FR-ARA', 'FR-COR', 'FR-PAC', 'FR-OCC'],
-        'FR-SW': ['FR-NAQ']
+        "FR-IDF": ["FR-IDF"],
+        "FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
+        "FR-NE": ["FR-BFC", "FR-GES", "FR-HDF"],
+        "FR-SE": ["FR-ARA", "FR-COR", "FR-PAC", "FR-OCC"],
+        "FR-SW": ["FR-NAQ"],
     }
 
     subdivision = next(
-        (
-            i
-            for i, departments in department_to_subdivision.items()
-            if departement in departments
-        ),
-        None
+        (i for i, departments in department_to_subdivision.items() if departement in departments),
+        None,
     )
     return next(
-        (
-            i
-            for i, subdivisions in subdivision_to_quarters.items()
-            if subdivision in subdivisions
-        ),
-        None
+        (i for i, subdivisions in subdivision_to_quarters.items() if subdivision in subdivisions),
+        None,
     )
 
 
@@ -106,9 +135,7 @@ def _preprocess_laposte():
     raw_laposte_data = []
     # Load opendata file
     try:
-        with io.open(
-            os.path.join(MODULE_DIR, data_file), "r", encoding='utf-8'
-        ) as fh:
+        with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
             raw_laposte_data = json.load(fh)
     except (IOError, ValueError):
         LOGGER.error("Invalid raw LaPoste opendata file.")
@@ -124,31 +151,30 @@ def _preprocess_laposte():
         try:
             area = french_postal_codes_to_quarter(fields["code_postal"])
             if area is None:
-                LOGGER.info(
+                LOGGER.debug(
                     "No matching area found for postal code %s, skipping it.",
-                    fields["code_postal"]
+                    fields["code_postal"],
                 )
                 continue
 
-            name = normalize_string(
-                titlecase.titlecase(fields["nom_de_la_commune"]),
-                lowercase=False
-            )
+            name = normalize_string(titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False)
 
             if (fields["code_postal"], name) in seen_postal_codes:
                 continue
 
             seen_postal_codes.append((fields["code_postal"], name))
-            postal_codes_data.append(PostalCode(
-                area=area,
-                postal_code=fields["code_postal"],
-                name=name,
-                lat=fields["coordonnees_gps"][0],
-                lng=fields["coordonnees_gps"][1]
-            ))
+            postal_codes_data.append(
+                PostalCode(
+                    area=area,
+                    postal_code=fields["code_postal"],
+                    insee_code=fields["code_commune_insee"],
+                    name=name,
+                    lat=fields["coordonnees_gps"][0],
+                    lng=fields["coordonnees_gps"][1],
+                )
+            )
         except KeyError:
-            LOGGER.info("Missing data for postal code %s, skipping it.",
-                        fields["code_postal"])
+            LOGGER.debug("Missing data for postal code %s, skipping it.", fields["code_postal"])
 
     return postal_codes_data
 
@@ -164,17 +190,11 @@ def _preprocess_public_transport():
     for area, data_file in TRANSPORT_DATA_FILES.items():
         LOGGER.info("Building from public transport data %s.", data_file)
         try:
-            with io.open(os.path.join(MODULE_DIR, data_file), "r",
-                         encoding='utf-8') as fh:
+            with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
                 filereader = csv.reader(fh)
                 next(filereader, None)  # Skip first row (headers)
                 for row in filereader:
-                    public_transport_data.append(PublicTransport(
-                        name=row[2],
-                        area=area,
-                        lat=row[3],
-                        lng=row[4]
-                    ))
+                    public_transport_data.append(PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4]))
         except (IOError, IndexError):
             LOGGER.error("Invalid raw opendata file: %s.", data_file)
             return []
@@ -183,7 +203,4 @@ def _preprocess_public_transport():
 
 
 # List of all the available preprocessing functions. Order can be important.
-PREPROCESSING_FUNCTIONS = [
-    _preprocess_laposte,
-    _preprocess_public_transport
-]
+PREPROCESSING_FUNCTIONS = [_preprocess_laposte, _preprocess_public_transport]

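For readers tracing the tables above: `french_postal_codes_to_quarter` resolves a postal code in two hops, departement to subdivision, then subdivision to transport-data quarter. Assuming the `departement` variable (defined above the shown hunk) is the usual two-digit postal-code prefix, the behaviour read off the two dicts would be:

```python
# Behaviour inferred from the two mappings above, not separately tested:
french_postal_codes_to_quarter("75010")  # "75" -> "FR-IDF" -> "FR-IDF"
french_postal_codes_to_quarter("33000")  # "33" -> "FR-NAQ" -> "FR-SW"
french_postal_codes_to_quarter("99999")  # unknown departement -> None
```
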
File diff suppressed because one or more lines are too long
@@ -47,9 +47,7 @@ def init_db(database_uri=None, search_db_uri=None):
     Session = sessionmaker(bind=engine)  # pylint: disable=locally-disabled,invalid-name
 
     if search_db_uri:
-        index_service = IndexService(
-            whoosh_base=search_db_uri
-        )
+        index_service = IndexService(whoosh_base=search_db_uri)
         index_service.register_class(flatisfy.models.flat.Flat)
 
     @contextmanager

@@ -50,4 +50,4 @@ class StringyJSON(types.TypeDecorator):
 # TypeEngine.with_variant says "use StringyJSON instead when
 # connecting to 'sqlite'"
 # pylint: disable=locally-disabled,invalid-name
-MagicJSON = types.JSON().with_variant(StringyJSON, 'sqlite')
+MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")

@@ -30,7 +30,6 @@ from whoosh.qparser import MultifieldParser
 
 
 class IndexService(object):
-
     def __init__(self, config=None, whoosh_base=None):
         if not whoosh_base and config:
             whoosh_base = config.get("WHOOSH_BASE")
@@ -84,8 +83,7 @@ class IndexService(object):
                 primary = field.name
                 continue
             if field.name in model_class.__searchable__:
-                schema[field.name] = whoosh.fields.TEXT(
-                    analyzer=StemmingAnalyzer())
+                schema[field.name] = whoosh.fields.TEXT(analyzer=StemmingAnalyzer())
         return Schema(**schema), primary
 
     def before_commit(self, session):
@@ -93,21 +91,18 @@ class IndexService(object):
 
         for model in session.new:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("new", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("new", model))
 
         for model in session.deleted:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("deleted", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("deleted", model))
 
         for model in session.dirty:
             model_class = model.__class__
-            if hasattr(model_class, '__searchable__'):
-                self.to_update.setdefault(model_class.__name__, []).append(
-                    ("changed", model))
+            if hasattr(model_class, "__searchable__"):
+                self.to_update.setdefault(model_class.__name__, []).append(("changed", model))
 
     def after_commit(self, session):
         """
@@ -128,16 +123,11 @@ class IndexService(object):
                     # added as a new doc. Could probably replace this with a whoosh
                     # update.
 
-                    writer.delete_by_term(
-                        primary_field, text_type(getattr(model, primary_field)))
+                    writer.delete_by_term(primary_field, text_type(getattr(model, primary_field)))
 
                     if change_type in ("new", "changed"):
-                        attrs = dict((key, getattr(model, key))
-                                     for key in searchable)
-                        attrs = {
-                            attr: text_type(getattr(model, attr))
-                            for attr in attrs.keys()
-                        }
+                        attrs = dict((key, getattr(model, key)) for key in searchable)
+                        attrs = {attr: text_type(getattr(model, attr)) for attr in attrs.keys()}
                         attrs[primary_field] = text_type(getattr(model, primary_field))
                         writer.add_document(**attrs)
 
@@ -158,8 +148,7 @@ class Searcher(object):
         self.parser = MultifieldParser(list(fields), index.schema)
 
     def __call__(self, session, query, limit=None):
-        results = self.index.searcher().search(
-            self.parser.parse(query), limit=limit)
+        results = self.index.searcher().search(self.parser.parse(query), limit=limit)
 
         keys = [x[self.primary] for x in results]
         primary_column = getattr(self.model_class, self.primary)

@@ -8,7 +8,7 @@ from builtins import str
 
 import logging
 import smtplib
-
+from money import Money
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
 from email.utils import formatdate, make_msgid
@@ -36,15 +36,15 @@ def send_email(server, port, subject, _from, _to, txt, html, username=None, pass
     if username or password:
         server.login(username or "", password or "")
 
-    msg = MIMEMultipart('alternative')
-    msg['Subject'] = subject
-    msg['From'] = _from
-    msg['To'] = ', '.join(_to)
-    msg['Date'] = formatdate()
-    msg['Message-ID'] = make_msgid()
+    msg = MIMEMultipart("alternative")
+    msg["Subject"] = subject
+    msg["From"] = _from
+    msg["To"] = ", ".join(_to)
+    msg["Date"] = formatdate()
+    msg["Message-ID"] = make_msgid()
 
-    msg.attach(MIMEText(txt, 'plain', 'utf-8'))
-    msg.attach(MIMEText(html, 'html', 'utf-8'))
+    msg.attach(MIMEText(txt, "plain", "utf-8"))
+    msg.attach(MIMEText(html, "html", "utf-8"))
 
     server.sendmail(_from, _to, msg.as_string())
     server.quit()
@@ -61,13 +61,33 @@ def send_notification(config, flats):
     if not flats:
         return
 
-    txt = u'Hello dear user,\n\nThe following new flats have been found:\n\n'
-    html = """
+    i18n = {
+        "en": {
+            "subject": f"{len(flats)} new flats found!",
+            "hello": "Hello dear user",
+            "following_new_flats": "The following new flats have been found:",
+            "area": "area",
+            "cost": "cost",
+            "signature": "Hope you'll find what you were looking for.",
+        },
+        "fr": {
+            "subject": f"{len(flats)} nouvelles annonces disponibles !",
+            "hello": "Bonjour cher utilisateur",
+            "following_new_flats": "Voici les nouvelles annonces :",
+            "area": "surface",
+            "cost": "coût",
+            "signature": "Bonne recherche",
+        },
+    }
+    trs = i18n.get(config["notification_lang"], "en")
+
+    txt = trs["hello"] + ",\n\n\n\n"
+    html = f"""
     <html>
     <head></head>
     <body>
-        <p>Hello dear user!</p>
-        <p>The following new flats have been found:
+        <p>{trs["hello"]}!</p>
+        <p>{trs["following_new_flats"]}
 
         <ul>
     """
@@ -77,41 +97,47 @@ def send_notification(config, flats):
     for flat in flats:
         title = str(flat.title)
         flat_id = str(flat.id)
-        area = str(flat.area)
-        cost = str(flat.cost)
+        area = str(int(flat.area))
+        cost = int(flat.cost)
         currency = str(flat.currency)
 
-        txt += (
-            '- {}: {}#/flat/{} (area: {}, cost: {} {})\n'.format(
-                title, website_url, flat_id, area, cost, currency
-            )
-        )
-
-        html += """
+        txt += f"- {title}: {website_url}#/flat/{flat_id}"
+        html += f"""
         <li>
-            <a href="{}#/flat/{}">{}</a>
-            (area: {}, cost: {} {})
-        </li>
-        """.format(website_url, flat_id, title, area, cost, currency)
+            <a href="{website_url}#/flat/{flat_id}">{title}</a>
+        """
+
+        fields = []
+        if area:
+            fields.append(f"{trs['area']}: {area}m²")
+        if cost:
+            money = Money(cost, currency).format(config["notification_lang"])
+            fields.append(f"{trs['cost']}: {money}")
+
+        if len(fields):
+            txt += f'({", ".join(fields)})'
+            html += f'({", ".join(fields)})'
+
+        html += "</li>"
 
     html += "</ul>"
 
-    signature = (
-        u"\nHope you'll find what you were looking for.\n\nBye!\nFlatisfy"
-    )
+    signature = f"\n{trs['signature']}\n\nBye!\nFlatisfy"
     txt += signature
-    html += signature.replace('\n', '<br>')
+    html += signature.replace("\n", "<br>")
 
     html += """</p>
     </body>
     </html>"""
 
-    send_email(config["smtp_server"],
-               config["smtp_port"],
-               "New flats found!",
-               config["smtp_from"],
-               config["smtp_to"],
-               txt,
-               html,
-               config.get("smtp_username"),
-               config.get("smtp_password"))
+    send_email(
+        config["smtp_server"],
+        config["smtp_port"],
+        trs["subject"],
+        config["smtp_from"],
+        config["smtp_to"],
+        txt,
+        html,
+        config.get("smtp_username"),
+        config.get("smtp_password"),
+    )

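The rewritten notification body localizes its strings through the new `i18n` table and formats prices with the `money` package instead of raw concatenation. Note that `i18n.get(config["notification_lang"], "en")` falls back to the string `"en"` rather than the English dictionary `i18n["en"]`, so an unknown language would break the `trs[...]` lookups below it. A small usage sketch of the formatting call (output is indicative; `money` delegates locale-aware formatting to Babel, which must be installed):

```python
from money import Money

price = Money("1250", "EUR")
print(price.format("fr"))  # e.g. "1 250,00 €" with Babel's French locale data
print(price.format("en"))  # e.g. "€1,250.00"
```
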
@@ -10,4 +10,5 @@ class DataBuildError(Exception):
     """
     Error occurring on building a data file.
     """
+
     pass

@@ -9,6 +9,7 @@ import collections
import itertools
import json
import logging
from ratelimit import limits

from flatisfy import database
from flatisfy import tools
@@ -24,8 +25,7 @@ try:
    from weboob.core.ouiboube import WebNip
    from weboob.tools.json import WeboobEncoder
except ImportError:
    LOGGER.error("Weboob is not available on your system. Make sure you "
                 "installed it.")
    LOGGER.error("Weboob is not available on your system. Make sure you installed it.")
    raise


@@ -34,6 +34,7 @@ class WebOOBProxy(object):
    Wrapper around WebOOB ``WebNip`` class, to fetch housing posts without
    having to spawn a subprocess.
    """

    @staticmethod
    def version():
        """
@@ -77,14 +78,14 @@ class WebOOBProxy(object):
        self.webnip = WebNip(modules_path=config["modules_path"])

        # Create backends
        self.backends = [
            self.webnip.load_backend(
                module,
                module,
                params={}
            )
            for module in backends
        ]
        self.backends = []
        for module in backends:
            try:
                self.backends.append(
                    self.webnip.load_backend(module, module, params={})
                )
            except Exception as exc:
                raise Exception('Unable to load module ' + module) from exc

    def __enter__(self):
        return self
@@ -114,28 +115,21 @@ class WebOOBProxy(object):
        except CallErrors as exc:
            # If an error occured, just log it
            LOGGER.error(
                (
                    "An error occured while building query for "
                    "postal code %s: %s"
                ),
                ("An error occured while building query for postal code %s: %s"),
                postal_code,
                str(exc)
                str(exc),
            )

        if not matching_cities:
            # If postal code gave no match, warn the user
            LOGGER.warn(
                "Postal code %s could not be matched with a city.",
                postal_code
            )
            LOGGER.warn("Postal code %s could not be matched with a city.", postal_code)

        # Remove "TOUTES COMMUNES" entry which are duplicates of the individual
        # cities entries in Logicimmo module.
        matching_cities = [
            city
            for city in matching_cities
            if not (city.backend == 'logicimmo' and
                    city.name.startswith('TOUTES COMMUNES'))
            if not (city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES"))
        ]

        # Then, build queries by grouping cities by at most 3
@@ -145,21 +139,14 @@ class WebOOBProxy(object):

        try:
            query.house_types = [
                getattr(
                    HOUSE_TYPES,
                    house_type.upper()
                )
                for house_type in constraints_dict["house_types"]
                getattr(HOUSE_TYPES, house_type.upper()) for house_type in constraints_dict["house_types"]
            ]
        except AttributeError:
            LOGGER.error("Invalid house types constraint.")
            return None

        try:
            query.type = getattr(
                POSTS_TYPES,
                constraints_dict["type"].upper()
            )
            query.type = getattr(POSTS_TYPES, constraints_dict["type"].upper())
        except AttributeError:
            LOGGER.error("Invalid post type constraint.")
            return None
@@ -190,26 +177,22 @@ class WebOOBProxy(object):
        # TODO: Handle max_entries better
        try:
            for housing in itertools.islice(
                self.webnip.do(
                    'search_housings',
                    query,
                    # Only run the call on the required backends.
                    # Otherwise, WebOOB is doing weird stuff and returning
                    # nonsense.
                    backends=[x for x in self.backends
                              if x.name in useful_backends]
                ),
                max_entries
                self.webnip.do(
                    "search_housings",
                    query,
                    # Only run the call on the required backends.
                    # Otherwise, WebOOB is doing weird stuff and returning
                    # nonsense.
                    backends=[x for x in self.backends if x.name in useful_backends],
                ),
                max_entries,
            ):
                if not store_personal_data:
                    housing.phone = None
                housings.append(json.dumps(housing, cls=WeboobEncoder))
        except CallErrors as exc:
            # If an error occured, just log it
            LOGGER.error(
                "An error occured while fetching the housing posts: %s",
                str(exc)
            )
            LOGGER.error("An error occured while fetching the housing posts: %s", str(exc))
        return housings

    def info(self, full_flat_id, store_personal_data=False):
@@ -224,34 +207,26 @@ class WebOOBProxy(object):
        """
        flat_id, backend_name = full_flat_id.rsplit("@", 1)
        try:
            backend = next(
                backend
                for backend in self.backends
                if backend.name == backend_name
            )
            backend = next(backend for backend in self.backends if backend.name == backend_name)
        except StopIteration:
            LOGGER.error("Backend %s is not available.", backend_name)
            return "{}"

        try:
            housing = backend.get_housing(flat_id)
            # Otherwise, we miss the @backend afterwards
            housing.id = full_flat_id
            if not store_personal_data:
                # Ensure phone is cleared
                housing.phone = None
            else:
                # Ensure phone is fetched
                backend.fillobj(housing, 'phone')
                backend.fillobj(housing, "phone")
            # Otherwise, we miss the @backend afterwards
            housing.id = full_flat_id

            return json.dumps(housing, cls=WeboobEncoder)
        except Exception as exc:  # pylint: disable=broad-except
            # If an error occured, just log it
            LOGGER.error(
                "An error occured while fetching housing %s: %s",
                full_flat_id,
                str(exc)
            )
            LOGGER.error("An error occured while fetching housing %s: %s", full_flat_id, str(exc))
            return "{}"


@@ -271,19 +246,24 @@ def fetch_flats(config):
        queries = webOOB_proxy.build_queries(constraint)
        housing_posts = []
        for query in queries:
            housing_posts.extend(
                webOOB_proxy.query(query, config["max_entries"],
                                   config["store_personal_data"])
            )
            housing_posts.extend(webOOB_proxy.query(query, config["max_entries"], config["store_personal_data"]))
        housing_posts = housing_posts[: config["max_entries"]]
        LOGGER.info("Fetched %d flats.", len(housing_posts))

        constraint_flats_list = [json.loads(flat) for flat in housing_posts]
        constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat)
                                 for flat in constraint_flats_list]
        constraint_flats_list = [WebOOBProxy.restore_decimal_fields(flat) for flat in constraint_flats_list]
        fetched_flats[constraint_name] = constraint_flats_list
    return fetched_flats

@limits(calls=10, period=60)
def fetch_details_rate_limited(config, flat_id):
    """
    Limit flats fetching to at most 10 calls per minute to avoid rate banning
    """
    return fetch_details(config, flat_id)

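A note on the `ratelimit` decorator used here: as documented for that package, `@limits` alone raises `ratelimit.RateLimitException` on the call that exceeds the budget rather than waiting. A small self-contained sketch of both behaviors, with a stand-in function instead of the real `fetch_details`:

```python
import time
from ratelimit import RateLimitException, limits, sleep_and_retry

@sleep_and_retry              # sleep until the window resets instead of raising
@limits(calls=10, period=60)  # same budget as fetch_details_rate_limited above
def fetch_one(flat_id):
    # Stand-in for the real fetch_details(config, flat_id) call.
    return {"id": flat_id, "fetched_at": time.time()}

# Without @sleep_and_retry, the 11th call inside one minute raises
# RateLimitException, so callers of fetch_details_rate_limited must catch it.
```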
def fetch_details(config, flat_id):
    """
    Fetch the additional details for a flat using Flatboob / WebOOB.
@@ -294,8 +274,7 @@ def fetch_details(config, flat_id):
    """
    with WebOOBProxy(config) as webOOB_proxy:
        LOGGER.info("Loading additional details for flat %s.", flat_id)
        webOOB_output = webOOB_proxy.info(flat_id,
                                          config["store_personal_data"])
        webOOB_output = webOOB_proxy.info(flat_id, config["store_personal_data"])

        flat_details = json.loads(webOOB_output)
        flat_details = WebOOBProxy.restore_decimal_fields(flat_details)
@@ -326,10 +305,7 @@ def load_flats_from_file(json_file, config):
        LOGGER.info("Found %d flats.", len(flats_list))
    except (IOError, ValueError):
        LOGGER.error("File %s is not a valid dump file.", json_file)
    return {
        constraint_name: flats_list
        for constraint_name in config["constraints"]
    }
    return {constraint_name: flats_list for constraint_name in config["constraints"]}


def load_flats_from_db(config):

|
||||
for i, flat in enumerate(flats_list):
|
||||
# Check postal code
|
||||
postal_code = flat["flatisfy"].get("postal_code", None)
|
||||
if (
|
||||
postal_code and
|
||||
postal_code not in constraint["postal_codes"]
|
||||
):
|
||||
LOGGER.info("Postal code for flat %s is out of range.", flat["id"])
|
||||
is_ok[i] = is_ok[i] and False
|
||||
if postal_code and postal_code not in constraint["postal_codes"]:
|
||||
LOGGER.info(
|
||||
"Postal code %s for flat %s is out of range (%s).",
|
||||
postal_code,
|
||||
flat["id"],
|
||||
", ".join(constraint["postal_codes"]),
|
||||
)
|
||||
is_ok[i] = False
|
||||
# Check insee code
|
||||
insee_code = flat["flatisfy"].get("insee_code", None)
|
||||
if insee_code and "insee_codes" in constraint and insee_code not in constraint["insee_codes"]:
|
||||
LOGGER.info(
|
||||
"insee code %s for flat %s is out of range (%s).",
|
||||
insee_code,
|
||||
flat["id"],
|
||||
", ".join(constraint["insee_codes"]),
|
||||
)
|
||||
is_ok[i] = False
|
||||
|
||||
# Check time_to
|
||||
for place_name, time in flat["flatisfy"].get("time_to", {}).items():
|
||||
time = time["time"]
|
||||
is_within_interval = tools.is_within_interval(
|
||||
time,
|
||||
*(constraint["time_to"][place_name]["time"])
|
||||
)
|
||||
is_within_interval = tools.is_within_interval(time, *(constraint["time_to"][place_name]["time"]))
|
||||
if not is_within_interval:
|
||||
LOGGER.info("Flat %s is too far from place %s: %ds.",
|
||||
flat["id"], place_name, time)
|
||||
LOGGER.info(
|
||||
"Flat %s is too far from place %s: %ds.",
|
||||
flat["id"],
|
||||
place_name,
|
||||
time,
|
||||
)
|
||||
is_ok[i] = is_ok[i] and is_within_interval
|
||||
|
||||
# Check other fields
|
||||
for field in ["area", "cost", "rooms", "bedrooms"]:
|
||||
interval = constraint[field]
|
||||
is_within_interval = tools.is_within_interval(
|
||||
flat.get(field, None),
|
||||
*interval
|
||||
)
|
||||
is_within_interval = tools.is_within_interval(flat.get(field, None), *interval)
|
||||
if not is_within_interval:
|
||||
LOGGER.info("%s for flat %s is out of range.",
|
||||
field.capitalize(), flat["id"])
|
||||
LOGGER.info(
|
||||
"%s %s for flat %s is out of range.", field.capitalize(), str(flat.get(field, None)), flat["id"]
|
||||
)
|
||||
is_ok[i] = is_ok[i] and is_within_interval
|
||||
|
||||
return (
|
||||
[
|
||||
flat
|
||||
for i, flat in enumerate(flats_list)
|
||||
if is_ok[i]
|
||||
],
|
||||
[
|
||||
flat
|
||||
for i, flat in enumerate(flats_list)
|
||||
if not is_ok[i]
|
||||
]
|
||||
[flat for i, flat in enumerate(flats_list) if is_ok[i]],
|
||||
[flat for i, flat in enumerate(flats_list) if not is_ok[i]],
|
||||
)
|
||||
|
||||
|
||||
@ -103,54 +106,36 @@ def refine_with_details_criteria(flats_list, constraint):
|
||||
|
||||
for i, flat in enumerate(flats_list):
|
||||
# Check number of pictures
|
||||
has_enough_photos = tools.is_within_interval(
|
||||
len(flat.get('photos', [])),
|
||||
constraint['minimum_nb_photos'],
|
||||
None
|
||||
)
|
||||
has_enough_photos = tools.is_within_interval(len(flat.get("photos", [])), constraint["minimum_nb_photos"], None)
|
||||
if not has_enough_photos:
|
||||
LOGGER.info(
|
||||
"Flat %s only has %d photos, it should have at least %d.",
|
||||
flat["id"],
|
||||
len(flat['photos']),
|
||||
constraint['minimum_nb_photos']
|
||||
len(flat["photos"]),
|
||||
constraint["minimum_nb_photos"],
|
||||
)
|
||||
is_ok[i] = False
|
||||
|
||||
has_all_good_terms_in_description = True
|
||||
if constraint["description_should_contain"]:
|
||||
has_all_good_terms_in_description = all(
|
||||
term in flat['text']
|
||||
for term in constraint["description_should_contain"]
|
||||
)
|
||||
|
||||
has_a_bad_term_in_description = False
|
||||
if constraint["description_should_not_contain"]:
|
||||
has_a_bad_term_in_description = any(
|
||||
term in flat['text']
|
||||
for term in constraint["description_should_not_contain"]
|
||||
)
|
||||
|
||||
if (not has_all_good_terms_in_description
|
||||
or has_a_bad_term_in_description):
|
||||
LOGGER.info(
|
||||
("Description for flat %s does not contain all the required "
|
||||
"terms, or contains a blacklisted term."),
|
||||
flat["id"]
|
||||
)
|
||||
is_ok[i] = False
|
||||
for term in constraint["description_should_contain"]:
|
||||
if term.lower() not in flat["text"].lower():
|
||||
LOGGER.info(
|
||||
("Description for flat %s does not contain required term '%s'."),
|
||||
flat["id"],
|
||||
term,
|
||||
)
|
||||
is_ok[i] = False
|
||||
for term in constraint["description_should_not_contain"]:
|
||||
if term.lower() in flat["text"].lower():
|
||||
LOGGER.info(
|
||||
("Description for flat %s contains blacklisted term '%s'."),
|
||||
flat["id"],
|
||||
term,
|
||||
)
|
||||
is_ok[i] = False
|
||||
|
||||
return (
|
||||
[
|
||||
flat
|
||||
for i, flat in enumerate(flats_list)
|
||||
if is_ok[i]
|
||||
],
|
||||
[
|
||||
flat
|
||||
for i, flat in enumerate(flats_list)
|
||||
if not is_ok[i]
|
||||
]
|
||||
[flat for i, flat in enumerate(flats_list) if is_ok[i]],
|
||||
[flat for i, flat in enumerate(flats_list) if not is_ok[i]],
|
||||
)
|
||||
|
||||
|
||||
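The rewritten hunk above replaces the all/any check with per-term, case-insensitive matching that also logs which term failed. A standalone restatement of the new predicate (the helper name is ours, for illustration only):

```python
def description_is_acceptable(text, required_terms, blacklisted_terms):
    lowered = text.lower()
    missing = [t for t in required_terms if t.lower() not in lowered]
    banned = [t for t in blacklisted_terms if t.lower() in lowered]
    return not missing and not banned

assert description_is_acceptable("Grand T2 avec balcon", ["balcon"], ["rez-de-chaussee"])
assert not description_is_acceptable("T2 en rez-de-chaussee", ["balcon"], ["rez-de-chaussee"])
```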
@@ -172,30 +157,25 @@ def first_pass(flats_list, constraint, config):

    # Handle duplicates based on ids
    # Just remove them (no merge) as they should be the exact same object.
    flats_list, _ = duplicates.detect(
        flats_list, key="id", merge=False, should_intersect=False
    )
    flats_list, _ = duplicates.detect(flats_list, key="id", merge=False, should_intersect=False)
    # Also merge duplicates based on urls (these may come from different
    # flatboob backends)
    # This is especially useful as some websites such as entreparticuliers
    # contains a lot of leboncoin housings posts.
    flats_list, duplicates_by_urls = duplicates.detect(
        flats_list, key="urls", merge=True, should_intersect=True
    )
    flats_list, duplicates_by_urls = duplicates.detect(flats_list, key="urls", merge=True, should_intersect=True)

    # Guess the postal codes
    flats_list = metadata.guess_postal_code(flats_list, constraint, config)
    # Try to match with stations
    flats_list = metadata.guess_stations(flats_list, constraint, config)
    # Remove returned housing posts that do not match criteria
    flats_list, ignored_list = refine_with_housing_criteria(flats_list,
                                                            constraint)

    return {
        "new": flats_list,
        "ignored": ignored_list,
        "duplicate": duplicates_by_urls
    }
    if not config["ignore_station"]:
        # Try to match with stations
        flats_list = metadata.guess_stations(flats_list, constraint, config)

    # Remove returned housing posts that do not match criteria
    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)

    return {"new": flats_list, "ignored": ignored_list, "duplicate": duplicates_by_urls}


@tools.timeit
def second_pass(flats_list, constraint, config):
@@ -222,28 +202,24 @@ def second_pass(flats_list, constraint, config):
    flats_list = metadata.guess_postal_code(flats_list, constraint, config)

    # Better match with stations (confirm and check better)
    flats_list = metadata.guess_stations(flats_list, constraint, config)
    if not config["ignore_station"]:
        flats_list = metadata.guess_stations(flats_list, constraint, config)

    # Compute travel time to specified points
    flats_list = metadata.compute_travel_times(flats_list, constraint, config)
    # Compute travel time to specified points
    flats_list = metadata.compute_travel_times(flats_list, constraint, config)

    # Remove returned housing posts that do not match criteria
    flats_list, ignored_list = refine_with_housing_criteria(flats_list,
                                                            constraint)
    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)

    # Remove returned housing posts which do not match criteria relying on
    # fetched details.
    flats_list, ignored_list = refine_with_details_criteria(flats_list,
                                                            constraint)
    flats_list, ignored_list = refine_with_details_criteria(flats_list, constraint)

    if config["serve_images_locally"]:
        images.download_images(flats_list, config)

    return {
        "new": flats_list,
        "ignored": ignored_list,
        "duplicate": []
    }
    return {"new": flats_list, "ignored": ignored_list, "duplicate": []}


@tools.timeit
def third_pass(flats_list, config):
@@ -262,8 +238,4 @@ def third_pass(flats_list, config):
    # Deduplicate the list using every available data
    flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)

    return {
        "new": flats_list,
        "ignored": [],
        "duplicate": duplicate_flats
    }
    return {"new": flats_list, "ignored": [], "duplicate": duplicate_flats}

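For orientation, a rough sketch of how these passes chain together; the names mirror the functions above, but this ignores the per-flat detail fetching that the real CLI performs between the first and second pass, so treat it as a simplification:

```python
def run_filters(flats_list, constraint, config):
    result = first_pass(flats_list, constraint, config)      # cheap dedup + coarse criteria
    result = second_pass(result["new"], constraint, config)  # detail-based criteria, travel times
    result = third_pass(result["new"], config)               # deep, photo-based dedup
    return result
```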
@@ -9,15 +9,19 @@ import collections
import hashlib
import os
import requests
import logging
from io import BytesIO

import PIL.Image

LOGGER = logging.getLogger(__name__)


class MemoryCache(object):
    """
    A cache in memory.
    """

    @staticmethod
    def on_miss(key):
        """
@@ -85,6 +89,7 @@ class ImageCache(MemoryCache):
    """
    A cache for images, stored in memory.
    """

    @staticmethod
    def compute_filename(url):
        """
@@ -104,23 +109,27 @@ class ImageCache(MemoryCache):
        if len(self.map.keys()) > self.max_items:
            self.map.popitem(last=False)

        if url.endswith(".svg"):
            # Skip SVG photo which are unsupported and unlikely to be relevant
            return None

        filepath = None
        # Try to load from local folder
        if self.storage_dir:
            filepath = os.path.join(
                self.storage_dir,
                self.compute_filename(url)
            )
            filepath = os.path.join(self.storage_dir, self.compute_filename(url))
            if os.path.isfile(filepath):
                return PIL.Image.open(filepath)
        # Otherwise, fetch it
        try:
            LOGGER.debug(f"Download photo from {url} to {filepath}")
            req = requests.get(url)
            req.raise_for_status()
            image = PIL.Image.open(BytesIO(req.content))
            if self.storage_dir:
            if filepath:
                image.save(filepath, format=image.format)
            return image
        except (requests.HTTPError, IOError):
        except (requests.HTTPError, IOError) as exc:
            LOGGER.info(f"Download photo from {url} failed: {exc}")
            return None

    def __init__(self, max_items=200, storage_dir=None):
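The `popitem(last=False)` call above is the eviction half of a bounded FIFO cache built on an `OrderedDict`. A minimal sketch of that pattern, stripped of the hit/miss accounting the real `MemoryCache` also carries:

```python
import collections

class BoundedCache:
    def __init__(self, max_items=200):
        self.map = collections.OrderedDict()
        self.max_items = max_items

    def get(self, key, on_miss):
        if key in self.map:
            return self.map[key]
        if len(self.map) >= self.max_items:
            # Drop the oldest inserted entry (FIFO eviction).
            self.map.popitem(last=False)
        self.map[key] = value = on_miss(key)
        return value
```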
@@ -35,14 +35,14 @@ def homogeneize_phone_number(numbers):

    clean_numbers = []

    for number in numbers.split(','):
    for number in numbers.split(","):
        number = number.strip()
        number = number.replace(".", "")
        number = number.replace(" ", "")
        number = number.replace("-", "")
        number = number.replace("(", "")
        number = number.replace(")", "")
        number = re.sub(r'^\+\d\d', "", number)
        number = re.sub(r"^\+\d\d", "", number)

        if not number.startswith("0"):
            number = "0" + number
|
||||
return False
|
||||
|
||||
|
||||
def find_number_common_photos(
|
||||
flat1_photos,
|
||||
flat2_photos,
|
||||
photo_cache,
|
||||
hash_threshold
|
||||
):
|
||||
def find_number_common_photos(flat1_photos, flat2_photos, photo_cache, hash_threshold):
|
||||
"""
|
||||
Compute the number of common photos between the two lists of photos for the
|
||||
flats.
|
||||
@ -174,22 +169,21 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
|
||||
# Sort matching flats by backend precedence
|
||||
matching_flats.sort(
|
||||
key=lambda flat: next(
|
||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
|
||||
if flat["id"].endswith(backend)
|
||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
|
||||
),
|
||||
reverse=True
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
if len(matching_flats) > 1:
|
||||
LOGGER.info("Found duplicates using key \"%s\": %s.",
|
||||
key,
|
||||
[flat["id"] for flat in matching_flats])
|
||||
LOGGER.info(
|
||||
'Found duplicates using key "%s": %s.',
|
||||
key,
|
||||
[flat["id"] for flat in matching_flats],
|
||||
)
|
||||
# Otherwise, check the policy
|
||||
if merge:
|
||||
# If a merge is requested, do the merge
|
||||
unique_flats_list.append(
|
||||
tools.merge_dicts(*matching_flats)
|
||||
)
|
||||
unique_flats_list.append(tools.merge_dicts(*matching_flats))
|
||||
else:
|
||||
# Otherwise, just keep the most important of them
|
||||
unique_flats_list.append(matching_flats[-1])
|
||||
@ -203,8 +197,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
|
||||
if should_intersect:
|
||||
# We added some flats twice with the above method, let's deduplicate on
|
||||
# id.
|
||||
unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True,
|
||||
should_intersect=False)
|
||||
unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, should_intersect=False)
|
||||
|
||||
return unique_flats_list, duplicate_flats
|
||||
|
||||
@ -250,14 +243,12 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
|
||||
|
||||
# They should have the same postal code, if available
|
||||
if (
|
||||
"flatisfy" in flat1 and "flatisfy" in flat2 and
|
||||
flat1["flatisfy"].get("postal_code", None) and
|
||||
flat2["flatisfy"].get("postal_code", None)
|
||||
"flatisfy" in flat1
|
||||
and "flatisfy" in flat2
|
||||
and flat1["flatisfy"].get("postal_code", None)
|
||||
and flat2["flatisfy"].get("postal_code", None)
|
||||
):
|
||||
assert (
|
||||
flat1["flatisfy"]["postal_code"] ==
|
||||
flat2["flatisfy"]["postal_code"]
|
||||
)
|
||||
assert flat1["flatisfy"]["postal_code"] == flat2["flatisfy"]["postal_code"]
|
||||
n_common_items += 1
|
||||
|
||||
# TODO: Better text comparison (one included in the other, fuzzymatch)
|
||||
@ -279,28 +270,16 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
|
||||
# If the two flats are from the same website and have a
|
||||
# different float part, consider they cannot be duplicates. See
|
||||
# https://framagit.org/phyks/Flatisfy/issues/100.
|
||||
both_are_from_same_backend = (
|
||||
flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
|
||||
)
|
||||
both_have_float_part = (
|
||||
(flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
|
||||
)
|
||||
both_have_equal_float_part = (
|
||||
(flat1["area"] % 1) == (flat2["area"] % 1)
|
||||
)
|
||||
both_are_from_same_backend = flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
|
||||
both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
|
||||
both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
|
||||
if both_have_float_part and both_are_from_same_backend:
|
||||
assert both_have_equal_float_part
|
||||
|
||||
if flat1.get("photos", []) and flat2.get("photos", []):
|
||||
n_common_photos = find_number_common_photos(
|
||||
flat1["photos"],
|
||||
flat2["photos"],
|
||||
photo_cache,
|
||||
hash_threshold
|
||||
)
|
||||
n_common_photos = find_number_common_photos(flat1["photos"], flat2["photos"], photo_cache, hash_threshold)
|
||||
|
||||
min_number_photos = min(len(flat1["photos"]),
|
||||
len(flat2["photos"]))
|
||||
min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
|
||||
|
||||
# Either all the photos are the same, or there are at least
|
||||
# three common photos.
|
||||
@ -332,9 +311,7 @@ def deep_detect(flats_list, config):
|
||||
storage_dir = os.path.join(config["data_directory"], "images")
|
||||
else:
|
||||
storage_dir = None
|
||||
photo_cache = ImageCache(
|
||||
storage_dir=storage_dir
|
||||
)
|
||||
photo_cache = ImageCache(storage_dir=storage_dir)
|
||||
|
||||
LOGGER.info("Running deep duplicates detection.")
|
||||
matching_flats = collections.defaultdict(list)
|
||||
@ -347,30 +324,26 @@ def deep_detect(flats_list, config):
|
||||
if flat2["id"] in matching_flats[flat1["id"]]:
|
||||
continue
|
||||
|
||||
n_common_items = get_duplicate_score(
|
||||
flat1,
|
||||
flat2,
|
||||
photo_cache,
|
||||
config["duplicate_image_hash_threshold"]
|
||||
)
|
||||
n_common_items = get_duplicate_score(flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"])
|
||||
|
||||
# Minimal score to consider they are duplicates
|
||||
if n_common_items >= config["duplicate_threshold"]:
|
||||
# Mark flats as duplicates
|
||||
LOGGER.info(
|
||||
("Found duplicates using deep detection: (%s, %s). "
|
||||
"Score is %d."),
|
||||
("Found duplicates using deep detection: (%s, %s). Score is %d."),
|
||||
flat1["id"],
|
||||
flat2["id"],
|
||||
n_common_items
|
||||
n_common_items,
|
||||
)
|
||||
matching_flats[flat1["id"]].append(flat2["id"])
|
||||
matching_flats[flat2["id"]].append(flat1["id"])
|
||||
|
||||
if photo_cache.total():
|
||||
LOGGER.debug("Photo cache: hits: %d%% / misses: %d%%.",
|
||||
photo_cache.hit_rate(),
|
||||
photo_cache.miss_rate())
|
||||
LOGGER.debug(
|
||||
"Photo cache: hits: %d%% / misses: %d%%.",
|
||||
photo_cache.hit_rate(),
|
||||
photo_cache.miss_rate(),
|
||||
)
|
||||
|
||||
seen_ids = []
|
||||
duplicate_flats = []
|
||||
@ -381,16 +354,11 @@ def deep_detect(flats_list, config):
|
||||
|
||||
seen_ids.extend(matching_flats[flat_id])
|
||||
to_merge = sorted(
|
||||
[
|
||||
flat
|
||||
for flat in flats_list
|
||||
if flat["id"] in matching_flats[flat_id]
|
||||
],
|
||||
[flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
|
||||
key=lambda flat: next(
|
||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
|
||||
if flat["id"].endswith(backend)
|
||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
|
||||
),
|
||||
reverse=True
|
||||
reverse=True,
|
||||
)
|
||||
unique_flats_list.append(tools.merge_dicts(*to_merge))
|
||||
# The ID of the added merged flat will be the one of the last item
|
||||
|
@@ -22,15 +22,8 @@ def download_images(flats_list, config):
    :param flats_list: A list of flats dicts.
    :param config: A config dict.
    """
    photo_cache = ImageCache(
        storage_dir=os.path.join(config["data_directory"], "images")
    )
    flats_list_length = len(flats_list)
    for i, flat in enumerate(flats_list):
        LOGGER.info(
            "Downloading photos for flat %d/%d: %s.",
            i + 1, flats_list_length, flat["id"]
        )
    photo_cache = ImageCache(storage_dir=os.path.join(config["data_directory"], "images"))
    for flat in flats_list:
        for photo in flat["photos"]:
            # Download photo
            image = photo_cache.get(photo["url"])

@@ -76,10 +76,10 @@ def fuzzy_match(query, choices, limit=3, threshold=75):

    Example::

        >>> match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
        >>> fuzzy_match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
        [("Paris 14", 100)

        >>> match( \
        >>> fuzzy_match( \
                "Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \
                "Mouton-Duvernet", \
                ["saint-jacques", "denfert rochereau", "duvernet", "toto"], \
@@ -88,8 +88,8 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
        [('denfert rochereau', 100), ('saint-jacques', 76)]
    """
    # TODO: Is there a better confidence measure?
    normalized_query = tools.normalize_string(query)
    normalized_choices = [tools.normalize_string(choice) for choice in choices]
    normalized_query = tools.normalize_string(query).replace("saint", "st")
    normalized_choices = [tools.normalize_string(choice).replace("saint", "st") for choice in choices]

    # Remove duplicates in the choices list
    unique_normalized_choices = tools.uniqify(normalized_choices)
@@ -97,13 +97,9 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
    # Get the matches (normalized strings)
    # Keep only ``limit`` matches.
    matches = sorted(
        [
            (choice, len(choice))
            for choice in tools.uniqify(unique_normalized_choices)
            if choice in normalized_query
        ],
        [(choice, len(choice)) for choice in tools.uniqify(unique_normalized_choices) if choice in normalized_query],
        key=lambda x: x[1],
        reverse=True
        reverse=True,
    )
    if limit:
        matches = matches[:limit]
@@ -111,22 +107,66 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
    # Update confidence
    if matches:
        max_confidence = max(match[1] for match in matches)
        matches = [
            (x[0], int(x[1] / max_confidence * 100))
            for x in matches
        ]
        matches = [(x[0], int(x[1] / max_confidence * 100)) for x in matches]

    # Convert back matches to original strings
    # Also filter out matches below threshold
    matches = [
        (choices[normalized_choices.index(x[0])], x[1])
        for x in matches
        if x[1] >= threshold
    ]
    matches = [(choices[normalized_choices.index(x[0])], x[1]) for x in matches if x[1] >= threshold]

    return matches


def guess_location_position(location, cities, constraint, must_match):
    # try to find a city
    # Find all fuzzy-matching cities
    postal_code = None
    insee_code = None
    position = None

    matched_cities = fuzzy_match(location, [x.name for x in cities], limit=None)
    if matched_cities:
        # Find associated postal codes
        matched_postal_codes = []
        for matched_city_name, _ in matched_cities:
            postal_code_objects_for_city = [x for x in cities if x.name == matched_city_name]
            insee_code = [pc.insee_code for pc in postal_code_objects_for_city][0]
            matched_postal_codes.extend(pc.postal_code for pc in postal_code_objects_for_city)
        # Try to match them with postal codes in config constraint
        matched_postal_codes_in_config = set(matched_postal_codes) & set(constraint["postal_codes"])
        if matched_postal_codes_in_config:
            # If there are some matched postal codes which are also in
            # config, use them preferentially. This avoid ignoring
            # incorrectly some flats in cities with multiple postal
            # codes, see #110.
            postal_code = next(iter(matched_postal_codes_in_config))
        else:
            # Otherwise, simply take any matched postal code.
            postal_code = matched_postal_codes[0]

        # take the city position
        for matched_city_name, _ in matched_cities:
            postal_code_objects_for_city = [
                x for x in cities if x.name == matched_city_name and x.postal_code == postal_code
            ]
            if len(postal_code_objects_for_city):
                position = {
                    "lat": postal_code_objects_for_city[0].lat,
                    "lng": postal_code_objects_for_city[0].lng,
                }
                LOGGER.debug(("Found position %s using city %s."), position, matched_city_name)
                break

    if not postal_code and must_match:
        postal_code = cities[0].postal_code
        position = {
            "lat": cities[0].lat,
            "lng": cities[0].lng,
        }
        insee_code = cities[0].insee_code

    return (postal_code, insee_code, position)

|
||||
|
||||
:return: An updated list of flats dict with guessed postal code.
|
||||
"""
|
||||
opendata = {
|
||||
"postal_codes": data.load_data(PostalCode, constraint, config)
|
||||
}
|
||||
opendata = {"postal_codes": data.load_data(PostalCode, constraint, config)}
|
||||
|
||||
for flat in flats_list:
|
||||
location = flat.get("location", None)
|
||||
if not location:
|
||||
addr = flat.get("address", None)
|
||||
if addr:
|
||||
location = addr["full_address"]
|
||||
if not location:
|
||||
# Skip everything if empty location
|
||||
LOGGER.info(
|
||||
(
|
||||
"No location field for flat %s, skipping postal "
|
||||
"code lookup."
|
||||
),
|
||||
flat["id"]
|
||||
("No location field for flat %s, skipping postal code lookup. (%s)"),
|
||||
flat["id"],
|
||||
flat.get("address"),
|
||||
)
|
||||
continue
|
||||
|
||||
postal_code = None
|
||||
insee_code = None
|
||||
position = None
|
||||
|
||||
# Try to find a postal code directly
|
||||
try:
|
||||
postal_code = re.search(r"[0-9]{5}", location)
|
||||
@ -166,86 +209,51 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
||||
postal_code = postal_code.group(0)
|
||||
|
||||
# Check the postal code is within the db
|
||||
assert postal_code in [x.postal_code
|
||||
for x in opendata["postal_codes"]]
|
||||
assert postal_code in [x.postal_code for x in opendata["postal_codes"]]
|
||||
|
||||
LOGGER.info(
|
||||
"Found postal code in location field for flat %s: %s.",
|
||||
flat["id"], postal_code
|
||||
LOGGER.debug(
|
||||
"Found postal code directly in location field for flat %s: %s.",
|
||||
flat["id"],
|
||||
postal_code,
|
||||
)
|
||||
except AssertionError:
|
||||
postal_code = None
|
||||
|
||||
# If not found, try to find a city
|
||||
if not postal_code:
|
||||
# Find all fuzzy-matching cities
|
||||
matched_cities = fuzzy_match(
|
||||
location,
|
||||
[x.name for x in opendata["postal_codes"]],
|
||||
limit=None
|
||||
)
|
||||
if matched_cities:
|
||||
# Find associated postal codes
|
||||
matched_postal_codes = []
|
||||
for matched_city_name, _ in matched_cities:
|
||||
postal_code_objects_for_city = [
|
||||
x for x in opendata["postal_codes"]
|
||||
if x.name == matched_city_name
|
||||
]
|
||||
matched_postal_codes.extend(
|
||||
pc.postal_code
|
||||
for pc in postal_code_objects_for_city
|
||||
)
|
||||
# Try to match them with postal codes in config constraint
|
||||
matched_postal_codes_in_config = (
|
||||
set(matched_postal_codes) & set(constraint["postal_codes"])
|
||||
)
|
||||
if matched_postal_codes_in_config:
|
||||
# If there are some matched postal codes which are also in
|
||||
# config, use them preferentially. This avoid ignoring
|
||||
# incorrectly some flats in cities with multiple postal
|
||||
# codes, see #110.
|
||||
postal_code = next(iter(matched_postal_codes_in_config))
|
||||
else:
|
||||
# Otherwise, simply take any matched postal code.
|
||||
postal_code = matched_postal_codes[0]
|
||||
LOGGER.info(
|
||||
("Found postal code in location field through city lookup "
|
||||
"for flat %s: %s."),
|
||||
flat["id"], postal_code
|
||||
)
|
||||
# Then fetch position (and postal_code is couldn't be found earlier)
|
||||
cities = opendata["postal_codes"]
|
||||
if postal_code:
|
||||
cities = [x for x in cities if x.postal_code == postal_code]
|
||||
(postal_code, insee_code, position) = guess_location_position(
|
||||
location, cities, constraint, postal_code is not None
|
||||
)
|
||||
|
||||
# Check that postal code is not too far from the ones listed in config,
|
||||
# limit bad fuzzy matching
|
||||
if postal_code and distance_threshold:
|
||||
distance = min(
|
||||
tools.distance(
|
||||
next(
|
||||
(x.lat, x.lng)
|
||||
for x in opendata["postal_codes"]
|
||||
if x.postal_code == postal_code
|
||||
),
|
||||
next(
|
||||
(x.lat, x.lng)
|
||||
for x in opendata["postal_codes"]
|
||||
if x.postal_code == constraint_postal_code
|
||||
)
|
||||
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code),
|
||||
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == constraint_postal_code),
|
||||
)
|
||||
for constraint_postal_code in constraint["postal_codes"]
|
||||
)
|
||||
|
||||
if distance > distance_threshold:
|
||||
LOGGER.info(
|
||||
("Postal code %s found for flat %s is off-constraints "
|
||||
"(distance is %dm > %dm). Let's consider it is an "
|
||||
"artifact match and keep the post without this postal "
|
||||
"code."),
|
||||
(
|
||||
"Postal code %s found for flat %s @ %s is off-constraints "
|
||||
"(distance is %dm > %dm). Let's consider it is an "
|
||||
"artifact match and keep the post without this postal "
|
||||
"code."
|
||||
),
|
||||
postal_code,
|
||||
flat["id"],
|
||||
location,
|
||||
int(distance),
|
||||
int(distance_threshold)
|
||||
int(distance_threshold),
|
||||
)
|
||||
postal_code = None
|
||||
position = None
|
||||
|
||||
# Store it
|
||||
if postal_code:
|
||||
@ -253,12 +261,28 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
||||
if existing_postal_code and existing_postal_code != postal_code:
|
||||
LOGGER.warning(
|
||||
"Replacing previous postal code %s by %s for flat %s.",
|
||||
existing_postal_code, postal_code, flat["id"]
|
||||
existing_postal_code,
|
||||
postal_code,
|
||||
flat["id"],
|
||||
)
|
||||
flat["flatisfy"]["postal_code"] = postal_code
|
||||
else:
|
||||
LOGGER.info("No postal code found for flat %s.", flat["id"])
|
||||
|
||||
if insee_code:
|
||||
flat["flatisfy"]["insee_code"] = insee_code
|
||||
|
||||
if position:
|
||||
flat["flatisfy"]["position"] = position
|
||||
LOGGER.debug(
|
||||
"found postal_code=%s insee_code=%s position=%s for flat %s (%s).",
|
||||
postal_code,
|
||||
insee_code,
|
||||
position,
|
||||
flat["id"],
|
||||
location,
|
||||
)
|
||||
|
||||
return flats_list
|
||||
|
||||
|
||||
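The off-constraints check above relies on flatisfy's `tools.distance` helper. A sketch of the great-circle computation it presumably performs (a textbook haversine in meters; the real helper may differ in detail):

```python
import math

def distance(gps1, gps2):
    lat1, lng1 = (math.radians(x) for x in gps1)
    lat2, lng2 = (math.radians(x) for x in gps2)
    a = (
        math.sin((lat2 - lat1) / 2) ** 2
        + math.cos(lat1) * math.cos(lat2) * math.sin((lng2 - lng1) / 2) ** 2
    )
    return 2 * 6371000 * math.asin(math.sqrt(a))  # Earth radius in meters

# Paris (48.8566, 2.3522) to Versailles (48.8049, 2.1204) is roughly 18 km,
# inside the default 20 km distance_threshold.
print(round(distance((48.8566, 2.3522), (48.8049, 2.1204))))
```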
@@ -272,10 +296,10 @@ def guess_stations(flats_list, constraint, config):

    :return: An updated list of flats dict with guessed nearby stations.
    """
    distance_threshold = config['max_distance_housing_station']
    distance_threshold = config["max_distance_housing_station"]
    opendata = {
        "postal_codes": data.load_data(PostalCode, constraint, config),
        "stations": data.load_data(PublicTransport, constraint, config)
        "stations": data.load_data(PublicTransport, constraint, config),
    }

    for flat in flats_list:
@@ -283,14 +307,11 @@ def guess_stations(flats_list, constraint, config):

        if not flat_station:
            # Skip everything if empty station
            LOGGER.info(
                "No stations field for flat %s, skipping stations lookup.",
                flat["id"]
            )
            LOGGER.info("No stations field for flat %s, skipping stations lookup.", flat["id"])
            continue

        # Weboob modules can return several stations in a comma-separated list.
        flat_stations = flat_station.split(',')
        flat_stations = flat_station.split(",")
        # But some stations containing a comma exist, so let's add the initial
        # value to the list of stations to check if there was one.
        if len(flat_stations) > 1:
@@ -302,7 +323,7 @@ def guess_stations(flats_list, constraint, config):
                tentative_station,
                [x.name for x in opendata["stations"]],
                limit=10,
                threshold=50
                threshold=50,
            )

        # Keep only one occurrence of each station
@@ -315,54 +336,43 @@ def guess_stations(flats_list, constraint, config):
        if postal_code:
            # If there is a postal code, check that the matched station is
            # closed to it
            postal_code_gps = next(
                (x.lat, x.lng)
                for x in opendata["postal_codes"]
                if x.postal_code == postal_code
            )
            postal_code_gps = next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code)
            for station in matched_stations:
                # Note that multiple stations with the same name exist in a
                # city, hence the list of stations objects for a given matching
                # station name.
                stations_objects = [
                    x for x in opendata["stations"] if x.name == station[0]
                ]
                stations_objects = [x for x in opendata["stations"] if x.name == station[0]]
                for station_data in stations_objects:
                    distance = tools.distance(
                        (station_data.lat, station_data.lng),
                        postal_code_gps
                    )
                    distance = tools.distance((station_data.lat, station_data.lng), postal_code_gps)
                    if distance < distance_threshold:
                        # If at least one of the coordinates for a given
                        # station is close enough, that's ok and we can add
                        # the station
                        good_matched_stations.append({
                            "key": station[0],
                            "name": station_data.name,
                            "confidence": station[1],
                            "gps": (station_data.lat, station_data.lng)
                        })
                        good_matched_stations.append(
                            {
                                "key": station[0],
                                "name": station_data.name,
                                "confidence": station[1],
                                "gps": (station_data.lat, station_data.lng),
                            }
                        )
                        break
                LOGGER.info(
                    ("Station %s is too far from flat %s (%dm > %dm), "
                     "discarding this station."),
                    ("Station %s is too far from flat %s (%dm > %dm), discarding this station."),
                    station[0],
                    flat["id"],
                    int(distance),
                    int(distance_threshold)
                    int(distance_threshold),
                )
        else:
            LOGGER.info(
                "No postal code for flat %s, skipping stations detection.",
                flat["id"]
            )
            LOGGER.info("No postal code for flat %s, skipping stations detection.", flat["id"])

        if not good_matched_stations:
            # No stations found, log it and cotninue with next housing
            LOGGER.info(
                "No stations found for flat %s, matching %s.",
                flat["id"],
                flat["station"]
                flat["station"],
            )
            continue

@@ -370,29 +380,20 @@ def guess_stations(flats_list, constraint, config):
            "Found stations for flat %s: %s (matching %s).",
            flat["id"],
            ", ".join(x["name"] for x in good_matched_stations),
            flat["station"]
            flat["station"],
        )

        # If some stations were already filled in and the result is different,
        # display some warning to the user
        if (
            "matched_stations" in flat["flatisfy"] and
            (
                # Do a set comparison, as ordering is not important
                set([
                    station["name"]
                    for station in flat["flatisfy"]["matched_stations"]
                ]) !=
                set([
                    station["name"]
                    for station in good_matched_stations
                ])
            )
        if "matched_stations" in flat["flatisfy"] and (
            # Do a set comparison, as ordering is not important
            set([station["name"] for station in flat["flatisfy"]["matched_stations"]])
            != set([station["name"] for station in good_matched_stations])
        ):
            LOGGER.warning(
                "Replacing previously fetched stations for flat %s. Found "
                "stations differ from the previously found ones.",
                flat["id"]
                flat["id"],
            )

        flat["flatisfy"]["matched_stations"] = good_matched_stations
|
||||
if not flat["flatisfy"].get("matched_stations", []):
|
||||
# Skip any flat without matched stations
|
||||
LOGGER.info(
|
||||
"Skipping travel time computation for flat %s. No matched "
|
||||
"stations.",
|
||||
flat["id"]
|
||||
"Skipping travel time computation for flat %s. No matched stations.",
|
||||
flat["id"],
|
||||
)
|
||||
continue
|
||||
|
||||
@ -435,15 +435,10 @@ def compute_travel_times(flats_list, constraint, config):
|
||||
for station in flat["flatisfy"]["matched_stations"]:
|
||||
# Time from station is a dict with time and route
|
||||
time_from_station_dict = tools.get_travel_time_between(
|
||||
station["gps"],
|
||||
place["gps"],
|
||||
TimeToModes[mode],
|
||||
config
|
||||
station["gps"], place["gps"], TimeToModes[mode], config
|
||||
)
|
||||
if (
|
||||
time_from_station_dict and
|
||||
(time_from_station_dict["time"] < time_to_place_dict or
|
||||
time_to_place_dict is None)
|
||||
if time_from_station_dict and (
|
||||
time_from_station_dict["time"] < time_to_place_dict or time_to_place_dict is None
|
||||
):
|
||||
# If starting from this station makes the route to the
|
||||
# specified place shorter, update
|
||||
@ -452,7 +447,10 @@ def compute_travel_times(flats_list, constraint, config):
|
||||
if time_to_place_dict:
|
||||
LOGGER.info(
|
||||
"Travel time between %s and flat %s by %s is %ds.",
|
||||
place_name, flat["id"], mode, time_to_place_dict["time"]
|
||||
place_name,
|
||||
flat["id"],
|
||||
mode,
|
||||
time_to_place_dict["time"],
|
||||
)
|
||||
flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
|
||||
return flats_list
|
||||
|
@@ -11,7 +11,15 @@ import enum
import arrow

from sqlalchemy import (
    Boolean, Column, DateTime, Enum, Float, SmallInteger, String, Text, inspect
    Boolean,
    Column,
    DateTime,
    Enum,
    Float,
    SmallInteger,
    String,
    Text,
    inspect,
)
from sqlalchemy.orm import validates

@@ -26,6 +34,7 @@ class FlatUtilities(enum.Enum):
    """
    An enum of the possible utilities status for a flat entry.
    """

    included = 10
    unknown = 0
    excluded = -10
@@ -35,6 +44,7 @@ class FlatStatus(enum.Enum):
    """
    An enum of the possible status for a flat entry.
    """

    user_deleted = -100
    duplicate = -20
    ignored = -10
@@ -47,21 +57,16 @@ class FlatStatus(enum.Enum):

# List of statuses that are automatically handled, and which the user cannot
# manually set through the UI.
AUTOMATED_STATUSES = [
    FlatStatus.new,
    FlatStatus.duplicate,
    FlatStatus.ignored
]
AUTOMATED_STATUSES = [FlatStatus.new, FlatStatus.duplicate, FlatStatus.ignored]


class Flat(BASE):
    """
    SQLAlchemy ORM model to store a flat.
    """

    __tablename__ = "flats"
    __searchable__ = [
        "title", "text", "station", "location", "details", "notes"
    ]
    __searchable__ = ["title", "text", "station", "location", "details", "notes"]

    # Weboob data
    id = Column(String, primary_key=True)
@@ -91,6 +96,7 @@ class Flat(BASE):
    flatisfy_postal_code = Column(String)
    flatisfy_time_to = Column(MagicJSON)
    flatisfy_constraint = Column(String)
    flatisfy_position = Column(MagicJSON)

    # Status
    status = Column(Enum(FlatStatus), default=FlatStatus.new)
@@ -98,7 +104,7 @@ class Flat(BASE):
    # Date for visit
    visit_date = Column(DateTime)

    @validates('utilities')
    @validates("utilities")
    def validate_utilities(self, _, utilities):
        """
        Utilities validation method
@@ -123,8 +129,7 @@ class Flat(BASE):
        try:
            return getattr(FlatStatus, status)
        except (AttributeError, TypeError):
            LOGGER.warn("Unkown flat status %s, ignoring it.",
                        status)
            LOGGER.warn("Unkown flat status %s, ignoring it.", status)
            return self.status.default.arg

    @validates("notation")
@@ -136,7 +141,7 @@ class Flat(BASE):
            notation = int(notation)
            assert notation >= 0 and notation <= 5
        except (ValueError, AssertionError):
            raise ValueError('notation should be an integer between 0 and 5')
            raise ValueError("notation should be an integer between 0 and 5")
        return notation

    @validates("date")
@@ -144,14 +149,18 @@ class Flat(BASE):
        """
        Date validation method
        """
        return arrow.get(date).naive
        if date:
            return arrow.get(date).naive
        return None

    @validates("visit_date")
    def validate_visit_date(self, _, visit_date):
        """
        Visit date validation method
        """
        return arrow.get(visit_date).naive
        if visit_date:
            return arrow.get(visit_date).naive
        return None

    @validates("photos")
    def validate_photos(self, _, photos):
@@ -177,22 +186,14 @@ class Flat(BASE):
        # Handle flatisfy metadata
        flat_dict = flat_dict.copy()
        if "flatisfy" in flat_dict:
            flat_dict["flatisfy_stations"] = (
                flat_dict["flatisfy"].get("matched_stations", [])
            )
            flat_dict["flatisfy_postal_code"] = (
                flat_dict["flatisfy"].get("postal_code", None)
            )
            flat_dict["flatisfy_time_to"] = (
                flat_dict["flatisfy"].get("time_to", {})
            )
            flat_dict["flatisfy_constraint"] = (
                flat_dict["flatisfy"].get("constraint", "default")
            )
            flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get("matched_stations", [])
            flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get("postal_code", None)
            flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
            flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
            flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get("constraint", "default")
            del flat_dict["flatisfy"]

        flat_dict = {k: v for k, v in flat_dict.items()
                     if k in inspect(Flat).columns.keys()}
        flat_dict = {k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()}
        return Flat(**flat_dict)

    def __repr__(self):
@@ -203,11 +204,7 @@ class Flat(BASE):
        Return a dict representation of this flat object that is JSON
        serializable.
        """
        flat_repr = {
            k: v
            for k, v in self.__dict__.items()
            if not k.startswith("_")
        }
        flat_repr = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
        if isinstance(flat_repr["status"], FlatStatus):
            flat_repr["status"] = flat_repr["status"].name
        if isinstance(flat_repr["utilities"], FlatUtilities):
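The date validators above now tolerate a missing value instead of crashing inside `arrow.get(None)`. A minimal standalone sketch of that SQLAlchemy `@validates` pattern, on a hypothetical toy model rather than flatisfy's schema:

```python
import arrow
from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base, validates

Base = declarative_base()

class Visit(Base):
    __tablename__ = "visits"
    id = Column(Integer, primary_key=True)
    visit_date = Column(DateTime)

    @validates("visit_date")
    def validate_visit_date(self, _, visit_date):
        # Allow clearing the date: None passes through instead of crashing
        # inside arrow.get(), which is what the diff above fixes.
        if visit_date:
            return arrow.get(visit_date).naive
        return None

v = Visit(visit_date="2021-03-01T10:00:00")  # coerced to a naive datetime
v.visit_date = None                          # accepted, clears the field
```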
@@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals

import logging

from sqlalchemy import (
    Column, Float, Integer, String, UniqueConstraint
)
from sqlalchemy import Column, Float, Integer, String, UniqueConstraint

from flatisfy.database.base import BASE

@@ -21,6 +19,7 @@ class PostalCode(BASE):
    """
    SQLAlchemy ORM model to store a postal code opendata.
    """

    __tablename__ = "postal_codes"

    id = Column(Integer, primary_key=True)
@@ -28,6 +27,7 @@ class PostalCode(BASE):
    # following ISO 3166-2.
    area = Column(String, index=True)
    postal_code = Column(String, index=True)
    insee_code = Column(String, index=True)
    name = Column(String, index=True)
    lat = Column(Float)
    lng = Column(Float)
@@ -41,8 +41,4 @@ class PostalCode(BASE):
        Return a dict representation of this postal code object that is JSON
        serializable.
        """
        return {
            k: v
            for k, v in self.__dict__.items()
            if not k.startswith("_")
        }
        return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
@@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals

import logging

from sqlalchemy import (
    Column, Float, Integer, String
)
from sqlalchemy import Column, Float, Integer, String

from flatisfy.database.base import BASE

@@ -21,6 +19,7 @@ class PublicTransport(BASE):
    """
    SQLAlchemy ORM model to store public transport opendata.
    """

    __tablename__ = "public_transports"

    id = Column(Integer, primary_key=True)
@ -30,6 +30,7 @@ class LocalImageCache(ImageCache):
|
||||
"""
|
||||
A local cache for images, stored in memory.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def on_miss(path):
|
||||
"""
|
||||
@ -46,48 +47,34 @@ class TestTexts(unittest.TestCase):
|
||||
"""
|
||||
Checks string normalizations.
|
||||
"""
|
||||
|
||||
def test_roman_numbers(self):
|
||||
"""
|
||||
Checks roman numbers replacement.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"XIV",
|
||||
tools.convert_arabic_to_roman("14")
|
||||
)
|
||||
self.assertEqual("XIV", tools.convert_arabic_to_roman("14"))
|
||||
|
||||
self.assertEqual(
|
||||
"XXXIX",
|
||||
tools.convert_arabic_to_roman("39")
|
||||
)
|
||||
self.assertEqual("XXXIX", tools.convert_arabic_to_roman("39"))
|
||||
|
||||
self.assertEqual(
|
||||
"40",
|
||||
tools.convert_arabic_to_roman("40")
|
||||
)
|
||||
self.assertEqual("40", tools.convert_arabic_to_roman("40"))
|
||||
|
||||
self.assertEqual(
|
||||
"1987",
|
||||
tools.convert_arabic_to_roman("1987")
|
||||
)
|
||||
self.assertEqual("1987", tools.convert_arabic_to_roman("1987"))
|
||||
|
||||
self.assertEqual(
|
||||
"Dans le XVe arrondissement",
|
||||
tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement")
|
||||
tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
"XXeme arr.",
|
||||
tools.convert_arabic_to_roman_in_text("20eme arr.")
|
||||
)
|
||||
self.assertEqual("XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr."))
|
||||
|
||||
self.assertEqual(
|
||||
"A AIX EN PROVENCE",
|
||||
tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE")
|
||||
tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE"),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
"Montigny Le Bretonneux",
|
||||
tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux")
|
||||
tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux"),
|
||||
)
|
||||
|
||||
def test_roman_numbers_in_text(self):
|
||||
@ -97,77 +84,54 @@ class TestTexts(unittest.TestCase):
|
||||
"""
|
||||
self.assertEqual(
|
||||
"dans le XVe arrondissement",
|
||||
tools.normalize_string("Dans le 15e arrondissement")
|
||||
tools.normalize_string("Dans le 15e arrondissement"),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
"paris XVe, 75005",
|
||||
tools.normalize_string("Paris 15e, 75005")
|
||||
)
|
||||
self.assertEqual("paris XVe, 75005", tools.normalize_string("Paris 15e, 75005"))
|
||||
|
||||
self.assertEqual(
|
||||
"paris xve, 75005",
|
||||
tools.normalize_string("Paris XVe, 75005")
|
||||
)
|
||||
self.assertEqual("paris xve, 75005", tools.normalize_string("Paris XVe, 75005"))
|
||||
|
||||
def test_multiple_whitespaces(self):
|
||||
"""
|
||||
Checks whitespaces are collapsed.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"avec ascenseur",
|
||||
tools.normalize_string("avec ascenseur")
|
||||
)
|
||||
self.assertEqual("avec ascenseur", tools.normalize_string("avec ascenseur"))
|
||||
|
||||
def test_whitespace_trim(self):
|
||||
"""
|
||||
Checks that trailing and beginning whitespaces are trimmed.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"rennes 35000",
|
||||
tools.normalize_string(" Rennes 35000 ")
|
||||
)
|
||||
self.assertEqual("rennes 35000", tools.normalize_string(" Rennes 35000 "))
|
||||
|
||||
def test_accents(self):
|
||||
"""
|
||||
Checks accents are replaced.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"eeeaui",
|
||||
tools.normalize_string(u"éèêàüï")
|
||||
)
|
||||
self.assertEqual("eeeaui", tools.normalize_string(u"éèêàüï"))
|
||||
|
||||
|
||||
class TestPhoneNumbers(unittest.TestCase):
|
||||
"""
|
||||
Checks phone numbers normalizations.
|
||||
"""
|
||||
|
||||
def test_prefix(self):
|
||||
"""
|
||||
Checks phone numbers with international prefixes.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"0605040302",
|
||||
duplicates.homogeneize_phone_number("+33605040302")
|
||||
)
|
||||
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("+33605040302"))
|
||||
|
||||
def test_dots_separators(self):
|
||||
"""
|
||||
Checks phone numbers with dots.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"0605040302",
|
||||
duplicates.homogeneize_phone_number("06.05.04.03.02")
|
||||
)
|
||||
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02"))
|
||||
|
||||
def test_spaces_separators(self):
|
||||
"""
|
||||
Checks phone numbers with spaces.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"0605040302",
|
||||
duplicates.homogeneize_phone_number("06 05 04 03 02")
|
||||
)
|
||||
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02"))
|
||||
|
||||
|
||||
class TestPhotos(unittest.TestCase):
@ -183,96 +147,104 @@ class TestPhotos(unittest.TestCase):
"""
Compares a photo against itself.
"""
photo = {
"url": TESTS_DATA_DIR + "127028739@seloger.jpg"
}
photo = {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}

self.assertTrue(duplicates.compare_photos(
photo,
photo,
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertTrue(duplicates.compare_photos(photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD))

def test_different_photos(self):
"""
Compares two different photos.
"""
self.assertFalse(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertFalse(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

self.assertFalse(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertFalse(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

def test_matching_photos(self):
"""
Compares two matching photos with different size and source.
"""
self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
{"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
{"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
self.IMAGE_CACHE,
self.HASH_THRESHOLD,
)
)

def test_matching_cropped_photos(self):
"""
Compares two matching photos with one being cropped.
"""
# Fixme: the image hash threshold should be 10 ideally
self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "vertical.jpg"},
{"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
self.IMAGE_CACHE,
20
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "vertical.jpg"},
{"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
self.IMAGE_CACHE,
20,
)
)

# Fixme: the image hash threshold should be 10 ideally
self.assertTrue(duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
{"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
self.IMAGE_CACHE,
20
))
self.assertTrue(
duplicates.compare_photos(
{"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
{"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
self.IMAGE_CACHE,
20,
)
)
class TestImageCache(unittest.TestCase):
"""
Checks image cache is working as expected.
"""

def __init__(self, *args, **kwargs):
self.IMAGE_CACHE = ImageCache(  # pylint: disable=invalid-name
storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
)
self.IMAGE_CACHE = ImageCache(storage_dir=tempfile.mkdtemp(prefix="flatisfy-"))  # pylint: disable=invalid-name
super(TestImageCache, self).__init__(*args, **kwargs)

def test_invalid_url(self):
@ -280,27 +252,22 @@ class TestImageCache(unittest.TestCase):
Check that it returns nothing on an invalid URL.
"""
# See https://framagit.org/phyks/Flatisfy/issues/116.
self.assertIsNone(
self.IMAGE_CACHE.get("https://httpbin.org/status/404")
)
self.assertIsNone(
self.IMAGE_CACHE.get("https://httpbin.org/status/500")
)
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/404"))
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/500"))

def test_invalid_data(self):
"""
Check that it returns nothing on invalid data.
"""
# See https://framagit.org/phyks/Flatisfy/issues/116.
self.assertIsNone(
self.IMAGE_CACHE.get("https://httpbin.org/")
)
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/"))
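Both tests encode the same contract: `ImageCache.get` must return `None` rather than raise, whether the URL answers with an HTTP error status or with a payload that is not a decodable image. A plausible sketch of that contract (hypothetical; the real `ImageCache` additionally persists downloads under `storage_dir`):

```python
from io import BytesIO

import requests
from PIL import Image


def fetch_image_or_none(url):
    """Return a PIL image, or None on HTTP errors or non-image payloads."""
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # 404/500 end up in the except branch
        return Image.open(BytesIO(response.content))
    except (requests.exceptions.RequestException, IOError):
        return None
```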
class TestDuplicates(unittest.TestCase):
"""
Checks duplicates detection.
"""

DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8  # pylint: disable=invalid-name
DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15  # pylint: disable=invalid-name
HASH_THRESHOLD = 10  # pylint: disable=invalid-name
@ -316,9 +283,7 @@ class TestDuplicates(unittest.TestCase):
"""
Generates a fake flat post.
"""
backend = BACKENDS_BY_PRECEDENCE[
random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)
]
backend = BACKENDS_BY_PRECEDENCE[random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)]
return {
"id": str(random.randint(100000, 199999)) + "@" + backend,
"phone": "0607080910",
@ -326,7 +291,7 @@ class TestDuplicates(unittest.TestCase):
"utilities": "",
"area": random.randint(200, 1500) / 10,
"cost": random.randint(100000, 300000),
"bedrooms": random.randint(1, 4)
"bedrooms": random.randint(1, 4),
}

@staticmethod
@ -350,10 +315,7 @@ class TestDuplicates(unittest.TestCase):
"""
flat1 = self.generate_fake_flat()
flat2 = copy.deepcopy(flat1)
score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_different_prices(self):
@ -364,10 +326,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1)
flat2["cost"] += 1000

score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_different_rooms(self):
@ -379,10 +338,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1)
flat2["rooms"] += 1

score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_different_areas(self):
@ -393,10 +349,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1)
flat2["area"] += 10

score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_different_areas_decimals(self):
@ -409,10 +362,7 @@ class TestDuplicates(unittest.TestCase):
flat1["area"] = 50.65
flat2["area"] = 50.37

score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_different_phones(self):
@ -424,10 +374,7 @@ class TestDuplicates(unittest.TestCase):
flat2 = copy.deepcopy(flat1)
flat2["phone"] = "0708091011"

score = duplicates.get_duplicate_score(
flat1, flat2,
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

def test_real_duplicates(self):
@ -435,15 +382,9 @@ class TestDuplicates(unittest.TestCase):
Two flats with same price, area and rooms quantity should be detected
as duplicates.
"""
flats = self.load_files(
"127028739@seloger",
"14428129@explorimmo"
)
flats = self.load_files("127028739@seloger", "14428129@explorimmo")

score = duplicates.get_duplicate_score(
flats[0], flats[1],
self.IMAGE_CACHE, self.HASH_THRESHOLD
)
score = duplicates.get_duplicate_score(flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD)
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)

# TODO: fixme, find new testing examples
@ -502,8 +443,13 @@ def run():
"""
LOGGER.info("Running tests…")
try:
for testsuite in [TestTexts, TestPhoneNumbers, TestImageCache,
TestDuplicates, TestPhotos]:
for testsuite in [
TestTexts,
TestPhoneNumbers,
TestImageCache,
TestDuplicates,
TestPhotos,
]:
suite = unittest.TestLoader().loadTestsFromTestCase(testsuite)
result = unittest.TextTestRunner(verbosity=2).run(suite)
assert result.wasSuccessful()
@ -3,9 +3,7 @@
This module contains basic utility functions, such as pretty printing of JSON
output, checking that a value is within a given interval etc.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from __future__ import absolute_import, division, print_function, unicode_literals

import datetime
import itertools
@ -41,7 +39,7 @@ def next_weekday(d, weekday):
:returns: The datetime object for the next given weekday.
"""
days_ahead = weekday - d.weekday()
if days_ahead <= 0: # Target day already happened this week
if days_ahead <= 0:  # Target day already happened this week
days_ahead += 7
return d + datetime.timedelta(days_ahead)

@ -61,8 +59,18 @@ def convert_arabic_to_roman(arabic):
return arabic

to_roman = {
1: 'I', 2: 'II', 3: 'III', 4: 'IV', 5: 'V', 6: 'VI', 7: 'VII',
8: 'VIII', 9: 'IX', 10: 'X', 20: 'XX', 30: 'XXX'
1: "I",
2: "II",
3: "III",
4: "IV",
5: "V",
6: "VI",
7: "VII",
8: "VIII",
9: "IX",
10: "X",
20: "XX",
30: "XXX",
}
roman_chars_list = []
count = 1
@ -71,7 +79,7 @@ def convert_arabic_to_roman(arabic):
if digit != 0:
roman_chars_list.append(to_roman[digit * count])
count *= 10
return ''.join(roman_chars_list[::-1])
return "".join(roman_chars_list[::-1])
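The loop above walks the arabic digits from least to most significant, maps each digit times its place value (`digit * count`) through `to_roman`, and finally reverses the collected chunks. A worked example, assuming the elided top of the function parses its argument as an integer (the table stopping at `30: "XXX"` caps the supported range below 40):

```python
# 15 -> units digit 5 -> "V"; tens digit 1 -> to_roman[1 * 10] == "X".
# roman_chars_list == ["V", "X"]; reversed and joined -> "XV".
assert convert_arabic_to_roman("15") == "XV"

# Zero digits contribute nothing: only the tens chunk remains.
assert convert_arabic_to_roman("10") == "X"
```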
def convert_arabic_to_roman_in_text(text):
@ -82,11 +90,7 @@ def convert_arabic_to_roman_in_text(text):
:returns: The corresponding text with arabic numerals converted to
roman.
"""
return re.sub(
r'(\d+)',
lambda matchobj: convert_arabic_to_roman(matchobj.group(0)),
text
)
return re.sub(r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text)


def hash_dict(func):
@ -96,11 +100,13 @@

From https://stackoverflow.com/a/44776960.
"""

class HDict(dict):
"""
Transform a mutable dictionary into an immutable one. Useful to be
compatible with lru_cache.
"""

def __hash__(self):
return hash(json.dumps(self))

@ -108,17 +114,10 @@ def hash_dict(func):
"""
The wrapped function
"""
args = tuple(
[
HDict(arg) if isinstance(arg, dict) else arg
for arg in args
]
)
kwargs = {
k: HDict(v) if isinstance(v, dict) else v
for k, v in kwargs.items()
}
args = tuple([HDict(arg) if isinstance(arg, dict) else arg for arg in args])
kwargs = {k: HDict(v) if isinstance(v, dict) else v for k, v in kwargs.items()}
return func(*args, **kwargs)

return wrapped
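`HDict` exists so that dict arguments become hashable, which is exactly what `functools.lru_cache` needs. Following the Stack Overflow answer the docstring cites, the intended stacking order is presumably:

```python
import functools


@hash_dict
@functools.lru_cache(maxsize=128)
def lookup(config):
    # By the time lru_cache sees it, `config` is an HDict, hence hashable.
    return config["key"]


lookup({"key": "value"})  # computed once
lookup({"key": "value"})  # identical dict: served from the cache
```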
@ -126,6 +125,7 @@ class DateAwareJSONEncoder(json.JSONEncoder):
"""
Extend the default JSON encoder to serialize datetimes to iso strings.
"""

def default(self, o):  # pylint: disable=locally-disabled,E0202
if isinstance(o, (datetime.date, datetime.datetime)):
return o.isoformat()
@ -153,9 +153,7 @@ def pretty_json(data):
"toto": "ok"
}
"""
return json.dumps(data, cls=DateAwareJSONEncoder,
indent=4, separators=(',', ': '),
sort_keys=True)
return json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True)
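Together, `DateAwareJSONEncoder` and `pretty_json` produce deterministic, human-readable dumps where dates become ISO strings. A quick check of the expected behavior (the date value is an illustrative addition to the docstring's example):

```python
import datetime
import json

data = {"date": datetime.date(2021, 1, 31), "toto": "ok"}
print(json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True))
# {
#     "date": "2021-01-31",
#     "toto": "ok"
# }
```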
def batch(iterable, size):
@ -294,10 +292,7 @@ def distance(gps1, gps2):
long2 = math.radians(gps2[1])

# pylint: disable=locally-disabled,invalid-name
a = (
math.sin((lat2 - lat1) / 2.0)**2 +
math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0)**2
)
a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
earth_radius = 6371000
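The reformatted one-liner is the standard haversine formula: `a` is the squared half-chord between the two points, `c` the central angle, and multiplying by the mean Earth radius (6 371 000 m) yields the great-circle distance in meters. A rough sanity check (coordinates and the expected figure are illustrative):

```python
# Paris (48.8566 N, 2.3522 E) to Lyon (45.7640 N, 4.8357 E):
paris = (48.8566, 2.3522)
lyon = (45.7640, 4.8357)
print(distance(paris, lyon))  # roughly 392000 meters great-circle
```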
@ -385,13 +380,14 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
"from": "%s;%s" % (latlng_from[1], latlng_from[0]),
"to": "%s;%s" % (latlng_to[1], latlng_to[0]),
"datetime": date_from.isoformat(),
"count": 1
"count": 1,
}
try:
# Do the query to Navitia API
req = requests.get(
NAVITIA_ENDPOINT, params=payload,
auth=(config["navitia_api_key"], "")
NAVITIA_ENDPOINT,
params=payload,
auth=(config["navitia_api_key"], ""),
)
req.raise_for_status()

@ -400,28 +396,28 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
for section in journeys["sections"]:
if section["type"] == "public_transport":
# Public transport
sections.append({
"geojson": section["geojson"],
"color": (
section["display_informations"].get("color", None)
)
})
sections.append(
{
"geojson": section["geojson"],
"color": (section["display_informations"].get("color", None)),
}
)
elif section["type"] == "street_network":
# Walking
sections.append({
"geojson": section["geojson"],
"color": None
})
sections.append({"geojson": section["geojson"], "color": None})
else:
# Skip anything else
continue
except (requests.exceptions.RequestException,
ValueError, IndexError, KeyError) as exc:
except (
requests.exceptions.RequestException,
ValueError,
IndexError,
KeyError,
) as exc:
# Ignore any possible exception
LOGGER.warning(
"An exception occurred during travel time lookup on "
"Navitia: %s.",
str(exc)
"An exception occurred during travel time lookup on Navitia: %s.",
str(exc),
)
else:
LOGGER.warning(
@ -430,50 +426,43 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
)
elif mode in [TimeToModes.WALK, TimeToModes.BIKE, TimeToModes.CAR]:
MAPBOX_MODES = {
TimeToModes.WALK: 'mapbox/walking',
TimeToModes.BIKE: 'mapbox/cycling',
TimeToModes.CAR: 'mapbox/driving'
TimeToModes.WALK: "mapbox/walking",
TimeToModes.BIKE: "mapbox/cycling",
TimeToModes.CAR: "mapbox/driving",
}
# Check that Mapbox API key is available
if config["mapbox_api_key"]:
try:
service = mapbox.Directions(
access_token=config['mapbox_api_key']
)
service = mapbox.Directions(access_token=config["mapbox_api_key"])
origin = {
'type': 'Feature',
'properties': {'name': 'Start'},
'geometry': {
'type': 'Point',
'coordinates': [latlng_from[1], latlng_from[0]]}}
"type": "Feature",
"properties": {"name": "Start"},
"geometry": {
"type": "Point",
"coordinates": [latlng_from[1], latlng_from[0]],
},
}
destination = {
'type': 'Feature',
'properties': {'name': 'End'},
'geometry': {
'type': 'Point',
'coordinates': [latlng_to[1], latlng_to[0]]}}
response = service.directions(
[origin, destination], MAPBOX_MODES[mode]
)
"type": "Feature",
"properties": {"name": "End"},
"geometry": {
"type": "Point",
"coordinates": [latlng_to[1], latlng_to[0]],
},
}
response = service.directions([origin, destination], MAPBOX_MODES[mode])
response.raise_for_status()
route = response.geojson()['features'][0]
route = response.geojson()["features"][0]
# Fix longitude/latitude inversion in geojson output
geometry = route['geometry']
geometry['coordinates'] = [
(x[1], x[0]) for x in geometry['coordinates']
]
sections = [{
"geojson": geometry,
"color": "000"
}]
travel_time = route['properties']['duration']
except (requests.exceptions.RequestException,
IndexError, KeyError) as exc:
geometry = route["geometry"]
geometry["coordinates"] = [(x[1], x[0]) for x in geometry["coordinates"]]
sections = [{"geojson": geometry, "color": "000"}]
travel_time = route["properties"]["duration"]
except (requests.exceptions.RequestException, IndexError, KeyError) as exc:
# Ignore any possible exception
LOGGER.warning(
"An exception occurred during travel time lookup on "
"Mapbox: %s.",
str(exc)
"An exception occurred during travel time lookup on Mapbox: %s.",
str(exc),
)
else:
LOGGER.warning(
@ -482,10 +471,7 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
)

if travel_time:
return {
"time": travel_time,
"sections": sections
}
return {"time": travel_time, "sections": sections}
return None

@ -493,6 +479,7 @@ def timeit(func):
"""
A decorator that logs how much time was spent in the function.
"""

def wrapped(*args, **kwargs):
"""
The wrapped function
@ -502,4 +489,5 @@ def timeit(func):
runtime = time.time() - before
LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime)
return res

return wrapped
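`timeit` is a plain logging decorator: it records the wall-clock runtime of any function it wraps through `LOGGER.info`, using the format string shown above. Typical usage:

```python
import time


@timeit
def slow_task():
    time.sleep(1)


slow_task()
# Logs roughly: "slow_task -- Execution took 1.001... seconds."
```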
@ -2,9 +2,7 @@
"""
This module contains the definition of the Bottle web app.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from __future__ import absolute_import, division, print_function, unicode_literals

import functools
import json
@ -25,14 +23,12 @@ class QuietWSGIRefServer(bottle.WSGIRefServer):
Quiet implementation of Bottle built-in WSGIRefServer, as `Canister` is
handling the logging through standard Python logging.
"""

# pylint: disable=locally-disabled,too-few-public-methods
quiet = True

def run(self, app):
app.log.info(
'Server is now up and ready! Listening on %s:%s.' %
(self.host, self.port)
)
app.log.info("Server is now up and ready! Listening on %s:%s." % (self.host, self.port))
super(QuietWSGIRefServer, self).run(app)

@ -42,12 +38,10 @@ def _serve_static_file(filename):
"""
return bottle.static_file(
filename,
root=os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"static"
)
root=os.path.join(os.path.dirname(os.path.realpath(__file__)), "static"),
)

def get_app(config):
"""
Get a Bottle app instance with all the routes set-up.
@ -65,77 +59,69 @@ def get_app(config):
app.install(canister.Canister())
# Use DateAwareJSONEncoder to dump JSON strings
# From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long
app.install(
bottle.JSONPlugin(
json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)
)
)
app.install(bottle.JSONPlugin(json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)))

# Enable CORS
@app.hook('after_request')
@app.hook("after_request")
def enable_cors():
"""
Add CORS headers at each request.
"""
# The str() call is required as we import unicode_literals and WSGI
# headers list should have plain str type.
bottle.response.headers[str('Access-Control-Allow-Origin')] = str('*')
bottle.response.headers[str('Access-Control-Allow-Methods')] = str(
'PUT, GET, POST, DELETE, OPTIONS, PATCH'
)
bottle.response.headers[str('Access-Control-Allow-Headers')] = str(
'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
bottle.response.headers[str("Access-Control-Allow-Origin")] = str("*")
bottle.response.headers[str("Access-Control-Allow-Methods")] = str("PUT, GET, POST, DELETE, OPTIONS, PATCH")
bottle.response.headers[str("Access-Control-Allow-Headers")] = str(
"Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token"
)
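Because the hook runs after every request, the CORS headers can be verified against any endpoint of a running instance (the host and port below are assumptions matching a default local setup):

```python
import requests

response = requests.options("http://localhost:8080/api/v1")
print(response.headers.get("Access-Control-Allow-Origin"))   # *
print(response.headers.get("Access-Control-Allow-Methods"))  # PUT, GET, POST, DELETE, OPTIONS, PATCH
```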
# API v1 routes
app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)

app.route("/api/v1/time_to_places", ["GET", "OPTIONS"],
api_routes.time_to_places_v1)
app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1)

app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"],
api_routes.update_flat_v1)
app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"], api_routes.update_flat_v1)

app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"],
api_routes.ics_feed_v1)
app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"], api_routes.ics_feed_v1)

app.route("/api/v1/search", ["POST", "OPTIONS"], api_routes.search_v1)

app.route("/api/v1/opendata", ["GET", "OPTIONS"], api_routes.opendata_index_v1)
app.route("/api/v1/opendata/postal_codes", ["GET", "OPTIONS"],
api_routes.opendata_postal_codes_v1)
app.route(
"/api/v1/opendata/postal_codes",
["GET", "OPTIONS"],
api_routes.opendata_postal_codes_v1,
)

app.route("/api/v1/metadata", ["GET", "OPTIONS"], api_routes.metadata_v1)
app.route("/api/v1/import", ["GET", "OPTIONS"], api_routes.import_v1)

# Index
app.route("/", "GET", lambda: _serve_static_file("index.html"))

# Static files
app.route("/favicon.ico", "GET",
lambda: _serve_static_file("favicon.ico"))
app.route("/favicon.ico", "GET", lambda: _serve_static_file("favicon.ico"))
app.route(
"/assets/<filename:path>", "GET",
lambda filename: _serve_static_file("/assets/{}".format(filename))
"/assets/<filename:path>",
"GET",
lambda filename: _serve_static_file("/assets/{}".format(filename)),
)
app.route(
"/img/<filename:path>", "GET",
lambda filename: _serve_static_file("/img/{}".format(filename))
"/img/<filename:path>",
"GET",
lambda filename: _serve_static_file("/img/{}".format(filename)),
)
app.route(
"/.well-known/<filename:path>", "GET",
lambda filename: _serve_static_file("/.well-known/{}".format(filename))
"/.well-known/<filename:path>",
"GET",
lambda filename: _serve_static_file("/.well-known/{}".format(filename)),
)
app.route(
"/data/img/<filename:path>", "GET",
lambda filename: bottle.static_file(
filename,
root=os.path.join(
config["data_directory"],
"images"
)
)
"/data/img/<filename:path>",
"GET",
lambda filename: bottle.static_file(filename, root=os.path.join(config["data_directory"], "images")),
)

return app
@ -7,9 +7,7 @@ This module is heavily based on code from
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
licensed under MIT license.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from __future__ import absolute_import, division, print_function, unicode_literals

import functools
import inspect
@ -22,7 +20,8 @@ class ConfigPlugin(object):
A Bottle plugin to automatically pass the config object to the routes
specifying they need it.
"""
name = 'config'

name = "config"
api = 2
KEYWORD = "config"

@ -41,9 +40,7 @@ class ConfigPlugin(object):
if not isinstance(other, ConfigPlugin):
continue
else:
raise bottle.PluginError(
"Found another conflicting Config plugin."
)
raise bottle.PluginError("Found another conflicting Config plugin.")

def apply(self, callback, route):
"""
@ -7,9 +7,7 @@ This module is heavily based on code from
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
licensed under MIT license.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from __future__ import absolute_import, division, print_function, unicode_literals

import inspect

@ -21,7 +19,8 @@ class DatabasePlugin(object):
A Bottle plugin to automatically pass an SQLAlchemy database session object
to the routes specifying they need it.
"""
name = 'database'

name = "database"
api = 2
KEYWORD = "db"

@ -41,9 +40,7 @@ class DatabasePlugin(object):
if not isinstance(other, DatabasePlugin):
continue
else:
raise bottle.PluginError(
"Found another conflicting Database plugin."
)
raise bottle.PluginError("Found another conflicting Database plugin.")

def apply(self, callback, route):
"""
@ -64,6 +61,7 @@ class DatabasePlugin(object):
if self.KEYWORD not in callback_args:
# If no need for a db session, call the route callback
return callback

def wrapper(*args, **kwargs):
"""
Wrap the callback in a call to get_session.
@ -72,6 +70,7 @@ class DatabasePlugin(object):
# Get a db session and pass it to the callback
kwargs[self.KEYWORD] = session
return callback(*args, **kwargs)

return wrapper
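With the plugin installed, any route whose callback signature contains the `db` keyword receives a database session, while other routes are passed through untouched. A sketch of the consumer side (the constructor argument and the route body are assumptions for illustration):

```python
import bottle

app = bottle.Bottle()
app.install(DatabasePlugin(get_session))  # get_session: assumed session factory


@app.route("/flats")
def list_flats(db):
    # The argument name matches DatabasePlugin.KEYWORD, so the wrapper
    # injects a live SQLAlchemy session here.
    return {"count": db.query(Flat).count()}  # Flat: placeholder model
```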
@ -16,7 +16,7 @@
<style>
body {
margin: 0 auto;
max-width: 75em;
/* max-width: 75em; */
font-family: "Helvetica", "Arial", sans-serif;
line-height: 1.5;
padding: 4em 1em;
flatisfy/web/js_src/components/flat.vue (new file, 398 lines)
@ -0,0 +1,398 @@
<template>
<div>
<template v-if="isLoading">
<p>{{ $t("common.loading") }}</p>
</template>
<div class="grid" v-else-if="flat && timeToPlaces">
<div class="left-panel">
<h2>
(<!--
--><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
-->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
--></router-link><!--
-->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
<span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
</h2>
<div class="grid">
<div class="left-panel">
<span>
{{ flat.cost | cost(flat.currency) }}
<template v-if="flat.utilities === 'included'">
{{ $t("flatsDetails.utilities_included") }}
</template>
<template v-else-if="flat.utilities === 'excluded'">
{{ $t("flatsDetails.utilities_excluded") }}
</template>
</span>
<span v-if="flat.flatisfy_postal_code.postal_code">
à {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
</span>
</div>
<span class="right-panel right">
<template v-if="flat.area"><span>{{flat.area}} m<sup>2</sup></span></template><template v-if="flat.rooms"><span>, {{flat.rooms}} {{ $tc("flatsDetails.rooms", flat.rooms) }}</span></template><template v-if="flat.bedrooms"><span>/ {{flat.bedrooms}} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}</span></template>
</span>
</div>
<div>
<template v-if="flat.photos && flat.photos.length > 0">
<Slider :photos="flat.photos"></Slider>
</template>
</div>
<div>
<h3>{{ $t("flatsDetails.Description") }}</h3>
<p>{{ flat.text }}</p>
<p class="right">{{ flat.location }}</p>
<p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
</div>
<div>
<h3>{{ $t("flatsDetails.Details") }}</h3>
<table>
<tr v-for="(value, key) in flat.details">
<th>{{ key }}</th>
<td>{{ value }}</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Metadata") }}</h3>
<table>
<tr>
<th>
{{ $t("flatsDetails.postal_code") }}
</th>
<td>
<template v-if="flat.flatisfy_postal_code.postal_code">
{{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
</template>
<template v-else>
?
</template>
</td>
</tr>

<tr v-if="displayedStations">
<th>
{{ $t("flatsDetails.nearby_stations") }}
</th>
<td>
{{ displayedStations }}
</td>
</tr>
<tr v-if="Object.keys(flat.flatisfy_time_to).length">
<th>
{{ $t("flatsDetails.Times_to") }}
</th>
<td>
<ul class="time_to_list">
<li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
{{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
</li>
</ul>
</td>
</tr>
<tr>
<th>
{{ $t("flatsDetails.SquareMeterCost") }}
</th>
<td>
{{ flat.sqCost }} {{ flat.currency }}
</td>
</tr>
</table>
</div>
<div>
<h3>{{ $t("flatsDetails.Location") }}</h3>

<FlatsMap :flats="flatMarker" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
</div>
<div>
<h3>{{ $t("flatsDetails.Notes") }}</h3>

<form v-on:submit="updateFlatNotes">
<textarea ref="notesTextarea" rows="10" :v-model="flat.notes"></textarea>
<p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
</form>
</div>
</div>

<div class="right-panel">
<h3>{{ $t("flatsDetails.Contact") }}</h3>
<div class="contact">
<template v-if="flat.phone">
<p v-for="phoneNumber in flat.phone.split(',')">
<a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a>
</p>
</template>
<template v-if="flat.urls.length == 1">
<a :href="flat.urls[0]" target="_blank">
{{ $tc("common.Original_post", 1) }}
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
</template>
<template v-else-if="flat.urls.length > 1">
<p>{{ $tc("common.Original_post", flat.urls.length) }}
<ul>
<li v-for="(url, index) in flat.urls">
<a :href="url" target="_blank">
{{ $tc("common.Original_post", 1) }} {{ index + 1 }}
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
</li>
</ul>
</p>
</template>
</div>

<h3>{{ $t("flatsDetails.Visit") }}</h3>
<div class="visit">
<flat-pickr
:value="flatpickrValue"
:config="flatpickrConfig"
:placeholder="$t('flatsDetails.setDateOfVisit')"
/>
</div>

<h3>{{ $t("common.Actions") }}</h3>

<nav>
<ul>
<template v-if="flat.status !== 'user_deleted'">
<Notation :flat="flat"></Notation>
<li>
<button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
<i class="fa fa-trash" aria-hidden="true"></i>
{{ $t("common.Remove") }}
</button>
</li>
</template>
<template v-else>
<li>
<button v-on:click="updateFlatStatus('new')" class="fullButton">
<i class="fa fa-undo" aria-hidden="true"></i>
{{ $t("common.Restore") }}
</button>
</li>
</template>
</ul>
</nav>
</div>
</div>
</div>
</template>

<script>
import flatPickr from 'vue-flatpickr-component'
import moment from 'moment'
import 'font-awesome-webpack'
import 'flatpickr/dist/flatpickr.css'

import FlatsMap from '../components/flatsmap.vue'
import Slider from '../components/slider.vue'
import Notation from '../components/notation.vue'

import { capitalize } from '../tools'

export default {
components: {
FlatsMap,
Slider,
flatPickr,
Notation
},

created () {
this.fetchData()
},

data () {
return {
// TODO: Flatpickr locale
'overloadNotation': null,
'flatpickrConfig': {
static: true,
altFormat: 'h:i K, M j, Y',
altInput: true,
enableTime: true,
onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
}
}
},

props: ['flat'],

computed: {
isLoading () {
return this.$store.getters.isLoading
},
flatMarker () {
return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.flat.id)
},
'flatpickrValue' () {
if (this.flat && this.flat.visit_date) {
return this.flat.visit_date.local().format()
}
return null
},
timeToPlaces () {
return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
},
notation () {
if (this.overloadNotation) {
return this.overloadNotation
}
return this.flat.notation
},
journeys () {
if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
const journeys = []
for (const place in this.flat.flatisfy_time_to) {
this.flat.flatisfy_time_to[place].sections.forEach(
section => journeys.push({
geojson: section.geojson,
options: {
color: section.color ? ('#' + section.color) : '#2196f3',
dashArray: section.color ? 'none' : '2, 10'
}
})
)
}
return journeys
}
return []
},
displayedStations () {
if (this.flat.flatisfy_stations.length > 0) {
const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
return stationsNames.join(', ')
} else {
return null
}
}
},

watch: {
flat: 'fetchData'
},

methods: {
fetchData () {
this.$store.dispatch('getAllTimeToPlaces')
},

updateFlatStatus (status) {
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: status })
},

updateFlatNotes () {
const notes = this.$refs.notesTextarea.value
this.$store.dispatch(
'updateFlatNotes',
{ flatId: this.flat.id, newNotes: notes }
)
},

updateFlatVisitDate (date) {
if (date) {
date = moment(date).utc().format()
}
this.$store.dispatch(
'updateFlatVisitDate',
{ flatId: this.flat.id, newVisitDate: date }
)
},

humanizeTimeTo (time) {
const minutes = Math.floor(time.as('minutes'))
return minutes + ' ' + this.$tc('common.mins', minutes)
},

normalizePhoneNumber (phoneNumber) {
phoneNumber = phoneNumber.replace(/ /g, '')
phoneNumber = phoneNumber.replace(/\./g, '')
return phoneNumber
},

capitalize: capitalize
}
}
</script>

<style scoped>
.expired {
font-weight: bold;
text-transform: uppercase;
}

@media screen and (min-width: 768px) {
.grid {
display: grid;
grid-gap: 50px;
grid-template-columns: 75fr 25fr;
}

.left-panel {
grid-column: 1;
grid-row: 1;
}

.right-panel {
grid-column: 2;
grid-row: 1;
}
}

.left-panel textarea {
width: 100%;
}

.right {
text-align: right;
}

nav ul {
list-style-type: none;
padding-left: 1em;
}

.contact {
padding-left: 1em;
}

.right-panel li {
margin-bottom: 1em;
margin-top: 1em;
}

button {
cursor: pointer;
width: 75%;
padding: 0.3em;
font-size: 0.9em;
}

table {
table-layout: fixed;
}

td {
word-wrap: break-word;
word-break: break-all;
white-space: normal;
}

.time_to_list {
margin: 0;
padding-left: 0;
list-style-position: outside;
list-style-type: none;
}

@media screen and (max-width: 767px) {
.right-panel nav {
text-align: center;
}

.fullButton {
width: 100%;
}
}
</style>
@ -1,21 +1,26 @@
<template lang="html">
<div class="full">
<v-map :zoom="zoom.defaultZoom" :center="center" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom">
<v-map v-if="bounds" :zoom="zoom.defaultZoom" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom" v-on:click="$emit('select-flat', null)" @update:bounds="bounds = $event">
<v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer>
<template v-for="marker in flats">
<v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat">
<v-popup :content="marker.content"></v-popup>
</v-marker>
</template>
<template v-for="(place_gps, place_name) in places">
<v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
<v-tooltip :content="place_name"></v-tooltip>
</v-marker>
</template>
<v-marker-cluster>
<template v-for="marker in flats">
<v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat" v-on:click="$emit('select-flat', marker.flatId)">
<!-- <v-popup :content="marker.content"></v-popup> -->
</v-marker>
</template>
</v-marker-cluster>
<v-marker-cluster>
<template v-for="(place_gps, place_name) in places">
<v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
<v-tooltip :content="place_name"></v-tooltip>
</v-marker>
</template>
</v-marker-cluster>
<template v-for="journey in journeys">
<v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer>
</template>
</v-map>
<div v-else>Nothing to display yet</div>
</div>
</template>

@ -31,10 +36,13 @@ L.Icon.Default.mergeOptions({
})

import 'leaflet/dist/leaflet.css'
import 'leaflet.markercluster/dist/MarkerCluster.css'
import 'leaflet.markercluster/dist/MarkerCluster.Default.css'

require('leaflet.icon.glyph')

import Vue2Leaflet from 'vue2-leaflet'
import { LMap, LTileLayer, LMarker, LTooltip, LPopup, LGeoJson } from 'vue2-leaflet'
import Vue2LeafletMarkerCluster from 'vue2-leaflet-markercluster'

export default {
data () {
@ -46,11 +54,11 @@ export default {
fillColor: '#e4ce7f',
fillOpacity: 1
},
center: null,
bounds: [[40.91351257612758, -7.580566406250001], [51.65892664880053, 12.0849609375]],
zoom: {
defaultZoom: 13,
defaultZoom: 6,
minZoom: 5,
maxZoom: 17
maxZoom: 20
},
tiles: {
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
@ -67,25 +75,27 @@ export default {
},

components: {
'v-map': Vue2Leaflet.Map,
'v-tilelayer': Vue2Leaflet.TileLayer,
'v-marker': Vue2Leaflet.Marker,
'v-tooltip': Vue2Leaflet.Tooltip,
'v-popup': Vue2Leaflet.Popup,
'v-geojson-layer': Vue2Leaflet.GeoJSON
'v-map': LMap,
'v-tilelayer': LTileLayer,
'v-marker': LMarker,
'v-marker-cluster': Vue2LeafletMarkerCluster,
'v-tooltip': LTooltip,
'v-popup': LPopup,
'v-geojson-layer': LGeoJson
},

computed: {
bounds () {
let bounds = []
this.flats.forEach(flat => bounds.push(flat.gps))
Object.keys(this.places).forEach(place => bounds.push(this.places[place]))
watch: {
flats: 'computeBounds',
places: 'computeBounds'
},

if (bounds.length > 0) {
bounds = L.latLngBounds(bounds)
return bounds
} else {
return null
methods: {
computeBounds (newData, oldData) {
if (this.flats.length && JSON.stringify(newData) !== JSON.stringify(oldData)) {
const allBounds = []
this.flats.forEach(flat => allBounds.push(flat.gps))
Object.keys(this.places).forEach(place => allBounds.push(this.places[place]))
this.bounds = allBounds.length ? L.latLngBounds(allBounds) : undefined
}
}
},
@ -1,16 +1,11 @@
<template>
<tr>
<td v-if="showNotationColumn">
<template v-for="n in notationRange">
<i class="fa fa-star" aria-hidden="true" :title="capitalizedStatus"></i>
</template>
<Notation :flat="flat" :title="capitalizedStatus"></Notation>
</td>
<td class="no-padding">
<Notation v-if="!showNotationColumn" :flat="flat" :title="capitalizedStatus"></Notation>
<router-link class="fill" :to="{name: 'details', params: {id: flat.id}}">
<template v-if="!showNotationColumn" v-for="n in notationRange">
<i class="fa fa-star" aria-hidden="true" :title="capitalizedStatus"></i>
</template>

[{{ flat.id.split("@")[1] }}]
<span class="expired">{{ flat.is_expired ? "[" + $t("common.expired") + "]" : null }}</span>
{{ flat.title }}
@ -31,7 +26,7 @@
{{ flat.rooms ? flat.rooms : '?'}}
</td>
<td>
{{ flat.cost }} {{ flat.currency }}
{{ flat.cost | cost(flat.currency) }}
<template v-if="flat.utilities == 'included'">
{{ $t("flatsDetails.utilities_included") }}
</template>
@ -60,7 +55,8 @@
</template>

<script>
import { capitalize, range } from '../tools'
import { capitalize } from '../tools'
import Notation from '../components/notation.vue'

export default {
props: {
@ -69,6 +65,10 @@ export default {
showNotes: Boolean
},

components: {
Notation
},

computed: {
capitalizedStatus () {
return capitalize(this.$t('status.followed'))
@ -81,9 +81,6 @@ export default {
return this.flat.photos[0].url
}
return null
},
notationRange () {
return range(this.flat.notation)
}
},
flatisfy/web/js_src/components/notation.vue (new file, 68 lines)
@ -0,0 +1,68 @@
<template>
<div>
<template v-for="n in range(5)">
<button v-bind:key="n" v-on:mouseover="handleHover(n)" v-on:mouseout="handleOut()" v-on:click="updateNotation(n)">
<i class="fa" v-bind:class="{'fa-star': n < notation, 'fa-star-o': n >= notation}" aria-hidden="true"></i>
</button>
</template>
</div>
</template>

<script>

import { range } from '../tools'
import 'flatpickr/dist/flatpickr.css'

export default {
data () {
return {
'overloadNotation': null
}
},

props: ['flat'],

computed: {
notation () {
if (this.overloadNotation) {
return this.overloadNotation
}
return this.flat.notation
}
},

methods: {
updateNotation (notation) {
notation = notation + 1

if (notation === this.flat.notation) {
this.flat.notation = 0
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: 0 })
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'new' })
} else {
this.flat.notation = notation
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: notation })
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'followed' })
}
},

handleHover (n) {
this.overloadNotation = n + 1
},

handleOut () {
this.overloadNotation = null
},

range: range
}
}
</script>

<style scoped>
button {
border: none;
width: auto;
background-color: transparent;
}
</style>
@ -3,9 +3,12 @@ import Vue from 'vue'
import i18n from './i18n'
import router from './router'
import store from './store'
import { costFilter } from './tools'

import App from './components/app.vue'

Vue.filter('cost', costFilter)

new Vue({
i18n,
router,
@ -1,73 +1,88 @@
import { findFlatGPS } from '../tools'
import { findFlatGPS, costFilter } from '../tools'

export default {
allFlats: state => state.flats,
allFlats: (state) => state.flats,

flat: (state, getters) => id => state.flats.find(flat => flat.id === id),
flat: (state, getters) => (id) =>
state.flats.find((flat) => flat.id === id),

isLoading: state => state.loading > 0,
isLoading: (state) => state.loading > 0,

postalCodesFlatsBuckets: (state, getters) => filter => {
const postalCodeBuckets = {}
inseeCodesFlatsBuckets: (state, getters) => (filter) => {
const buckets = {};

state.flats.forEach(flat => {
state.flats.forEach((flat) => {
if (!filter || filter(flat)) {
const postalCode = flat.flatisfy_postal_code.postal_code
if (!postalCodeBuckets[postalCode]) {
postalCodeBuckets[postalCode] = {
'name': flat.flatisfy_postal_code.name,
'flats': []
}
const insee = flat.flatisfy_postal_code.insee_code;
if (!buckets[insee]) {
buckets[insee] = {
name: flat.flatisfy_postal_code.name,
flats: [],
};
}
postalCodeBuckets[postalCode].flats.push(flat)
buckets[insee].flats.push(flat);
}
})
});

return postalCodeBuckets
return buckets;
},

flatsMarkers: (state, getters) => (router, filter) => {
const markers = []
state.flats.forEach(flat => {
const markers = [];
state.flats.forEach((flat) => {
if (filter && filter(flat)) {
const gps = findFlatGPS(flat)
const gps = findFlatGPS(flat);

if (gps) {
const previousMarkerIndex = markers.findIndex(
marker => marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
)

const href = router.resolve({ name: 'details', params: { id: flat.id }}).href
if (previousMarkerIndex !== -1) {
markers[previousMarkerIndex].content += '<br/><a href="' + href + '">' + flat.title + '</a>'
} else {
markers.push({
'title': '',
'content': '<a href="' + href + '">' + flat.title + '</a>',
'gps': gps
})
const previousMarker = markers.find(
(marker) =>
marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
);
if (previousMarker) {
// randomize position a bit
// gps[0] += (Math.random() - 0.5) / 500
// gps[1] += (Math.random() - 0.5) / 500
}
const href = router.resolve({
name: "details",
params: { id: flat.id },
}).href;
const cost = flat.cost
? costFilter(flat.cost, flat.currency)
: "";
markers.push({
title: "",
content:
'<a href="' +
href +
'">' +
flat.title +
"</a>" +
cost,
gps: gps,
flatId: flat.id,
});
}
}
})
});

return markers
return markers;
},

allTimeToPlaces: state => {
const places = {}
Object.keys(state.timeToPlaces).forEach(constraint => {
const constraintTimeToPlaces = state.timeToPlaces[constraint]
Object.keys(constraintTimeToPlaces).forEach(name => {
places[name] = constraintTimeToPlaces[name]
})
})
return places
allTimeToPlaces: (state) => {
const places = {};
Object.keys(state.timeToPlaces).forEach((constraint) => {
const constraintTimeToPlaces = state.timeToPlaces[constraint];
Object.keys(constraintTimeToPlaces).forEach((name) => {
places[name] = constraintTimeToPlaces[name];
});
});
return places;
},

timeToPlaces: (state, getters) => (constraintName) => {
return state.timeToPlaces[constraintName]
return state.timeToPlaces[constraintName];
},

metadata: state => state.metadata
}
metadata: (state) => state.metadata,
};
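`inseeCodesFlatsBuckets` is a plain group-by over the flats list, now keyed on the INSEE code instead of the postal code. The same bucketing expressed in Python, mirroring the field names used by the getter above:

```python
from collections import defaultdict


def insee_codes_flats_buckets(flats, predicate=None):
    """Group flats by INSEE code, keeping the city name alongside."""
    buckets = defaultdict(lambda: {"name": None, "flats": []})
    for flat in flats:
        if predicate is None or predicate(flat):
            postal = flat["flatisfy_postal_code"]
            bucket = buckets[postal["insee_code"]]
            bucket["name"] = postal["name"]
            bucket["flats"].append(flat)
    return dict(buckets)
```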
@ -1,8 +1,10 @@
export function findFlatGPS (flat) {
let gps

// Try to push a marker based on stations
if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) {
if (flat.flatisfy_position) {
gps = [flat.flatisfy_position.lat, flat.flatisfy_position.lng]
} else if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) {
// Try to push a marker based on stations
gps = [0.0, 0.0]
flat.flatisfy_stations.forEach(station => {
gps = [gps[0] + station.gps[0], gps[1] + station.gps[1]]
@ -23,3 +25,18 @@ export function capitalize (string) {
export function range (n) {
return [...Array(n).keys()]
}

export function costFilter (value, currency) {
if (!value) {
return 'N/A'
}

if (currency === 'EUR') {
currency = ' €'
}

var valueStr = value.toString()
valueStr = ' '.repeat((3 - (valueStr.length % 3)) % 3) + valueStr

return valueStr.match(/.{1,3}/g).join('.') + currency
}
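`costFilter` left-pads the price so its length is a multiple of three, cuts it into groups of three characters joined with dots, then appends the currency. The same logic in Python, mirroring the corrected padding above (the `.strip()` only drops the padding spaces for display):

```python
def cost_filter(value, currency):
    if not value:
        return "N/A"
    if currency == "EUR":
        currency = " €"
    value_str = str(value)
    # Pad so the length is a multiple of 3, then group by thousands.
    value_str = " " * ((3 - len(value_str) % 3) % 3) + value_str
    groups = [value_str[i:i + 3] for i in range(0, len(value_str), 3)]
    return ".".join(groups).strip() + currency


assert cost_filter(1250, "EUR") == "1.250 €"
assert cost_filter(None, "EUR") == "N/A"
```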
@@ -3,217 +3,18 @@
<template v-if="isLoading">
    <p>{{ $t("common.loading") }}</p>
</template>
<div class="grid" v-else-if="flat && timeToPlaces">
    <div class="left-panel">
        <h2>
            (<!--
            --><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
            -->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
            --></router-link><!--
            -->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
            <span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
        </h2>
        <div class="grid">
            <div class="left-panel">
                <p>
                    {{ flat.cost }} {{ flat.currency }}
                    <template v-if="flat.utilities === 'included'">
                        {{ $t("flatsDetails.utilities_included") }}
                    </template>
                    <template v-else-if="flat.utilities === 'excluded'">
                        {{ $t("flatsDetails.utilities_excluded") }}
                    </template>
                </p>
            </div>
            <p class="right-panel right">
                {{ flat.area ? flat.area : '?' }} m<sup>2</sup>,
                {{ flat.rooms ? flat.rooms : '?' }} {{ $tc("flatsDetails.rooms", flat.rooms) }} /
                {{ flat.bedrooms ? flat.bedrooms : '?' }} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}
            </p>
        </div>
        <div>
            <template v-if="flat.photos && flat.photos.length > 0">
                <Slider :photos="flat.photos"></Slider>
            </template>
        </div>
        <div>
            <h3>{{ $t("flatsDetails.Description") }}</h3>
            <p>{{ flat.text }}</p>
            <p class="right">{{ flat.location }}</p>
            <p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
        </div>
        <div>
            <h3>{{ $t("flatsDetails.Details") }}</h3>
            <table>
                <tr v-for="(value, key) in flat.details">
                    <th>{{ key }}</th>
                    <td>{{ value }}</td>
                </tr>
            </table>
        </div>
        <div>
            <h3>{{ $t("flatsDetails.Metadata") }}</h3>
            <table>
                <tr>
                    <th>
                        {{ $t("flatsDetails.postal_code") }}
                    </th>
                    <td>
                        <template v-if="flat.flatisfy_postal_code.postal_code">
                            {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
                        </template>
                        <template v-else>
                            ?
                        </template>
                    </td>
                </tr>

                <tr>
                    <th>
                        {{ $t("flatsDetails.nearby_stations") }}
                    </th>
                    <td>
                        <template v-if="displayedStations">
                            {{ displayedStations }}
                        </template>
                        <template v-else>
                            ?
                        </template>
                    </td>
                </tr>
                <tr>
                    <th>
                        {{ $t("flatsDetails.Times_to") }}
                    </th>
                    <td>
                        <template v-if="Object.keys(flat.flatisfy_time_to).length">
                            <ul class="time_to_list">
                                <li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
                                    {{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
                                </li>
                            </ul>
                        </template>
                        <template v-else>
                            ?
                        </template>
                    </td>
                </tr>
                <tr>
                    <th>
                        {{ $t("flatsDetails.SquareMeterCost") }}
                    </th>
                    <td>
                        {{ flat.sqCost }} {{ flat.currency }}
                    </td>
                </tr>
            </table>
        </div>
        <div>
            <h3>{{ $t("flatsDetails.Location") }}</h3>

            <FlatsMap :flats="flatMarkers" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
        </div>
        <div>
            <h3>{{ $t("flatsDetails.Notes") }}</h3>

            <form v-on:submit="updateFlatNotes">
                <textarea ref="notesTextarea" rows="10">{{ flat.notes }}</textarea>
                <p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
            </form>
        </div>
    </div>

    <div class="right-panel">
        <h3>{{ $t("flatsDetails.Contact") }}</h3>
        <div class="contact">
            <p>
                <template v-if="flat.phone">
                    <template v-for="phoneNumber in flat.phone.split(',')">
                        <a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a><br/>
                    </template>
                </template>
                <template v-else>
                    {{ $t("flatsDetails.no_phone_found") }}
                </template>
            </p>
            <p>{{ $tc("common.Original_post", 42) }}
                <ul>
                    <li v-for="(url, index) in flat.urls">
                        <a :href="url">
                            {{ $tc("common.Original_post", 1) }} {{ index + 1 }}
                            <i class="fa fa-external-link" aria-hidden="true"></i>
                        </a>
                    </li>
                </ul>
            </p>
        </div>

        <h3>{{ $t("flatsDetails.Visit") }}</h3>
        <div class="visit">
            <flat-pickr
                :value="flatpickrValue"
                :config="flatpickrConfig"
                :placeholder="$t('flatsDetails.setDateOfVisit')"
            />
        </div>

        <h3>{{ $t("common.Actions") }}</h3>

        <nav>
            <ul>
                <template v-if="flat.status !== 'user_deleted'">
                    <li ref="notationButton">
                        <template v-for="n in range(notation)">
                            <button class="btnIcon" v-on:mouseover="handleNotationHover(n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(n)">
                                <i class="fa fa-star" aria-hidden="true"></i>
                            </button>
                        </template>
                        <template v-for="n in range(5 - notation)">
                            <button class="btnIcon" v-on:mouseover="handleNotationHover(notation + n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(notation + n)">
                                <i class="fa fa-star-o" aria-hidden="true"></i>
                            </button>
                        </template>
                    </li>
                    <li>
                        <button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
                            <i class="fa fa-trash" aria-hidden="true"></i>
                            {{ $t("common.Remove") }}
                        </button>
                    </li>
                </template>
                <template v-else>
                    <li>
                        <button v-on:click="updateFlatStatus('new')" class="fullButton">
                            <i class="fa fa-undo" aria-hidden="true"></i>
                            {{ $t("common.Restore") }}
                        </button>
                    </li>
                </template>
            </ul>
        </nav>
    </div>
</div>
<Flat :flat="flat"></Flat>
</div>
</template>

<script>
import flatPickr from 'vue-flatpickr-component'
import moment from 'moment'
import 'font-awesome-webpack'
import 'flatpickr/dist/flatpickr.css'

import FlatsMap from '../components/flatsmap.vue'
import Slider from '../components/slider.vue'

import { capitalize, range } from '../tools'
import Flat from '../components/flat.vue'

export default {
    components: {
        FlatsMap,
        Slider,
        flatPickr,
        Flat
    },

    created () {
        document.title = this.title // Set title

@@ -232,20 +33,6 @@ export default {
        }
    },

    data () {
        return {
            // TODO: Flatpickr locale
            'overloadNotation': null,
            'flatpickrConfig': {
                static: true,
                altFormat: 'h:i K, M j, Y',
                altInput: true,
                enableTime: true,
                onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
            }
        }
    },

    computed: {
        isLoading () {
            return this.$store.getters.isLoading

@@ -253,204 +40,16 @@ export default {
        title () {
            return 'Flatisfy - ' + this.$route.params.id
        },
        flatMarkers () {
            return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.$route.params.id)
        },
        flat () {
            return this.$store.getters.flat(this.$route.params.id)
        },
        'flatpickrValue' () {
            if (this.flat && this.flat.visit_date) {
                return this.flat.visit_date.local().format()
            }
            return null
        },
        timeToPlaces () {
            return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
        },
        notation () {
            if (this.overloadNotation) {
                return this.overloadNotation
            }
            return this.flat.notation
        },
        journeys () {
            if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
                const journeys = []
                for (const place in this.flat.flatisfy_time_to) {
                    this.flat.flatisfy_time_to[place].sections.forEach(
                        section => journeys.push({
                            geojson: section.geojson,
                            options: {
                                color: section.color ? ('#' + section.color) : '#2196f3',
                                dashArray: section.color ? 'none' : '2, 10'
                            }
                        })
                    )
                }
                return journeys
            }
            return []
        },
        displayedStations () {
            if (this.flat.flatisfy_stations.length > 0) {
                const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
                return stationsNames.join(', ')
            } else {
                return null
            }
        }
    },

    methods: {
        fetchData () {
            this.$store.dispatch('getFlat', { flatId: this.$route.params.id })
            this.$store.dispatch('getAllTimeToPlaces')
        },

        updateFlatNotation (notation) {
            notation = notation + 1

            if (notation === this.flat.notation) {
                this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: 0 })
                this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'new' })
            } else {
                this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: notation })
                this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'followed' })
            }
        },

        updateFlatStatus (status) {
            this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: status })
        },

        updateFlatNotes () {
            const notes = this.$refs.notesTextarea.value
            this.$store.dispatch(
                'updateFlatNotes',
                { flatId: this.$route.params.id, newNotes: notes }
            )
        },

        updateFlatVisitDate (date) {
            if (date) {
                date = moment(date).utc().format()
            }
            this.$store.dispatch(
                'updateFlatVisitDate',
                { flatId: this.$route.params.id, newVisitDate: date }
            )
        },

        humanizeTimeTo (time) {
            const minutes = Math.floor(time.as('minutes'))
            return minutes + ' ' + this.$tc('common.mins', minutes)
        },

        handleNotationHover (n) {
            this.overloadNotation = n + 1
        },

        handleNotationOut () {
            this.overloadNotation = null
        },

        normalizePhoneNumber (phoneNumber) {
            phoneNumber = phoneNumber.replace(/ /g, '')
            phoneNumber = phoneNumber.replace(/\./g, '')
            return phoneNumber
        },

        capitalize: capitalize,

        range: range
    }
}
}
</script>

<style scoped>
.expired {
    font-weight: bold;
    text-transform: uppercase;
}

@media screen and (min-width: 768px) {
    .grid {
        display: grid;
        grid-gap: 50px;
        grid-template-columns: 75fr 25fr;
    }

    .left-panel {
        grid-column: 1;
        grid-row: 1;
    }

    .right-panel {
        grid-column: 2;
        grid-row: 1;
    }
}

.left-panel textarea {
    width: 100%;
}

.right {
    text-align: right;
}

nav ul {
    list-style-type: none;
    padding-left: 1em;
}

.contact {
    padding-left: 1em;
}

.right-panel li {
    margin-bottom: 1em;
    margin-top: 1em;
}

button {
    cursor: pointer;
    width: 75%;
    padding: 0.3em;
    font-size: 0.9em;
}

table {
    table-layout: fixed;
}

td {
    word-wrap: break-word;
    word-break: break-all;
    white-space: normal;
}

.time_to_list {
    margin: 0;
    padding-left: 0;
    list-style-position: outside;
    list-style-type: none;
}

.btnIcon {
    border: none;
    width: auto;
    background-color: transparent;
}

@media screen and (max-width: 767px) {
    .right-panel nav {
        text-align: center;
    }

    .fullButton {
        width: 100%;
    }
}
</style>

@@ -1,50 +1,56 @@
<template>
    <div>
        <FlatsMap :flats="flatsMarkers" :places="timeToPlaces"></FlatsMap>

        <h2>
            {{ $t("home.new_available_flats") }}
            <template v-if="lastUpdate">
                <label class="show-last-update">
                    {{ $t("home.Last_update") }} {{ lastUpdate.fromNow() }}
                </label>
            </template>
            <label class="show-expired-flats-label">
                <input type="checkbox" class="show-expired-flats-checkbox" v-model="showExpiredFlats" />
                {{ $t("home.show_expired_flats") }}
            </label>
        </h2>

        <template v-if="Object.keys(postalCodesFlatsBuckets).length > 0">
            <template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
                <h3>
                    {{ postal_code_data.name || $t('common.Unknown') }}
                    <span v-if="postal_code !== 'undefined'">
                        ({{ postal_code }})
                    </span>
                    - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}
                </h3>
                <FlatsTable :flats="postal_code_data.flats" :key="postal_code"></FlatsTable>
        <div class="flex-row">
            <div class="flex" style="overflow: auto;">
                <FlatsMap :flats="flatsMarkers" :places="timeToPlaces" v-on:select-flat="selectFlat($event)"></FlatsMap>
                <h2>
                    {{ $t("home.new_available_flats") }}
                    <template v-if="lastUpdate">
                        <label class="show-last-update">
                            {{ $t("home.Last_update") }} {{ lastUpdate.fromNow() }}
                        </label>
                    </template>
            </template>
            <template v-else-if="isLoading">
                <p>{{ $t("common.loading") }}</p>
            </template>
            <template v-else>
                <p>{{ $t("flatListing.no_available_flats") }}</p>
            </template>
                    <label class="show-expired-flats-label">
                        <input type="checkbox" class="show-expired-flats-checkbox" v-model="showExpiredFlats" />
                        {{ $t("home.show_expired_flats") }}
                    </label>
                </h2>

                <template v-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
                    <template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
                        <h3>
                            {{ insee_code_data.name || $t('common.Unknown') }}
                            <span v-if="insee_code !== 'undefined'">
                                ({{ insee_code }})
                            </span>
                            - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}
                        </h3>
                        <FlatsTable :flats="insee_code_data.flats" :key="insee_code"></FlatsTable>
                    </template>
                </template>
                <template v-else-if="isLoading">
                    <p>{{ $t("common.loading") }}</p>
                </template>
                <template v-else>
                    <p>{{ $t("flatListing.no_available_flats") }}</p>
                </template>
            </div>
            <div v-if="selectedFlat" class="flex">
                <Flat :flat="selectedFlat"></Flat>
            </div>
        </div>
    </div>
</template>

<script>
import FlatsMap from '../components/flatsmap.vue'
import FlatsTable from '../components/flatstable.vue'
import Flat from '../components/flat.vue'
import moment from 'moment'

export default {
    components: {
        FlatsMap,
        FlatsTable
        FlatsTable,
        Flat
    },

    created () {

@@ -60,13 +66,25 @@ export default {

    data () {
        return {
            showExpiredFlats: false
            showExpiredFlats: false,
            selectedFlat: undefined
        }
    },

    methods: {
        selectFlat: async function (flatId) {
            if (flatId) {
                await this.$store.dispatch('getFlat', { flatId })
                this.selectedFlat = await this.$store.getters.flat(flatId)
            } else {
                this.selectedFlat = undefined
            }
        }
    },

    computed: {
        postalCodesFlatsBuckets () {
            return this.$store.getters.postalCodesFlatsBuckets(flat =>
        inseeCodesFlatsBuckets () {
            return this.$store.getters.inseeCodesFlatsBuckets(flat =>
                flat.status === 'new' &&
                (this.showExpiredFlats || !flat.is_expired)
            )

@@ -100,7 +118,12 @@ h2 {
    display: flex;
    justify-content: space-between;
}

.flex-row {
    display:flex;
}
.flex {
    flex: 1;
}
table {
    margin-left: 0;
    margin-right: 0;

@@ -12,10 +12,10 @@
<template v-if="isLoading">
    <p>{{ $t("common.loading") }}</p>
</template>
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length > 0">
    <template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
        <h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3>
        <FlatsTable :flats="postal_code_data.flats"></FlatsTable>
<template v-else-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
    <template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
        <h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
        <FlatsTable :flats="insee_code_data.flats"></FlatsTable>
    </template>
</template>
<template v-else>

@@ -51,12 +51,12 @@ export default {
    },

    computed: {
        postalCodesFlatsBuckets () {
        inseeCodesFlatsBuckets () {
            if (!this.$route.query.query || this.loading) {
                return {}
            }

            return this.$store.getters.postalCodesFlatsBuckets(
            return this.$store.getters.inseeCodesFlatsBuckets(
                flat => flat.status !== 'duplicate' && flat.status !== 'ignored' && flat.status !== 'user_deleted'
            )
        },

@@ -17,11 +17,11 @@
<template v-if="isLoading">
    <p>{{ $t("common.loading") }}</p>
</template>
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length">
    <template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
        <h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3>
<template v-else-if="Object.keys(inseeCodesFlatsBuckets).length">
    <template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
        <h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
        <FlatsTable
            :flats="postal_code_data.flats"
            :flats="insee_code_data.flats"
            :showNotationColumn="$route.params.status === 'followed'"
            :showNotes="$route.params.status === 'followed'"
            :initialSortBy="$route.params.status === 'followed' ? 'notation' : undefined"

@@ -81,8 +81,8 @@ export default {
    },

    computed: {
        postalCodesFlatsBuckets () {
            return this.$store.getters.postalCodesFlatsBuckets(flat => flat.status === this.$route.params.status)
        inseeCodesFlatsBuckets () {
            return this.$store.getters.inseeCodesFlatsBuckets(flat => flat.status === this.$route.params.status)
        },
        title () {
            return 'Flatisfy - ' + capitalize(this.$t('status.' + this.$route.params.status))

@@ -2,13 +2,12 @@
"""
This module contains the definition of the web app API routes.
"""
from __future__ import (
    absolute_import, division, print_function, unicode_literals
)
from __future__ import absolute_import, division, print_function, unicode_literals

import datetime
import itertools
import json
import logging
import re
import os

@@ -17,7 +16,9 @@ import vobject

import flatisfy.data
from flatisfy.models import flat as flat_model
from flatisfy.models import postal_code
from flatisfy.models.postal_code import PostalCode
from flatisfy import cmds

FILTER_RE = re.compile(r"filter\[([A-z0-9_]+)\]")

@@ -60,26 +61,24 @@ def _JSONApiSpec(query, model, default_sorting=None):
    # Handle pagination according to JSON API spec
    page_number, page_size = 0, None
    try:
        if 'page[size]' in query:
            page_size = int(query['page[size]'])
        if "page[size]" in query:
            page_size = int(query["page[size]"])
            assert page_size > 0
        if 'page[number]' in query:
            page_number = int(query['page[number]'])
        if "page[number]" in query:
            page_number = int(query["page[number]"])
            assert page_number >= 0
    except (AssertionError, ValueError):
        raise ValueError("Invalid pagination provided.")

    # Handle sorting according to JSON API spec
    sorting = []
    if 'sort' in query:
        for index in query['sort'].split(','):
    if "sort" in query:
        for index in query["sort"].split(","):
            try:
                sort_field = getattr(model, index.lstrip('-'))
                sort_field = getattr(model, index.lstrip("-"))
            except AttributeError:
                raise ValueError(
                    "Invalid sorting key provided: {}.".format(index)
                )
            if index.startswith('-'):
                raise ValueError("Invalid sorting key provided: {}.".format(index))
            if index.startswith("-"):
                sort_field = sort_field.desc()
            sorting.append(sort_field)
    # Default sorting options

@@ -87,11 +86,7 @@ def _JSONApiSpec(query, model, default_sorting=None):
        try:
            sorting.append(getattr(model, default_sorting))
        except AttributeError:
            raise ValueError(
                "Invalid default sorting key provided: {}.".format(
                    default_sorting
                )
            )
            raise ValueError("Invalid default sorting key provided: {}.".format(default_sorting))

    return filters, page_number, page_size, sorting

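To illustrate the JSON API query conventions parsed above, a hypothetical request would be unpacked roughly as follows (the `filter[...]` handling relies on the `FILTER_RE` shown earlier; the exact filter code is outside this hunk):

```python
# GET /api/v1/flats?filter[status]=new&page[size]=10&page[number]=2&sort=-cost
filters = {"status": "new"}               # from "filter[status]" via FILTER_RE
page_number, page_size = 2, 10            # third page, ten items per page
sorting = [flat_model.Flat.cost.desc()]   # leading "-" selects descending order
```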
@@ -110,22 +105,22 @@ def _serialize_flat(flat, config):

    postal_codes = {}
    for constraint_name, constraint in config["constraints"].items():
        postal_codes[constraint_name] = flatisfy.data.load_data(
            PostalCode, constraint, config
        )
        postal_codes[constraint_name] = flatisfy.data.load_data(PostalCode, constraint, config)

    try:
        assert flat["flatisfy_postal_code"]
        assert flat["flatisfy_position"]

        lat = flat["flatisfy_position"]["lat"]
        lng = flat["flatisfy_position"]["lng"]
        postal_code_data = next(
            x
            for x in postal_codes.get(flat["flatisfy_constraint"], [])
            if x.postal_code == flat["flatisfy_postal_code"]
            x for x in postal_codes.get(flat["flatisfy_constraint"], []) if x.lat == lat and x.lng == lng
        )
        logging.warn(f"{postal_code_data.name}, {lat}, {lng}")
        flat["flatisfy_postal_code"] = {
            "postal_code": flat["flatisfy_postal_code"],
            "postal_code": postal_code_data.postal_code,
            "insee_code": postal_code_data.insee_code,
            "name": postal_code_data.name,
            "gps": (postal_code_data.lat, postal_code_data.lng)
            "gps": (postal_code_data.lat, postal_code_data.lng),
        }
    except (AssertionError, StopIteration):
        flat["flatisfy_postal_code"] = {}
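For illustration, a flat whose position matches a known record ends up with a `flatisfy_postal_code` block along these lines (made-up values):

```python
flat["flatisfy_postal_code"] = {
    "postal_code": "75013",                  # hypothetical example values
    "insee_code": "75113",
    "name": "Paris 13e Arrondissement",
    "gps": (48.8322, 2.3561),
}
# On AssertionError or StopIteration (missing position, or no record with the
# same lat/lng), the block falls back to an empty dict instead.
```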
@@ -148,7 +143,7 @@ def index_v1():
        "search": "/api/v1/search",
        "ics": "/api/v1/ics/visits.ics",
        "time_to_places": "/api/v1/time_to_places",
        "metadata": "/api/v1/metadata"
        "metadata": "/api/v1/metadata",
    }


@@ -179,36 +174,32 @@ def flats_v1(config, db):

    :return: The available flats objects in a JSON ``data`` dict.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return ''
        return ""

    try:
        try:
            filters, page_number, page_size, sorting = _JSONApiSpec(
                bottle.request.query,
                flat_model.Flat,
                default_sorting='cost'
                bottle.request.query, flat_model.Flat, default_sorting="cost"
            )
        except ValueError as exc:
            return JSONError(400, str(exc))

        # Build flat list
        db_query = (
            db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
        )
        db_query = db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
        flats = [
            _serialize_flat(flat, config)
            for flat in itertools.islice(
                db_query,
                page_number * page_size if page_size else None,
                page_number * page_size + page_size if page_size else None
                page_number * page_size + page_size if page_size else None,
            )
        ]
        return {
            "data": flats,
            "page": page_number,
            "items_per_page": page_size if page_size else len(flats)
            "items_per_page": page_size if page_size else len(flats),
        }
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

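The slicing arithmetic above maps a page request onto query bounds; a small worked example with assumed values:

```python
import itertools

page_number, page_size = 2, 10
start = page_number * page_size             # 20
stop = page_number * page_size + page_size  # 30
# islice(db_query, 20, 30) yields the third page (items 21-30); with
# page_size=None both bounds are None and the whole result set is returned.
list(itertools.islice(range(100), start, stop))  # [20, 21, ..., 29]
```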
@@ -224,7 +215,7 @@ def flat_v1(flat_id, config, db):

    :return: The flat object in a JSON ``data`` dict.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

@@ -234,9 +225,7 @@ def flat_v1(flat_id, config, db):
        if not flat:
            return JSONError(404, "No flat with id {}.".format(flat_id))

        return {
            "data": _serialize_flat(flat, config)
        }
        return {"data": _serialize_flat(flat, config)}
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

@@ -260,7 +249,7 @@ def update_flat_v1(flat_id, config, db):

    :return: The new flat object in a JSON ``data`` dict.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

@@ -274,14 +263,9 @@ def update_flat_v1(flat_id, config, db):
        for key, value in json_body.items():
            setattr(flat, key, value)
    except ValueError as exc:
        return JSONError(
            400,
            "Invalid payload provided: {}.".format(str(exc))
        )
        return JSONError(400, "Invalid payload provided: {}.".format(str(exc)))

    return {
        "data": _serialize_flat(flat, config)
    }
    return {"data": _serialize_flat(flat, config)}
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

@@ -297,20 +281,15 @@ def time_to_places_v1(config):
    :return: The JSON dump of the places to compute time to (dict of places
        names mapped to GPS coordinates).
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    try:
        places = {}
        for constraint_name, constraint in config["constraints"].items():
            places[constraint_name] = {
                k: v["gps"]
                for k, v in constraint["time_to"].items()
            }
        return {
            "data": places
        }
            places[constraint_name] = {k: v["gps"] for k, v in constraint["time_to"].items()}
        return {"data": places}
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

@@ -345,7 +324,7 @@ def search_v1(db, config):

    :return: The matching flat objects in a JSON ``data`` dict.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

@@ -357,30 +336,25 @@ def search_v1(db, config):

    try:
        filters, page_number, page_size, sorting = _JSONApiSpec(
            bottle.request.query,
            flat_model.Flat,
            default_sorting='cost'
            bottle.request.query, flat_model.Flat, default_sorting="cost"
        )
    except ValueError as exc:
        return JSONError(400, str(exc))

    flats_db_query = (flat_model.Flat
                      .search_query(db, query)
                      .filter_by(**filters)
                      .order_by(*sorting))
    flats_db_query = flat_model.Flat.search_query(db, query).filter_by(**filters).order_by(*sorting)
    flats = [
        _serialize_flat(flat, config)
        for flat in itertools.islice(
            flats_db_query,
            page_number * page_size if page_size else None,
            page_number * page_size + page_size if page_size else None
            page_number * page_size + page_size if page_size else None,
        )
    ]

    return {
        "data": flats,
        "page": page_number,
        "items_per_page": page_size if page_size else len(flats)
        "items_per_page": page_size if page_size else len(flats),
    }
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

@@ -396,35 +370,33 @@ def ics_feed_v1(config, db):

    :return: The ICS feed for the visits.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    cal = vobject.iCalendar()
    try:
        flats_with_visits = db.query(flat_model.Flat).filter(
            flat_model.Flat.visit_date.isnot(None)
        )
        flats_with_visits = db.query(flat_model.Flat).filter(flat_model.Flat.visit_date.isnot(None))

        for flat in flats_with_visits:
            vevent = cal.add('vevent')
            vevent.add('dtstart').value = flat.visit_date
            vevent.add('dtend').value = (
                flat.visit_date + datetime.timedelta(hours=1)
            )
            vevent.add('summary').value = 'Visit - {}'.format(flat.title)
            vevent = cal.add("vevent")
            vevent.add("dtstart").value = flat.visit_date
            vevent.add("dtend").value = flat.visit_date + datetime.timedelta(hours=1)
            vevent.add("summary").value = "Visit - {}".format(flat.title)

            description = (
                '{} (area: {}, cost: {} {})\n{}#/flat/{}\n'.format(
                    flat.title, flat.area, flat.cost, flat.currency,
                    config['website_url'], flat.id
                )
            description = "{} (area: {}, cost: {} {})\n{}#/flat/{}\n".format(
                flat.title,
                flat.area,
                flat.cost,
                flat.currency,
                config["website_url"],
                flat.id,
            )
            description += '\n{}\n'.format(flat.text)
            description += "\n{}\n".format(flat.text)
            if flat.notes:
                description += '\n{}\n'.format(flat.notes)
                description += "\n{}\n".format(flat.notes)

            vevent.add('description').value = description
            vevent.add("description").value = description
    except Exception: # pylint: disable= broad-except
        pass

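As a standalone sketch of the vobject calls used above (made-up visit data; the real feed iterates over flats with a `visit_date`):

```python
import datetime
import vobject

visit_date = datetime.datetime(2021, 3, 1, 14, 0)  # hypothetical visit slot
cal = vobject.iCalendar()
vevent = cal.add("vevent")
vevent.add("dtstart").value = visit_date
vevent.add("dtend").value = visit_date + datetime.timedelta(hours=1)
vevent.add("summary").value = "Visit - Example flat"
print(cal.serialize())  # BEGIN:VCALENDAR ... BEGIN:VEVENT ... END:VCALENDAR
```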
@@ -439,13 +411,11 @@ def opendata_index_v1():

    GET /api/v1/opendata
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    return {
        "postal_codes": "/api/v1/opendata/postal_codes"
    }
    return {"postal_codes": "/api/v1/opendata/postal_codes"}


def opendata_postal_codes_v1(db):

@@ -476,36 +446,36 @@ def opendata_postal_codes_v1(db):

    :return: The postal codes data from opendata.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    try:
        try:
            filters, page_number, page_size, sorting = _JSONApiSpec(
                bottle.request.query,
                PostalCode,
                default_sorting='postal_code'
                bottle.request.query, PostalCode, default_sorting="postal_code"
            )
        except ValueError as exc:
            return JSONError(400, str(exc))

        db_query = db.query(PostalCode).filter_by(**filters).order_by(*sorting)
        postal_codes = [
            x.json_api_repr() for x in itertools.islice(
            x.json_api_repr()
            for x in itertools.islice(
                db_query,
                page_number * page_size if page_size else None,
                page_number * page_size + page_size if page_size else None
                page_number * page_size + page_size if page_size else None,
            )
        ]
        return {
            "data": postal_codes,
            "page": page_number,
            "items_per_page": page_size if page_size else len(postal_codes)
            "items_per_page": page_size if page_size else len(postal_codes),
        }
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))


def metadata_v1(config):
    """
    API v1 metadata of the application.

@@ -516,25 +486,39 @@ def metadata_v1(config):

    :return: The application metadata.
    """
    if bottle.request.method == 'OPTIONS':
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    try:
        last_update = None
        try:
            ts_file = os.path.join(
                config['data_directory'],
                'timestamp'
            )
            ts_file = os.path.join(config["data_directory"], "timestamp")
            last_update = os.path.getmtime(ts_file)
        except OSError:
            pass

        return {
            'data': {
                'last_update': last_update
            }
        }
        return {"data": {"last_update": last_update}}
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

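Since the endpoint simply reports the mtime of the `timestamp` file, responses look roughly like this (hypothetical value):

```python
# GET /api/v1/metadata
{"data": {"last_update": 1612800000.0}}  # Unix mtime of <data_directory>/timestamp
# or, if the timestamp file does not exist yet:
{"data": {"last_update": None}}
```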
def import_v1(config):
    """
    API v1 import new flats.

    Example::

        GET /api/v1/import

    :return: The new flats.
    """
    if bottle.request.method == "OPTIONS":
        # CORS
        return {}

    try:
        flats_id = cmds.import_and_filter(config, False, True)
        return {"flats": flats_id}
    except Exception as exc: # pylint: disable= broad-except
        return JSONError(500, str(exc))

2
import.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh -ev
python -m flatisfy import --config config.json --new-only -v "$@"

@@ -36,8 +36,7 @@ def run_migrations_offline():

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)
    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()

@@ -52,18 +51,17 @@ def run_migrations_online():
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
        )
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:

@@ -10,21 +10,15 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '8155b83242eb'
revision = "8155b83242eb"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        'flats',
        sa.Column('is_expired', sa.Boolean(), default=False)
    )
    op.add_column("flats", sa.Column("is_expired", sa.Boolean(), default=False))


def downgrade():
    op.drop_column(
        'flats',
        'is_expired'
    )
    op.drop_column("flats", "is_expired")

24
migrations/versions/9e58c66f1ac1_add_flat_insee_column.py
Normal file
@@ -0,0 +1,24 @@
"""Add flat INSEE column

Revision ID: 9e58c66f1ac1
Revises: d21933db9ad8
Create Date: 2021-02-08 16:31:18.961186

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "9e58c66f1ac1"
down_revision = "d21933db9ad8"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("postal_codes", sa.Column("insee_code", sa.String()))


def downgrade():
    op.drop_column("postal_codes", "insee_code")

69
migrations/versions/d21933db9ad8_add_flat_position_column.py
Normal file
@@ -0,0 +1,69 @@
"""Add flat position column

Revision ID: d21933db9ad8
Revises: 8155b83242eb
Create Date: 2021-02-08 16:26:37.190842

"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy.types as types
import json


class StringyJSON(types.TypeDecorator):
    """
    Stores and retrieves JSON as TEXT for SQLite.

    From
    https://avacariu.me/articles/2016/compiling-json-as-text-for-sqlite-with-sqlalchemy.

    .. note ::

        The associated field is immutable. That is, changes to the data
        (typically, changing the value of a dict field) will not trigger an
        update on the SQL side upon ``commit`` as the reference to the object
        will not have been updated. One should force the update by forcing an
        update of the reference (by performing a ``copy`` operation on the dict
        for instance).
    """

    impl = types.TEXT

    def process_bind_param(self, value, dialect):
        """
        Process the bound param, serialize the object to JSON before saving
        into database.
        """
        if value is not None:
            value = json.dumps(value)
        return value

    def process_result_value(self, value, dialect):
        """
        Process the value fetched from the database, deserialize the JSON
        string before returning the object.
        """
        if value is not None:
            value = json.loads(value)
        return value


# TypeEngine.with_variant says "use StringyJSON instead when
# connecting to 'sqlite'"
# pylint: disable=locally-disabled,invalid-name
MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")

# revision identifiers, used by Alembic.
revision = "d21933db9ad8"
down_revision = "8155b83242eb"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("flats", sa.Column("flatisfy_position", MagicJSON, default=False))


def downgrade():
    op.drop_column("flats", "flatisfy_position")

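A minimal sketch of how a model column backed by this type can be declared (hypothetical model; assumes SQLAlchemy 1.4+ for `declarative_base` in `sqlalchemy.orm`), so that dicts are stored as native JSON on most backends but as serialized TEXT on SQLite:

```python
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Flat(Base):
    __tablename__ = "flats"
    id = sa.Column(sa.String, primary_key=True)
    # Transparent JSON (de)serialization; TEXT-backed on SQLite via StringyJSON.
    flatisfy_position = sa.Column(MagicJSON)
```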
10
package.json
@@ -14,7 +14,8 @@
        "watch:dev": "webpack --colors --progress --watch",
        "build:prod": "NODE_ENV=production webpack --colors --progress -p",
        "watch:prod": "NODE_ENV=production webpack --colors --progress --watch -p",
        "lint": "eslint --ext .js,.vue ./flatisfy/web/js_src/**"
        "lint": "eslint --fix --ext .js,.vue ./flatisfy/web/js_src/**",
        "ziparound": "cp flatisfy/data_files/laposte.json node_modules/ziparound/laposte.json && node node_modules/ziparound"
    },
    "dependencies": {
        "es6-promise": "^4.1.0",

@@ -23,6 +24,7 @@
        "imagesloaded": "^4.1.1",
        "isomorphic-fetch": "^2.2.1",
        "isotope-layout": "^3.0.3",
        "leaflet": "^1.7.1",
        "leaflet.icon.glyph": "^0.2.0",
        "masonry": "0.0.2",
        "moment": "^2.18.1",

@@ -31,7 +33,8 @@
        "vue-i18n": "^6.1.1",
        "vue-images-loaded": "^1.1.2",
        "vue-router": "^2.4.0",
        "vue2-leaflet": "0.0.44",
        "vue2-leaflet": "2.6.0",
        "vue2-leaflet-markercluster": "^3.1.0",
        "vueisotope": "^3.0.0-rc",
        "vuex": "^2.3.0"
    },

@@ -53,6 +56,7 @@
        "vue-html-loader": "^1.2.4",
        "vue-loader": "^11.3.4",
        "vue-template-compiler": "^2.2.6",
        "webpack": "^2.3.3"
        "webpack": "^2.3.3",
        "ziparound": "1.0.0"
    }
}

@@ -8,6 +8,7 @@ future
imagehash
mapbox
pillow
ratelimit
requests
requests_mock
sqlalchemy

@@ -15,5 +16,6 @@ titlecase
unidecode
vobject
whoosh
https://git.weboob.org/weboob/devel/repository/archive.zip?ref=master
https://git.weboob.org/weboob/modules/repository/archive.zip?ref=master
git+https://git.weboob.org/weboob/weboob/
git+https://git.weboob.org/weboob/modules/
money

18
start.sh
Executable file
@@ -0,0 +1,18 @@
#!/bin/sh -ev

function clean_up {

    # Perform program exit housekeeping
    kill $SERVE_PID $YARN_PID
    exit
}

python -m flatisfy serve --config config.json -v &
SERVE_PID=$!

yarn watch:dev &
YARN_PID=$!

trap clean_up SIGHUP SIGINT SIGTERM

wait $SERVE_PID $YARN_PID

11
wsgi.py
@@ -12,11 +12,8 @@ import flatisfy.config
from flatisfy.web import app as web_app


class Args():
    config = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "config/config.json"
    )
class Args:
    config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config/config.json")


LOGGER = logging.getLogger("flatisfy")

@@ -24,9 +21,7 @@ LOGGER = logging.getLogger("flatisfy")

CONFIG = flatisfy.config.load_config(Args())
if CONFIG is None:
    LOGGER.error("Invalid configuration. Exiting. "
                 "Run init-config before if this is the first time "
                 "you run Flatisfy.")
    LOGGER.error("Invalid configuration. Exiting. Run init-config before if this is the first time you run Flatisfy.")
    sys.exit(1)