Compare commits
132 Commits
Author | SHA1 | Date | |
---|---|---|---|
9e495bb63e | |||
2358bc42c5 | |||
c463b468e8 | |||
fd33e5cb3f | |||
3b9cf0917a | |||
7fc9077520 | |||
a7ee94653f | |||
ebd031c047 | |||
ff927cbafa | |||
699cec5329 | |||
f442d91188 | |||
28270a15b9 | |||
e284a71a43 | |||
cbe9461809 | |||
977e354646 | |||
0b86a8fd23 | |||
d98af1788c | |||
76ef3a3879 | |||
b73dbdb10e | |||
a6a40081e7 | |||
9b6407db05 | |||
9a532c0da1 | |||
7dca7c535d | |||
c04d81a5ff | |||
36c8c3e38d | |||
0d732aa3de | |||
1a95495c30 | |||
3201c3ddd3 | |||
|
e32db2648a | ||
|
764a5c68ec | ||
|
6862648d50 | ||
|
67da9a055e | ||
|
4d11726332 | ||
|
1bd855dbd8 | ||
|
bd07988549 | ||
|
e4aef0bfaf | ||
|
7790eb0a32 | ||
|
9f328259a7 | ||
|
b3e316cf5b | ||
|
4e3b0055cf | ||
|
f6f1593384 | ||
|
62da67332f | ||
|
48f249bae2 | ||
|
fcb20d2878 | ||
|
711590b809 | ||
|
a92db5e8ee | ||
|
582a868a1d | ||
|
0f2a14b024 | ||
|
36e98bc5b3 | ||
|
713912cfbc | ||
|
42909bd46f | ||
|
9c5afac91c | ||
|
c6f711030a | ||
|
cc4c1ccb18 | ||
|
c659dc6b76 | ||
|
5a3a82ca8d | ||
|
2374763d3b | ||
|
e6b71c6ed5 | ||
|
da6cb83f93 | ||
|
caa4961679 | ||
|
89234b5c5b | ||
|
395a8ba72b | ||
|
fefab6ea36 | ||
|
361725ea4d | ||
|
76a06cf795 | ||
|
9698a889ad | ||
|
b19459b97f | ||
|
0da04a4b6e | ||
|
31f08cb36b | ||
|
af8d864c38 | ||
|
8f24ed48a3 | ||
|
8f0e13fa6d | ||
|
5736056a60 | ||
|
a82df4da50 | ||
|
0747c46400 | ||
|
5b15ce6918 | ||
|
a150312daa | ||
5daae2cc38 | |||
|
af7026a3d6 | ||
|
b60179ab54 | ||
|
822daed43e | ||
|
e4f1ce96bb | ||
0f2c4e0685 | |||
11684b6fd7 | |||
15fcb04368 | |||
|
5083f002d2 | ||
|
f67ec66d9b | ||
|
1434cfedda | ||
|
a21b0ef7bd | ||
|
c5187172f6 | ||
|
22906b063f | ||
|
b93ca5177d | ||
|
a1b3470f6b | ||
|
ff9c46dac5 | ||
|
d2be9c106f | ||
|
0d374bae14 | ||
|
a38cf0e9a8 | ||
d87f2ec37d | |||
|
4ef32a5653 | ||
|
ef896f999f | ||
5da06280be | |||
dc9392e6f0 | |||
|
35c902d3d3 | ||
|
7a9767ffce | ||
82f2e84243 | |||
|
1548f0176d | ||
|
e631d09df3 | ||
|
e8c28a4a91 | ||
|
dc9abadf12 | ||
|
c2f0a2389a | ||
6c7527f443 | |||
e0e04317ca | |||
9e71b946e3 | |||
139568f85e | |||
c7fa6c8b5b | |||
|
07cb54b179 | ||
|
bd00a5c076 | ||
|
3aa8843142 | ||
|
af8fa705dd | ||
0e7a577041 | |||
826989f77c | |||
cc9ed3d34b | |||
56c5aa20d4 | |||
0d3f984545 | |||
4df1f1d2a1 | |||
540d69f9e2 | |||
|
43b14ada51 | ||
0c9f7d3067 | |||
|
3855888bcb | ||
0b89f27a43 | |||
4ff4510ab7 | |||
c5953cfe7e |
1
.dockerignore
Normal file
1
.dockerignore
Normal file
@ -0,0 +1 @@
|
|||||||
|
data
|
10
.editorconfig
Normal file
10
.editorconfig
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
||||||
|
end_of_line = lf
|
||||||
|
insert_final_newline = true
|
||||||
|
|
||||||
|
[*.py]
|
||||||
|
max_line_length=120
|
@ -4,6 +4,9 @@
|
|||||||
"env": {
|
"env": {
|
||||||
"browser": true
|
"browser": true
|
||||||
},
|
},
|
||||||
|
"parserOptions": {
|
||||||
|
"ecmaVersion": 8
|
||||||
|
},
|
||||||
rules: {
|
rules: {
|
||||||
'indent': ["error", 4, { 'SwitchCase': 1 }],
|
'indent': ["error", 4, { 'SwitchCase': 1 }],
|
||||||
}
|
}
|
||||||
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -7,5 +7,7 @@ config/
|
|||||||
node_modules
|
node_modules
|
||||||
flatisfy/web/static/assets
|
flatisfy/web/static/assets
|
||||||
data/
|
data/
|
||||||
package-lock.json
|
|
||||||
doc/_build
|
doc/_build
|
||||||
|
data_rework/
|
||||||
|
.env
|
||||||
|
.htpasswd
|
||||||
|
@ -1,29 +1,17 @@
|
|||||||
before_script:
|
before_script:
|
||||||
- "pip install -r requirements.txt"
|
- "pip install -r requirements.txt"
|
||||||
- "pip install pylint"
|
- "pip install pylint"
|
||||||
- "curl -sL https://deb.nodesource.com/setup_6.x | bash -"
|
- "curl -sL https://deb.nodesource.com/setup_10.x | bash -"
|
||||||
- "apt-get install -y nodejs jq"
|
- "apt-get install -y nodejs jq"
|
||||||
- "npm install"
|
- "npm install"
|
||||||
|
|
||||||
lint:2.7:
|
lint:
|
||||||
image: "python:2.7"
|
|
||||||
stage: "test"
|
|
||||||
script:
|
|
||||||
- "hooks/pre-commit"
|
|
||||||
|
|
||||||
lint:3:
|
|
||||||
image: "python:3"
|
image: "python:3"
|
||||||
stage: "test"
|
stage: "test"
|
||||||
script:
|
script:
|
||||||
- "hooks/pre-commit"
|
- "hooks/pre-commit"
|
||||||
|
|
||||||
test:2.7:
|
test:
|
||||||
image: "python:2.7"
|
|
||||||
stage: "test"
|
|
||||||
script:
|
|
||||||
- python -m flatisfy init-config | jq '.constraints.default.house_types = ["APART"] | .constraints.default.type = "RENT" | .constraints.default.postal_codes = ["75014"]' > /tmp/config.json
|
|
||||||
- python -m flatisfy test --config /tmp/config.json
|
|
||||||
test:3:
|
|
||||||
image: "python:3"
|
image: "python:3"
|
||||||
stage: "test"
|
stage: "test"
|
||||||
script:
|
script:
|
||||||
|
6
.vscode/extensions.json
vendored
Normal file
6
.vscode/extensions.json
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"recommendations": [
|
||||||
|
"mtxr.sqltools",
|
||||||
|
"mtxr.sqltools-driver-sqlite"
|
||||||
|
]
|
||||||
|
}
|
15
.vscode/settings.json
vendored
Normal file
15
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"cSpell.words": [
|
||||||
|
"woob",
|
||||||
|
"flatisfy"
|
||||||
|
],
|
||||||
|
"sqltools.useNodeRuntime": true,
|
||||||
|
"sqltools.connections": [
|
||||||
|
{
|
||||||
|
"previewLimit": 50,
|
||||||
|
"driver": "SQLite",
|
||||||
|
"name": "flatisfy",
|
||||||
|
"database": "${workspaceFolder:flatisfy}/data/flatisfy.db"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
@ -53,9 +53,9 @@ locale.
|
|||||||
Thanks!
|
Thanks!
|
||||||
|
|
||||||
|
|
||||||
## Adding support for a new Weboob backend
|
## Adding support for a new Woob backend
|
||||||
|
|
||||||
To enable a new Weboob `CapHousing` backend in Flatisfy, you should add it to
|
To enable a new Woob `CapHousing` backend in Flatisfy, you should add it to
|
||||||
the list of available backends in
|
the list of available backends in
|
||||||
[flatisfy/fetch.py#L69-70](https://git.phyks.me/Phyks/flatisfy/blob/master/flatisfy/fetch.py#L69-70)
|
[flatisfy/fetch.py#L69-70](https://git.phyks.me/Phyks/flatisfy/blob/master/flatisfy/fetch.py#L69-70)
|
||||||
and update the list of `BACKEND_PRECEDENCES` for deduplication in
|
and update the list of `BACKEND_PRECEDENCES` for deduplication in
|
||||||
@ -77,4 +77,20 @@ If you want to add new data files, especially for public transportation stops
|
|||||||
3. Write a preprocessing function in `flatisfy/data_files/__init__.py`. You
|
3. Write a preprocessing function in `flatisfy/data_files/__init__.py`. You
|
||||||
can have a look at the existing functions for a model.
|
can have a look at the existing functions for a model.
|
||||||
|
|
||||||
|
|
||||||
|
## Adding new migrations
|
||||||
|
|
||||||
|
If you want to change the database schema, you should create a matching
|
||||||
|
migration. Here is the way to do it correctly:
|
||||||
|
|
||||||
|
1. First, edit the `flatisfy/models` files to create / remove the required
|
||||||
|
fields. If you create a new database from scratch, these are the files
|
||||||
|
which will be used.
|
||||||
|
2. Then, run `alembic revision -m "Some description"` in the root of the git
|
||||||
|
repo to create a new migration.
|
||||||
|
3. Finally, edit the newly created migration file under the `migrations/`
|
||||||
|
folder to add the required code to alter the database (both upgrade and
|
||||||
|
downgrade).
|
||||||
|
|
||||||
|
|
||||||
Thanks!
|
Thanks!
|
||||||
|
@ -19,7 +19,7 @@ and it is working fine :)
|
|||||||
|
|
||||||
<img src="doc/img/home.png" width="45%"/> <img src="doc/img/home2.png" width="45%"/>
|
<img src="doc/img/home.png" width="45%"/> <img src="doc/img/home2.png" width="45%"/>
|
||||||
|
|
||||||
It uses [Weboob](http://weboob.org/) to get all the housing posts on most of
|
It uses [woob](https://gitlab.com/woob/woob/) to get all the housing posts on most of
|
||||||
the websites offering housings posts, and then offers a bunch of pipelines to
|
the websites offering housings posts, and then offers a bunch of pipelines to
|
||||||
filter and deduplicate the fetched housings.
|
filter and deduplicate the fetched housings.
|
||||||
|
|
||||||
@ -116,7 +116,9 @@ Feel free to open issues. An IRC channel is available at [irc://irc.freenode.net
|
|||||||
|
|
||||||
## Thanks
|
## Thanks
|
||||||
|
|
||||||
* [Weboob](http://weboob.org/)
|
* [Woob](https://gitlab.com/woob/woob/). Note that this is actually the only and best
|
||||||
|
software out there to scrape housing posts online. Using it in Flatisfy does
|
||||||
|
not mean the authors of Flatisfy endorse Woob authors' views.
|
||||||
* The OpenData providers listed above!
|
* The OpenData providers listed above!
|
||||||
* Navitia for their really cool public transportation API.
|
* Navitia for their really cool public transportation API.
|
||||||
* A lots of Python modules, required for this script (see `requirements.txt`).
|
* A lots of Python modules, required for this script (see `requirements.txt`).
|
||||||
|
74
alembic.ini
Normal file
74
alembic.ini
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = migrations
|
||||||
|
|
||||||
|
# template used to generate migration files
|
||||||
|
# file_template = %%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# timezone to use when rendering the date
|
||||||
|
# within the migration file as well as the filename.
|
||||||
|
# string value is passed to dateutil.tz.gettz()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
#truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; this defaults
|
||||||
|
# to migrations/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path
|
||||||
|
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
sqlalchemy.url = sqlite:///data/flatisfy.db
|
||||||
|
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
@ -2,30 +2,35 @@ Getting started
|
|||||||
===============
|
===============
|
||||||
|
|
||||||
|
|
||||||
## Dependency on Weboob
|
## Dependency on Woob
|
||||||
|
|
||||||
**Important**: Flatisfy relies on [Weboob](http://weboob.org/) to fetch
|
**Important**: Flatisfy relies on [Woob](https://gitlab.com/woob/woob/) to fetch
|
||||||
housing posts from housing websites. Then, you should install the [`devel`
|
housing posts from housing websites.
|
||||||
branch](https://git.weboob.org/weboob/devel/) and update it regularly,
|
|
||||||
especially if Flatisfy suddenly stops fetching housing posts.
|
|
||||||
|
|
||||||
If you `pip install -r requirements.txt` it will install the latest
|
If you `pip install -r requirements.txt` it will install the latest
|
||||||
development version of [Weboob](https://git.weboob.org/weboob/devel/) and the
|
development version of [Woob](https://gitlab.com/woob/woob/) and the
|
||||||
[Weboob modules](https://git.weboob.org/weboob/modules/), which should be the
|
[Woob modules](https://gitlab.com/woob/modules/), which should be the
|
||||||
best version available out there. You should update these packages regularly,
|
best version available out there. You should update these packages regularly,
|
||||||
as they evolve quickly.
|
as they evolve quickly.
|
||||||
|
|
||||||
Weboob is made of two parts: a core and modules (which is the actual code
|
Woob is made of two parts: a core and modules (which is the actual code
|
||||||
fetching data from websites). Modules tend to break often and are then updated
|
fetching data from websites). Modules tend to break often and are then updated
|
||||||
often, you should keep them up to date. This can be done by installing the
|
often, you should keep them up to date. This can be done by installing and
|
||||||
`weboob-modules` package listed in the `requirements.txt` and using the
|
upgrading the packages listed in the `requirements.txt` and using the default
|
||||||
default configuration.
|
configuration.
|
||||||
|
|
||||||
This is a safe default configuration. However, a better option is usually to
|
This is a safe default configuration. However, a better option is usually to
|
||||||
clone [Weboob git repo](https://git.weboob.org/weboob/devel/) somewhere, on
|
clone [Woob git repo](https://gitlab.com/woob/woob/) somewhere, on
|
||||||
your disk, to point `modules_path` configuration option to
|
your disk, to point `modules_path` configuration option to
|
||||||
`path_to_weboob_git/modules` (see the configuration section below) and to run
|
`path_to_woob_git/modules` (see the configuration section below) and to run
|
||||||
a `git pull; python setup.py install` in the Weboob git repo often.
|
a `git pull; python setup.py install` in the Woob git repo often.
|
||||||
|
|
||||||
|
A copy of the Woob modules is available in the `modules` directory at the
|
||||||
|
root of this repository, you can use `"modules_path": "/path/to/flatisfy/modules"` to use them.
|
||||||
|
This copy may or may not be more up to date than the current state of official
|
||||||
|
Woob modules. Some changes are made there, which are not backported
|
||||||
|
upstream. Woob official modules are not synced in the `modules` folder on a
|
||||||
|
regular basis, so try both and see which ones match your needs! :)
|
||||||
|
|
||||||
|
|
||||||
## TL;DR
|
## TL;DR
|
||||||
@ -73,7 +78,11 @@ The available commands are:
|
|||||||
|
|
||||||
_Note:_ Fetching flats can be quite long and take up to a few minutes. This
|
_Note:_ Fetching flats can be quite long and take up to a few minutes. This
|
||||||
should be better optimized. To get a verbose output and have an hint about the
|
should be better optimized. To get a verbose output and have an hint about the
|
||||||
progress, use the `-v` argument.
|
progress, use the `-v` argument. It can remain stuck at "Loading flats for
|
||||||
|
constraint XXX...", which simply means it is fetching flats (using Woob
|
||||||
|
under the hood) and this step can be super long if there are lots of flats to
|
||||||
|
fetch. If this happens to you, you can set `max_entries` in your config to
|
||||||
|
limit the number of flats to fetch.
|
||||||
|
|
||||||
|
|
||||||
### Common arguments
|
### Common arguments
|
||||||
@ -108,12 +117,15 @@ List of configuration options:
|
|||||||
means that it will store the database in the default location, in
|
means that it will store the database in the default location, in
|
||||||
`data_directory`.
|
`data_directory`.
|
||||||
* `navitia_api_key` is an API token for [Navitia](https://www.navitia.io/)
|
* `navitia_api_key` is an API token for [Navitia](https://www.navitia.io/)
|
||||||
which is required to compute travel times.
|
which is required to compute travel times for `PUBLIC_TRANSPORT` mode.
|
||||||
* `modules_path` is the path to the Weboob modules. It can be `null` if you
|
* `mapbox_api_key` is an API token for [Mapbox](http://mapbox.com/)
|
||||||
want Weboob to use the locally installed [Weboob
|
which is required to compute travel times for `WALK`, `BIKE` and `CAR`
|
||||||
modules](https://git.weboob.org/weboob/modules), which you should install
|
modes.
|
||||||
|
* `modules_path` is the path to the Woob modules. It can be `null` if you
|
||||||
|
want Woob to use the locally installed [Woob
|
||||||
|
modules](https://gitlab.com/woob/modules/), which you should install
|
||||||
yourself. This is the default value. If it is a string, it should be an
|
yourself. This is the default value. If it is a string, it should be an
|
||||||
absolute path to the folder containing Weboob modules.
|
absolute path to the folder containing Woob modules.
|
||||||
* `port` is the port on which the development webserver should be
|
* `port` is the port on which the development webserver should be
|
||||||
listening (default to `8080`).
|
listening (default to `8080`).
|
||||||
* `host` is the host on which the development webserver should be listening
|
* `host` is the host on which the development webserver should be listening
|
||||||
@ -121,8 +133,8 @@ List of configuration options:
|
|||||||
* `webserver` is a server to use instead of the default Bottle built-in
|
* `webserver` is a server to use instead of the default Bottle built-in
|
||||||
webserver, see [Bottle deployment
|
webserver, see [Bottle deployment
|
||||||
doc](http://bottlepy.org/docs/dev/deployment.html).
|
doc](http://bottlepy.org/docs/dev/deployment.html).
|
||||||
* `backends` is a list of Weboob backends to enable. It defaults to any
|
* `backends` is a list of Woob backends to enable. It defaults to any
|
||||||
available and supported Weboob backend.
|
available and supported Woob backend.
|
||||||
* `store_personal_data` is a boolean indicated whether or not Flatisfy should
|
* `store_personal_data` is a boolean indicated whether or not Flatisfy should
|
||||||
fetch personal data from housing posts and store them in database. Such
|
fetch personal data from housing posts and store them in database. Such
|
||||||
personal data include contact phone number for instance. By default,
|
personal data include contact phone number for instance. By default,
|
||||||
@ -132,6 +144,10 @@ List of configuration options:
|
|||||||
`1500`). This is useful to avoid false-positive.
|
`1500`). This is useful to avoid false-positive.
|
||||||
* `duplicate_threshold` is the minimum score in the deep duplicate detection
|
* `duplicate_threshold` is the minimum score in the deep duplicate detection
|
||||||
step to consider two flats as being duplicates (defaults to `15`).
|
step to consider two flats as being duplicates (defaults to `15`).
|
||||||
|
* `serve_images_locally` lets you download all the images from the housings
|
||||||
|
websites when importing the posts. Then, all your Flatisfy works standalone,
|
||||||
|
serving the local copy of the images instead of fetching the images from the
|
||||||
|
remote websites every time you look through the fetched housing posts.
|
||||||
|
|
||||||
_Note:_ In production, you can either use the `serve` command with a reliable
|
_Note:_ In production, you can either use the `serve` command with a reliable
|
||||||
webserver instead of the default Bottle webserver (specifying a `webserver`
|
webserver instead of the default Bottle webserver (specifying a `webserver`
|
||||||
@ -156,14 +172,14 @@ under the `constraints` key. The available constraints are:
|
|||||||
* `postal_codes` (as strings) is a list of postal codes. You should include any postal code
|
* `postal_codes` (as strings) is a list of postal codes. You should include any postal code
|
||||||
you want, and especially the postal codes close to the precise location you
|
you want, and especially the postal codes close to the precise location you
|
||||||
want.
|
want.
|
||||||
* `time_to` is a dictionary of places to compute travel time to them (using
|
* `time_to` is a dictionary of places to compute travel time to them.
|
||||||
public transport, relies on [Navitia API](http://navitia.io/)).
|
|
||||||
Typically,
|
Typically,
|
||||||
|
|
||||||
```
|
```
|
||||||
"time_to": {
|
"time_to": {
|
||||||
"foobar": {
|
"foobar": {
|
||||||
"gps": [LAT, LNG],
|
"gps": [LAT, LNG],
|
||||||
|
"mode": A transport mode,
|
||||||
"time": [min, max]
|
"time": [min, max]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -172,13 +188,25 @@ under the `constraints` key. The available constraints are:
|
|||||||
means that the housings must be between the `min` and `max` bounds (possibly
|
means that the housings must be between the `min` and `max` bounds (possibly
|
||||||
`null`) from the place identified by the GPS coordinates `LAT` and `LNG`
|
`null`) from the place identified by the GPS coordinates `LAT` and `LNG`
|
||||||
(latitude and longitude), and we call this place `foobar` in human-readable
|
(latitude and longitude), and we call this place `foobar` in human-readable
|
||||||
form. Beware that `time` constraints are in **seconds**.
|
form. `mode` should be either `PUBLIC_TRANSPORT`, `WALK`, `BIKE` or `CAR`.
|
||||||
|
Beware that `time` constraints are in **seconds**. You should take
|
||||||
|
some margin as the travel time computation is done with found nearby public
|
||||||
|
transport stations, which is only a rough estimate of the flat position. For
|
||||||
|
`PUBLIC_TRANSPORT` the travel time is computed assuming a route the next
|
||||||
|
Monday at 8am.
|
||||||
* `minimum_nb_photos` lets you filter out posts with less than this number of
|
* `minimum_nb_photos` lets you filter out posts with less than this number of
|
||||||
photos.
|
photos.
|
||||||
* `description_should_contain` lets you specify a list of terms that should
|
* `description_should_contain` lets you specify a list of terms that should
|
||||||
be present in the posts descriptions. Typically, if you expect "parking" to
|
be present in the posts descriptions. Typically, if you expect "parking" to
|
||||||
be in all the posts Flatisfy fetches for you, you can set
|
be in all the posts Flatisfy fetches for you, you can set
|
||||||
`description_should_contain: ["parking"]`.
|
`description_should_contain: ["parking"]`. You can also use list of terms
|
||||||
|
which acts as an "or" operation. For example, if you are looking for a flat
|
||||||
|
with a parking and with either a balcony or a terrace, you can use
|
||||||
|
`description_should_contain: ["parking", ["balcony", "terrace"]]`
|
||||||
|
* `description_should_not_contain` lets you specify a list of terms that should
|
||||||
|
never occur in the posts descriptions. Typically, if you wish to avoid
|
||||||
|
"coloc" in the posts Flatisfy fetches for you, you can set
|
||||||
|
`description_should_not_contain: ["coloc"]`.
|
||||||
|
|
||||||
|
|
||||||
You can think of constraints as "a set of criterias to filter out flats". You
|
You can think of constraints as "a set of criterias to filter out flats". You
|
||||||
@ -191,4 +219,30 @@ provided that you name each of them uniquely.
|
|||||||
If you want to build the web assets, you can use `npm run build:dev`
|
If you want to build the web assets, you can use `npm run build:dev`
|
||||||
(respectively `npm run watch:dev` to build continuously and monitor changes in
|
(respectively `npm run watch:dev` to build continuously and monitor changes in
|
||||||
source files). You can use `npm run build:prod` (`npm run watch:prod`) to do
|
source files). You can use `npm run build:prod` (`npm run watch:prod`) to do
|
||||||
the same in production mode (with minification etc).
|
the same in production mode (main difference is minification of generated
|
||||||
|
source code).
|
||||||
|
|
||||||
|
**Note**: If `npm run build:prod` fails, you can fall back to `npm run
|
||||||
|
build:dev` safely.
|
||||||
|
|
||||||
|
|
||||||
|
## Upgrading
|
||||||
|
|
||||||
|
To update the app, you can simply `git pull` the latest version. The database
|
||||||
|
schema might change from time to time. Here is how to update it automatically:
|
||||||
|
|
||||||
|
* First, edit the `alembic.ini` file and ensure the `sqlalchemy.url` entry
|
||||||
|
points to the database URI you are actually using for Flatisfy.
|
||||||
|
* Then, run `alembic upgrade head` to run the required migrations.
|
||||||
|
|
||||||
|
## Misc
|
||||||
|
|
||||||
|
### Other tools more or less connected with Flatisfy
|
||||||
|
|
||||||
|
+ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only). You can invoke it with:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm ziparound
|
||||||
|
# or alternatively
|
||||||
|
npm ziparound --code 75001 --distance 3
|
||||||
|
```
|
||||||
|
@ -20,9 +20,6 @@ virtualenv .env && source .env/bin/activate
|
|||||||
# Install required Python modules
|
# Install required Python modules
|
||||||
pip install -r requirements.txt
|
pip install -r requirements.txt
|
||||||
|
|
||||||
# Clone and install weboob
|
|
||||||
git clone https://git.weboob.org/weboob/devel weboob && cd weboob && python setup.py install && cd ..
|
|
||||||
|
|
||||||
# Install required JS libraries and build the webapp
|
# Install required JS libraries and build the webapp
|
||||||
npm install && npm run build:prod
|
npm install && npm run build:prod
|
||||||
|
|
||||||
@ -42,13 +39,19 @@ python -m flatisfy build-data --config config/config.json -v
|
|||||||
python -m flatisfy import --config config/config.json -v
|
python -m flatisfy import --config config/config.json -v
|
||||||
```
|
```
|
||||||
|
|
||||||
_Note_: In the config, you should set `data_directory` to the absolute path of
|
_Note 1_: In the config, you should set `data_directory` to the absolute path of
|
||||||
the `data` directory created below. This directory should be writable by the
|
the `data` directory created below. This directory should be writable by the
|
||||||
user running Flatisfy. You should also set `modules_path` to the absolute path
|
user running Flatisfy. You should also set `modules_path` to the absolute path
|
||||||
to the `modules` folder under the previous `weboob` clone. Finally, the last
|
to the `modules` folder under the previous `woob` clone. Finally, the last
|
||||||
`import` command can be `cron`-tasked to automatically fetch available
|
`import` command can be `cron`-tasked to automatically fetch available
|
||||||
housings posts periodically.
|
housings posts periodically.
|
||||||
|
|
||||||
|
_Note 2_: As of 2019-03-13, building the webapp requires libpng-dev to be able to build pngquant-bin. On Debian Stretch (tested with Node v11.11.0):
|
||||||
|
|
||||||
|
sudo apt install libpng-dev
|
||||||
|
|
||||||
|
_Note 3_: If `npm run build:prod` fails, you can fall back to `npm run
|
||||||
|
build:dev` safely.
|
||||||
|
|
||||||
## Use an alternative Bottle backend (production)
|
## Use an alternative Bottle backend (production)
|
||||||
|
|
||||||
@ -221,3 +224,26 @@ setup. You should also set the `.htpasswd` file with users and credentials.
|
|||||||
_Note_: This vhost is really minimalistic and you should adapt it to your
|
_Note_: This vhost is really minimalistic and you should adapt it to your
|
||||||
setup, enforce SSL ciphers for increased security and do such good practices
|
setup, enforce SSL ciphers for increased security and do such good practices
|
||||||
things.
|
things.
|
||||||
|
|
||||||
|
### If database is in read only
|
||||||
|
|
||||||
|
In the case of you have a "flatisfy" user, and another user runs the webserver, for instance "www-data", you should have problems with the webapp reading, but not writing, the database. Workaround (Debian):
|
||||||
|
|
||||||
|
Add www-data in flatisfy group:
|
||||||
|
|
||||||
|
sudo usermod -a -G flatisfy www-data
|
||||||
|
|
||||||
|
Chmod data dir + DB file:
|
||||||
|
|
||||||
|
sudo chmod 775 data
|
||||||
|
sudo chmod 664 data/flatisfy.db
|
||||||
|
|
||||||
|
Edit /etc/uwsgi/apps-available/flatisfy.ini and add:
|
||||||
|
|
||||||
|
chmod-socket = 664
|
||||||
|
|
||||||
|
Restart:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
systemctl restart uwsgi
|
||||||
|
```
|
||||||
|
@ -20,7 +20,7 @@ docker build -t phyks/flatisfy .
|
|||||||
mkdir flatisfy
|
mkdir flatisfy
|
||||||
cd flatisfy
|
cd flatisfy
|
||||||
FLATISFY_VOLUME=$(pwd)
|
FLATISFY_VOLUME=$(pwd)
|
||||||
docker run -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy sh -c "cd /home/user/app && python -m flatisfy init-config > /flatisfy/config.json"
|
docker run --rm -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy sh -c "cd /home/user/app && python -m flatisfy init-config > /flatisfy/config.json"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
@ -44,7 +44,7 @@ Your Flatisfy instance is now available at `localhost:8080`!
|
|||||||
To fetch new housing posts, you should manually call
|
To fetch new housing posts, you should manually call
|
||||||
|
|
||||||
```
|
```
|
||||||
docker run -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy /home/user/fetch.sh
|
docker run --rm -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy /home/user/fetch.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
This can be done easily in a crontask on your host system, to run it typically
|
This can be done easily in a crontask on your host system, to run it typically
|
||||||
|
19
doc/3.faq.md
19
doc/3.faq.md
@ -15,3 +15,22 @@ references to all the other "duplicate" posts. These latter duplicate posts
|
|||||||
are then simply marked as such and never shown anymore.
|
are then simply marked as such and never shown anymore.
|
||||||
|
|
||||||
All origins are kept in a `urls` field in the remaining post.
|
All origins are kept in a `urls` field in the remaining post.
|
||||||
|
|
||||||
|
|
||||||
|
## Flatisfy seems to be stuck fetching posts
|
||||||
|
|
||||||
|
Fetching posts can be a long process, depending on your criterias. Run the
|
||||||
|
import command with `-v` argument to get a more verbose output and check
|
||||||
|
things are indeed happening. If fetching the flats is still too long, try to
|
||||||
|
set `max_entries` in your config to limit the number of posts fetched.
|
||||||
|
|
||||||
|
|
||||||
|
## Docker image does not start the webserver at first start?
|
||||||
|
|
||||||
|
When you launch the Docker image, it first updates Woob and fetches the
|
||||||
|
housing posts matching your criterias. The webserver is only started once this
|
||||||
|
is done. As fetching housing posts can take a bit of time (up to 10 minutes),
|
||||||
|
the webserver will not be available right away.
|
||||||
|
|
||||||
|
Once everything is ready, you should see a log message in the console running
|
||||||
|
the Docker image, confirming you that webserver is up and running.
|
||||||
|
88
doc/conf.py
88
doc/conf.py
@ -18,7 +18,8 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
|
||||||
|
sys.path.insert(0, os.path.abspath(".."))
|
||||||
|
|
||||||
# -- General configuration ------------------------------------------------
|
# -- General configuration ------------------------------------------------
|
||||||
|
|
||||||
@ -30,19 +31,19 @@ sys.path.insert(0, os.path.abspath('..'))
|
|||||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||||
# ones.
|
# ones.
|
||||||
extensions = [
|
extensions = [
|
||||||
'sphinx.ext.autodoc',
|
"sphinx.ext.autodoc",
|
||||||
'sphinx.ext.viewcode',
|
"sphinx.ext.viewcode",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
# The suffix(es) of source filenames.
|
# The suffix(es) of source filenames.
|
||||||
# You can specify multiple suffix as a list of string:
|
# You can specify multiple suffix as a list of string:
|
||||||
#
|
#
|
||||||
source_suffix = ['.rst', '.md']
|
source_suffix = [".rst", ".md"]
|
||||||
source_parsers = {
|
source_parsers = {
|
||||||
'.md': 'recommonmark.parser.CommonMarkParser',
|
".md": "recommonmark.parser.CommonMarkParser",
|
||||||
}
|
}
|
||||||
|
|
||||||
# The encoding of source files.
|
# The encoding of source files.
|
||||||
@ -50,21 +51,21 @@ source_parsers = {
|
|||||||
# source_encoding = 'utf-8-sig'
|
# source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
# The master toctree document.
|
# The master toctree document.
|
||||||
master_doc = 'index'
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u'Flatisfy'
|
project = u"Flatisfy"
|
||||||
copyright = u'2017, Phyks (Lucas Verney)'
|
copyright = u"2017, Phyks (Lucas Verney)"
|
||||||
author = u'Phyks (Lucas Verney)'
|
author = u"Phyks (Lucas Verney)"
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
# built documents.
|
# built documents.
|
||||||
#
|
#
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = u'0.1'
|
version = u"0.1"
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
release = u'0.1'
|
release = u"0.1"
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
@ -85,7 +86,7 @@ language = None
|
|||||||
# List of patterns, relative to source directory, that match files and
|
# List of patterns, relative to source directory, that match files and
|
||||||
# directories to ignore when looking for source files.
|
# directories to ignore when looking for source files.
|
||||||
# This patterns also effect to html_static_path and html_extra_path
|
# This patterns also effect to html_static_path and html_extra_path
|
||||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all
|
# The reST default role (used for this markup: `text`) to use for all
|
||||||
# documents.
|
# documents.
|
||||||
@ -107,7 +108,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
|||||||
# show_authors = False
|
# show_authors = False
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'sphinx'
|
pygments_style = "sphinx"
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
# modindex_common_prefix = []
|
# modindex_common_prefix = []
|
||||||
@ -124,7 +125,7 @@ todo_include_todos = False
|
|||||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
# a list of builtin themes.
|
# a list of builtin themes.
|
||||||
#
|
#
|
||||||
html_theme = 'classic'
|
html_theme = "classic"
|
||||||
|
|
||||||
# Theme options are theme-specific and customize the look and feel of a theme
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
@ -158,7 +159,7 @@ html_theme = 'classic'
|
|||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
html_static_path = ['_static']
|
html_static_path = ["_static"]
|
||||||
|
|
||||||
# Add any extra paths that contain custom files (such as robots.txt or
|
# Add any extra paths that contain custom files (such as robots.txt or
|
||||||
# .htaccess) here, relative to this directory. These files are copied
|
# .htaccess) here, relative to this directory. These files are copied
|
||||||
@ -238,34 +239,36 @@ html_static_path = ['_static']
|
|||||||
# html_search_scorer = 'scorer.js'
|
# html_search_scorer = 'scorer.js'
|
||||||
|
|
||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = 'Flatisfydoc'
|
htmlhelp_basename = "Flatisfydoc"
|
||||||
|
|
||||||
# -- Options for LaTeX output ---------------------------------------------
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
latex_elements = {
|
latex_elements = {
|
||||||
# The paper size ('letterpaper' or 'a4paper').
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
#
|
#
|
||||||
# 'papersize': 'letterpaper',
|
# 'papersize': 'letterpaper',
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
#
|
||||||
#
|
# 'pointsize': '10pt',
|
||||||
# 'pointsize': '10pt',
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#
|
||||||
# Additional stuff for the LaTeX preamble.
|
# 'preamble': '',
|
||||||
#
|
# Latex figure (float) alignment
|
||||||
# 'preamble': '',
|
#
|
||||||
|
# 'figure_align': 'htbp',
|
||||||
# Latex figure (float) alignment
|
|
||||||
#
|
|
||||||
# 'figure_align': 'htbp',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title,
|
# (source start file, target name, title,
|
||||||
# author, documentclass [howto, manual, or own class]).
|
# author, documentclass [howto, manual, or own class]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
(master_doc, 'Flatisfy.tex', u'Flatisfy Documentation',
|
(
|
||||||
u'Phyks (Lucas Verney)', 'manual'),
|
master_doc,
|
||||||
|
"Flatisfy.tex",
|
||||||
|
u"Flatisfy Documentation",
|
||||||
|
u"Phyks (Lucas Verney)",
|
||||||
|
"manual",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
@ -305,10 +308,7 @@ latex_documents = [
|
|||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [
|
man_pages = [(master_doc, "flatisfy", u"Flatisfy Documentation", [author], 1)]
|
||||||
(master_doc, 'flatisfy', u'Flatisfy Documentation',
|
|
||||||
[author], 1)
|
|
||||||
]
|
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
#
|
#
|
||||||
@ -321,9 +321,15 @@ man_pages = [
|
|||||||
# (source start file, target name, title, author,
|
# (source start file, target name, title, author,
|
||||||
# dir menu entry, description, category)
|
# dir menu entry, description, category)
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
(master_doc, 'Flatisfy', u'Flatisfy Documentation',
|
(
|
||||||
author, 'Flatisfy', 'One line description of project.',
|
master_doc,
|
||||||
'Miscellaneous'),
|
"Flatisfy",
|
||||||
|
u"Flatisfy Documentation",
|
||||||
|
author,
|
||||||
|
"Flatisfy",
|
||||||
|
"One line description of project.",
|
||||||
|
"Miscellaneous",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
FROM python:2
|
FROM python:3
|
||||||
MAINTAINER Phyks <phyks@phyks.me>
|
MAINTAINER Phyks <phyks@phyks.me>
|
||||||
|
|
||||||
# Setup layout.
|
# Setup layout.
|
||||||
@ -12,39 +12,27 @@ RUN apt-get update && \
|
|||||||
|
|
||||||
# Install latest pip and python dependencies.
|
# Install latest pip and python dependencies.
|
||||||
RUN pip install -U setuptools && \
|
RUN pip install -U setuptools && \
|
||||||
pip install html2text simplejson BeautifulSoup
|
pip install html2text simplejson beautifulsoup4
|
||||||
|
|
||||||
# Install node.js.
|
# Install node.js.
|
||||||
RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - \
|
RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - \
|
||||||
&& apt-get install -y nodejs
|
&& apt-get install -y nodejs
|
||||||
|
|
||||||
# Install weboob's code itself.
|
|
||||||
RUN git clone --depth 1 https://git.weboob.org/weboob/devel /home/user/weboob \
|
|
||||||
&& cd /home/user/weboob \
|
|
||||||
&& pip install .
|
|
||||||
|
|
||||||
RUN mkdir -p /flatisfy/data
|
RUN mkdir -p /flatisfy/data
|
||||||
VOLUME /flatisfy
|
VOLUME /flatisfy
|
||||||
|
|
||||||
# Install Flatisfy.
|
COPY ./*.sh /home/user/
|
||||||
|
|
||||||
|
# Install Flatisfy, set up directories and permissions.
|
||||||
RUN cd /home/user \
|
RUN cd /home/user \
|
||||||
&& git clone https://git.phyks.me/Phyks/flatisfy/ ./app \
|
&& git clone https://framagit.org/phyks/Flatisfy.git/ ./app \
|
||||||
&& cd ./app \
|
&& cd ./app \
|
||||||
&& pip install -r requirements.txt \
|
&& pip install -r requirements.txt \
|
||||||
&& npm install \
|
&& npm install \
|
||||||
&& npm run build:prod
|
&& npm run build:dev \
|
||||||
|
&& mkdir -p /home/user/.local/share/flatisfy \
|
||||||
RUN chown user:user -R /home/user
|
&& chown user:user -R /home/user \
|
||||||
RUN mkdir -p /home/user/.local/share/flatisfy
|
&& chmod +x /home/user/*.sh
|
||||||
|
|
||||||
COPY ./run.sh /home/user/run.sh
|
|
||||||
RUN chmod +x /home/user/run.sh
|
|
||||||
|
|
||||||
COPY ./entrypoint.sh /home/user/entrypoint.sh
|
|
||||||
RUN chmod +x /home/user/entrypoint.sh
|
|
||||||
|
|
||||||
COPY ./update_weboob.sh /home/user/update_weboob.sh
|
|
||||||
RUN chmod +x /home/user/update_weboob.sh
|
|
||||||
|
|
||||||
# Run server.
|
# Run server.
|
||||||
EXPOSE 8080
|
EXPOSE 8080
|
||||||
|
12
docker/docker-compose.yml
Normal file
12
docker/docker-compose.yml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
version: "3"
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
build: .
|
||||||
|
# image: phyks/flatisfy
|
||||||
|
environment:
|
||||||
|
- LOCAL_USER_ID=1000
|
||||||
|
volumes:
|
||||||
|
- ./data:/flatisfy
|
||||||
|
ports:
|
||||||
|
- "8080:8080"
|
||||||
|
working_dir: /home/user/app
|
@ -14,7 +14,4 @@ echo "[ENTRYPOINT] Setting fake values for git config..."
|
|||||||
git config --global user.email flatisfy@example.com
|
git config --global user.email flatisfy@example.com
|
||||||
git config --global user.name "Flatisfy Root"
|
git config --global user.name "Flatisfy Root"
|
||||||
|
|
||||||
echo "Update Weboob..."
|
|
||||||
/home/user/update_weboob.sh
|
|
||||||
|
|
||||||
exec su user -c "$@"
|
exec su user -c "$@"
|
||||||
|
6
docker/fetch.sh
Normal file
6
docker/fetch.sh
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "Fetching new housing posts..."
|
||||||
|
cd /home/user/app
|
||||||
|
python -m flatisfy import -v --config /flatisfy/config.json
|
@ -1,7 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
echo "Updating Weboob..."
|
|
||||||
cd /home/user/weboob
|
|
||||||
git pull
|
|
||||||
pip install --upgrade .
|
|
@ -17,6 +17,7 @@ from flatisfy import data
|
|||||||
from flatisfy import fetch
|
from flatisfy import fetch
|
||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
from flatisfy import tests
|
from flatisfy import tests
|
||||||
|
|
||||||
# pylint: enable=locally-disabled,wrong-import-position
|
# pylint: enable=locally-disabled,wrong-import-position
|
||||||
|
|
||||||
|
|
||||||
@ -27,68 +28,47 @@ def parse_args(argv=None):
|
|||||||
"""
|
"""
|
||||||
Create parser and parse arguments.
|
Create parser and parse arguments.
|
||||||
"""
|
"""
|
||||||
parser = argparse.ArgumentParser(prog="Flatisfy",
|
parser = argparse.ArgumentParser(prog="Flatisfy", description="Find the perfect flat.")
|
||||||
description="Find the perfect flat.")
|
|
||||||
|
|
||||||
# Parent parser containing arguments common to any subcommand
|
# Parent parser containing arguments common to any subcommand
|
||||||
parent_parser = argparse.ArgumentParser(add_help=False)
|
parent_parser = argparse.ArgumentParser(add_help=False)
|
||||||
|
parent_parser.add_argument("--data-dir", help="Location of Flatisfy data directory.")
|
||||||
|
parent_parser.add_argument("--config", help="Configuration file to use.")
|
||||||
parent_parser.add_argument(
|
parent_parser.add_argument(
|
||||||
"--data-dir",
|
"--passes",
|
||||||
help="Location of Flatisfy data directory."
|
choices=[0, 1, 2, 3],
|
||||||
|
type=int,
|
||||||
|
help="Number of passes to do on the filtered data.",
|
||||||
)
|
)
|
||||||
|
parent_parser.add_argument("--max-entries", type=int, help="Maximum number of entries to fetch.")
|
||||||
|
parent_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose logging output.")
|
||||||
|
parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
|
||||||
parent_parser.add_argument(
|
parent_parser.add_argument(
|
||||||
"--config",
|
"--constraints",
|
||||||
help="Configuration file to use."
|
type=str,
|
||||||
)
|
help="Comma-separated list of constraints to consider.",
|
||||||
parent_parser.add_argument(
|
|
||||||
"--passes", choices=[0, 1, 2, 3], type=int,
|
|
||||||
help="Number of passes to do on the filtered data."
|
|
||||||
)
|
|
||||||
parent_parser.add_argument(
|
|
||||||
"--max-entries", type=int,
|
|
||||||
help="Maximum number of entries to fetch."
|
|
||||||
)
|
|
||||||
parent_parser.add_argument(
|
|
||||||
"-v", "--verbose", action="store_true",
|
|
||||||
help="Verbose logging output."
|
|
||||||
)
|
|
||||||
parent_parser.add_argument(
|
|
||||||
"-vv", action="store_true",
|
|
||||||
help="Debug logging output."
|
|
||||||
)
|
|
||||||
parent_parser.add_argument(
|
|
||||||
"--constraints", type=str,
|
|
||||||
help="Comma-separated list of constraints to consider."
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Subcommands
|
# Subcommands
|
||||||
subparsers = parser.add_subparsers(
|
subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
|
||||||
dest="cmd", help="Available subcommands"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Build data subcommand
|
# Build data subcommand
|
||||||
subparsers.add_parser(
|
subparsers.add_parser("build-data", parents=[parent_parser], help="Build necessary data")
|
||||||
"build-data", parents=[parent_parser],
|
|
||||||
help="Build necessary data"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Init config subcommand
|
# Init config subcommand
|
||||||
parser_init_config = subparsers.add_parser(
|
parser_init_config = subparsers.add_parser(
|
||||||
"init-config", parents=[parent_parser],
|
"init-config", parents=[parent_parser], help="Initialize empty configuration."
|
||||||
help="Initialize empty configuration."
|
|
||||||
)
|
|
||||||
parser_init_config.add_argument(
|
|
||||||
"output", nargs="?", help="Output config file. Use '-' for stdout."
|
|
||||||
)
|
)
|
||||||
|
parser_init_config.add_argument("output", nargs="?", help="Output config file. Use '-' for stdout.")
|
||||||
|
|
||||||
# Fetch subcommand parser
|
# Fetch subcommand parser
|
||||||
subparsers.add_parser("fetch", parents=[parent_parser],
|
subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
|
||||||
help="Fetch housings posts")
|
|
||||||
|
|
||||||
# Filter subcommand parser
|
# Filter subcommand parser
|
||||||
parser_filter = subparsers.add_parser(
|
parser_filter = subparsers.add_parser(
|
||||||
"filter", parents=[parent_parser],
|
"filter",
|
||||||
help="Filter housings posts according to constraints in config."
|
parents=[parent_parser],
|
||||||
|
help="Filter housings posts according to constraints in config.",
|
||||||
)
|
)
|
||||||
parser_filter.add_argument(
|
parser_filter.add_argument(
|
||||||
"--input",
|
"--input",
|
||||||
@ -97,28 +77,29 @@ def parse_args(argv=None):
|
|||||||
"no additional fetching of infos is done, and the script outputs "
|
"no additional fetching of infos is done, and the script outputs "
|
||||||
"a filtered JSON dump on stdout. If not provided, update status "
|
"a filtered JSON dump on stdout. If not provided, update status "
|
||||||
"of the flats in the database."
|
"of the flats in the database."
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Import subcommand parser
|
# Import subcommand parser
|
||||||
subparsers.add_parser("import", parents=[parent_parser],
|
import_filter = subparsers.add_parser("import", parents=[parent_parser], help="Import housing posts in database.")
|
||||||
help="Import housing posts in database.")
|
import_filter.add_argument(
|
||||||
|
"--new-only",
|
||||||
|
action="store_true",
|
||||||
|
help=("Download new housing posts only but do not refresh existing ones"),
|
||||||
|
)
|
||||||
|
|
||||||
# Purge subcommand parser
|
# Purge subcommand parser
|
||||||
subparsers.add_parser("purge", parents=[parent_parser],
|
subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
|
||||||
help="Purge database.")
|
|
||||||
|
|
||||||
# Serve subcommand parser
|
# Serve subcommand parser
|
||||||
parser_serve = subparsers.add_parser("serve", parents=[parent_parser],
|
parser_serve = subparsers.add_parser("serve", parents=[parent_parser], help="Serve the web app.")
|
||||||
help="Serve the web app.")
|
|
||||||
parser_serve.add_argument("--port", type=int, help="Port to bind to.")
|
parser_serve.add_argument("--port", type=int, help="Port to bind to.")
|
||||||
parser_serve.add_argument("--host", help="Host to listen on.")
|
parser_serve.add_argument("--host", help="Host to listen on.")
|
||||||
|
|
||||||
# Test subcommand parser
|
# Test subcommand parser
|
||||||
subparsers.add_parser("test", parents=[parent_parser],
|
subparsers.add_parser("test", parents=[parent_parser], help="Unit testing.")
|
||||||
help="Unit testing.")
|
|
||||||
|
|
||||||
return parser.parse_args(argv)
|
return parser, parser.parse_args(argv)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@ -127,25 +108,30 @@ def main():
|
|||||||
"""
|
"""
|
||||||
# pylint: disable=locally-disabled,too-many-branches
|
# pylint: disable=locally-disabled,too-many-branches
|
||||||
# Parse arguments
|
# Parse arguments
|
||||||
args = parse_args()
|
parser, args = parse_args()
|
||||||
|
|
||||||
# Set logger
|
# Set logger
|
||||||
if args.vv:
|
if getattr(args, 'vv', False):
|
||||||
logging.getLogger('').setLevel(logging.DEBUG)
|
logging.getLogger("").setLevel(logging.DEBUG)
|
||||||
logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
|
logging.getLogger("titlecase").setLevel(logging.INFO)
|
||||||
elif args.verbose:
|
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
|
||||||
logging.getLogger('').setLevel(logging.INFO)
|
elif getattr(args, 'verbose', False):
|
||||||
|
logging.getLogger("").setLevel(logging.INFO)
|
||||||
# sqlalchemy INFO level is way too loud, just stick with WARNING
|
# sqlalchemy INFO level is way too loud, just stick with WARNING
|
||||||
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
|
logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
|
||||||
else:
|
else:
|
||||||
logging.getLogger('').setLevel(logging.WARNING)
|
logging.getLogger("").setLevel(logging.WARNING)
|
||||||
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
|
logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
|
||||||
|
|
||||||
# Init-config command
|
# Init-config command
|
||||||
if args.cmd == "init-config":
|
if args.cmd == "init-config":
|
||||||
flatisfy.config.init_config(args.output)
|
flatisfy.config.init_config(args.output)
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
else:
|
else:
|
||||||
|
if not args.cmd:
|
||||||
|
parser.print_help()
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
# Load config
|
# Load config
|
||||||
if args.cmd == "build-data":
|
if args.cmd == "build-data":
|
||||||
# Data not yet built, do not use it in config checks
|
# Data not yet built, do not use it in config checks
|
||||||
@ -153,9 +139,11 @@ def main():
|
|||||||
else:
|
else:
|
||||||
config = flatisfy.config.load_config(args, check_with_data=True)
|
config = flatisfy.config.load_config(args, check_with_data=True)
|
||||||
if config is None:
|
if config is None:
|
||||||
LOGGER.error("Invalid configuration. Exiting. "
|
LOGGER.error(
|
||||||
"Run init-config before if this is the first time "
|
"Invalid configuration. Exiting. "
|
||||||
"you run Flatisfy.")
|
"Run init-config before if this is the first time "
|
||||||
|
"you run Flatisfy."
|
||||||
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# Purge command
|
# Purge command
|
||||||
@ -171,18 +159,11 @@ def main():
|
|||||||
if args.cmd == "fetch":
|
if args.cmd == "fetch":
|
||||||
# Fetch and filter flats list
|
# Fetch and filter flats list
|
||||||
fetched_flats = fetch.fetch_flats(config)
|
fetched_flats = fetch.fetch_flats(config)
|
||||||
fetched_flats = cmds.filter_fetched_flats(config,
|
fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=True)
|
||||||
fetched_flats=fetched_flats,
|
|
||||||
fetch_details=True)
|
|
||||||
# Sort by cost
|
# Sort by cost
|
||||||
fetched_flats = {
|
fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
|
||||||
k: tools.sort_list_of_dicts_by(v["new"], "cost")
|
|
||||||
for k, v in fetched_flats.items()
|
|
||||||
}
|
|
||||||
|
|
||||||
print(
|
print(tools.pretty_json(fetched_flats))
|
||||||
tools.pretty_json(fetched_flats)
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
# Filter command
|
# Filter command
|
||||||
elif args.cmd == "filter":
|
elif args.cmd == "filter":
|
||||||
@ -190,28 +171,19 @@ def main():
|
|||||||
if args.input:
|
if args.input:
|
||||||
fetched_flats = fetch.load_flats_from_file(args.input, config)
|
fetched_flats = fetch.load_flats_from_file(args.input, config)
|
||||||
|
|
||||||
fetched_flats = cmds.filter_fetched_flats(
|
fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=False)
|
||||||
config,
|
|
||||||
fetched_flats=fetched_flats,
|
|
||||||
fetch_details=False
|
|
||||||
)
|
|
||||||
|
|
||||||
# Sort by cost
|
# Sort by cost
|
||||||
fetched_flats = {
|
fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
|
||||||
k: tools.sort_list_of_dicts_by(v["new"], "cost")
|
|
||||||
for k, v in fetched_flats.items()
|
|
||||||
}
|
|
||||||
|
|
||||||
# Output to stdout
|
# Output to stdout
|
||||||
print(
|
print(tools.pretty_json(fetched_flats))
|
||||||
tools.pretty_json(fetched_flats)
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
cmds.import_and_filter(config, load_from_db=True)
|
cmds.import_and_filter(config, load_from_db=True)
|
||||||
return
|
return
|
||||||
# Import command
|
# Import command
|
||||||
elif args.cmd == "import":
|
elif args.cmd == "import":
|
||||||
cmds.import_and_filter(config, load_from_db=False)
|
cmds.import_and_filter(config, load_from_db=False, new_only=args.new_only)
|
||||||
return
|
return
|
||||||
# Serve command
|
# Serve command
|
||||||
elif args.cmd == "serve":
|
elif args.cmd == "serve":
|
||||||
|
107
flatisfy/cmds.py
107
flatisfy/cmds.py
@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||||||
|
|
||||||
import collections
|
import collections
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
import flatisfy.filters
|
import flatisfy.filters
|
||||||
from flatisfy import database
|
from flatisfy import database
|
||||||
@ -17,21 +18,23 @@ from flatisfy import fetch
|
|||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
from flatisfy.filters import metadata
|
from flatisfy.filters import metadata
|
||||||
from flatisfy.web import app as web_app
|
from flatisfy.web import app as web_app
|
||||||
|
import time
|
||||||
|
from ratelimit.exception import RateLimitException
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
|
def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
|
||||||
"""
|
"""
|
||||||
Filter the available flats list. Then, filter it according to criteria.
|
Filter the available flats list. Then, filter it according to criteria.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
:param constraint_name: The constraint name that the ``flats_list`` should
|
:param constraint_name: The constraint name that the ``flats_list`` should
|
||||||
satisfy.
|
satisfy.
|
||||||
|
:param flats_list: The initial list of flat objects to filter.
|
||||||
:param fetch_details: Whether additional details should be fetched between
|
:param fetch_details: Whether additional details should be fetched between
|
||||||
the two passes.
|
the two passes.
|
||||||
:param flats_list: The initial list of flat objects to filter.
|
:param past_flats: The list of already fetched flats
|
||||||
:return: A dict mapping flat status and list of flat objects.
|
:return: A dict mapping flat status and list of flat objects.
|
||||||
"""
|
"""
|
||||||
# Add the flatisfy metadata entry and prepare the flat objects
|
# Add the flatisfy metadata entry and prepare the flat objects
|
||||||
@ -43,13 +46,9 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
LOGGER.error(
|
LOGGER.error(
|
||||||
"Missing constraint %s. Skipping filtering for these posts.",
|
"Missing constraint %s. Skipping filtering for these posts.",
|
||||||
constraint_name
|
constraint_name,
|
||||||
)
|
)
|
||||||
return {
|
return {"new": [], "duplicate": [], "ignored": []}
|
||||||
"new": [],
|
|
||||||
"duplicate": [],
|
|
||||||
"ignored": []
|
|
||||||
}
|
|
||||||
|
|
||||||
first_pass_result = collections.defaultdict(list)
|
first_pass_result = collections.defaultdict(list)
|
||||||
second_pass_result = collections.defaultdict(list)
|
second_pass_result = collections.defaultdict(list)
|
||||||
@ -57,52 +56,55 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
|
|||||||
# Do a first pass with the available infos to try to remove as much
|
# Do a first pass with the available infos to try to remove as much
|
||||||
# unwanted postings as possible
|
# unwanted postings as possible
|
||||||
if config["passes"] > 0:
|
if config["passes"] > 0:
|
||||||
first_pass_result = flatisfy.filters.first_pass(flats_list,
|
first_pass_result = flatisfy.filters.first_pass(flats_list, constraint, config)
|
||||||
constraint,
|
|
||||||
config)
|
|
||||||
else:
|
else:
|
||||||
first_pass_result["new"] = flats_list
|
first_pass_result["new"] = flats_list
|
||||||
|
|
||||||
# Load additional infos
|
# Load additional infos
|
||||||
if fetch_details:
|
if fetch_details:
|
||||||
|
past_ids = {x["id"]: x for x in past_flats} if past_flats else {}
|
||||||
for i, flat in enumerate(first_pass_result["new"]):
|
for i, flat in enumerate(first_pass_result["new"]):
|
||||||
details = fetch.fetch_details(config, flat["id"])
|
details = None
|
||||||
|
|
||||||
|
use_cache = past_ids.get(flat["id"])
|
||||||
|
if use_cache:
|
||||||
|
LOGGER.debug("Skipping details download for %s.", flat["id"])
|
||||||
|
details = use_cache
|
||||||
|
else:
|
||||||
|
if flat["id"].split("@")[1] in ["seloger", "leboncoin"]:
|
||||||
|
try:
|
||||||
|
details = fetch.fetch_details_rate_limited(config, flat["id"])
|
||||||
|
except RateLimitException:
|
||||||
|
time.sleep(60)
|
||||||
|
details = fetch.fetch_details_rate_limited(config, flat["id"])
|
||||||
|
else:
|
||||||
|
details = fetch.fetch_details(config, flat["id"])
|
||||||
|
|
||||||
first_pass_result["new"][i] = tools.merge_dicts(flat, details)
|
first_pass_result["new"][i] = tools.merge_dicts(flat, details)
|
||||||
|
|
||||||
# Do a second pass to consolidate all the infos we found and make use of
|
# Do a second pass to consolidate all the infos we found and make use of
|
||||||
# additional infos
|
# additional infos
|
||||||
if config["passes"] > 1:
|
if config["passes"] > 1:
|
||||||
second_pass_result = flatisfy.filters.second_pass(
|
second_pass_result = flatisfy.filters.second_pass(first_pass_result["new"], constraint, config)
|
||||||
first_pass_result["new"], constraint, config
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
second_pass_result["new"] = first_pass_result["new"]
|
second_pass_result["new"] = first_pass_result["new"]
|
||||||
|
|
||||||
# Do a third pass to deduplicate better
|
# Do a third pass to deduplicate better
|
||||||
if config["passes"] > 2:
|
if config["passes"] > 2:
|
||||||
third_pass_result = flatisfy.filters.third_pass(
|
third_pass_result = flatisfy.filters.third_pass(second_pass_result["new"], config)
|
||||||
second_pass_result["new"],
|
|
||||||
config
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
third_pass_result["new"] = second_pass_result["new"]
|
third_pass_result["new"] = second_pass_result["new"]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"new": third_pass_result["new"],
|
"new": third_pass_result["new"],
|
||||||
"duplicate": (
|
"duplicate": (
|
||||||
first_pass_result["duplicate"] +
|
first_pass_result["duplicate"] + second_pass_result["duplicate"] + third_pass_result["duplicate"]
|
||||||
second_pass_result["duplicate"] +
|
|
||||||
third_pass_result["duplicate"]
|
|
||||||
),
|
),
|
||||||
"ignored": (
|
"ignored": (first_pass_result["ignored"] + second_pass_result["ignored"] + third_pass_result["ignored"]),
|
||||||
first_pass_result["ignored"] +
|
|
||||||
second_pass_result["ignored"] +
|
|
||||||
third_pass_result["ignored"]
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def filter_fetched_flats(config, fetched_flats, fetch_details=True):
|
def filter_fetched_flats(config, fetched_flats, fetch_details=True, past_flats={}):
|
||||||
"""
|
"""
|
||||||
Filter the available flats list. Then, filter it according to criteria.
|
Filter the available flats list. Then, filter it according to criteria.
|
||||||
|
|
||||||
@ -119,33 +121,40 @@ def filter_fetched_flats(config, fetched_flats, fetch_details=True):
|
|||||||
config,
|
config,
|
||||||
constraint_name,
|
constraint_name,
|
||||||
flats_list,
|
flats_list,
|
||||||
fetch_details
|
fetch_details,
|
||||||
|
past_flats.get(constraint_name, None),
|
||||||
)
|
)
|
||||||
return fetched_flats
|
return fetched_flats
|
||||||
|
|
||||||
|
|
||||||
def import_and_filter(config, load_from_db=False):
|
def import_and_filter(config, load_from_db=False, new_only=False):
|
||||||
"""
|
"""
|
||||||
Fetch the available flats list. Then, filter it according to criteria.
|
Fetch the available flats list. Then, filter it according to criteria.
|
||||||
Finally, store it in the database.
|
Finally, store it in the database.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
:param load_from_db: Whether to load flats from database or fetch them
|
:param load_from_db: Whether to load flats from database or fetch them
|
||||||
using Weboob.
|
using Woob.
|
||||||
:return: ``None``.
|
:return: ``None``.
|
||||||
"""
|
"""
|
||||||
# Fetch and filter flats list
|
# Fetch and filter flats list
|
||||||
|
past_flats = fetch.load_flats_from_db(config)
|
||||||
if load_from_db:
|
if load_from_db:
|
||||||
fetched_flats = fetch.load_flats_from_db(config)
|
fetched_flats = past_flats
|
||||||
else:
|
else:
|
||||||
fetched_flats = fetch.fetch_flats(config)
|
fetched_flats = fetch.fetch_flats(config)
|
||||||
# Do not fetch additional details if we loaded data from the db.
|
# Do not fetch additional details if we loaded data from the db.
|
||||||
flats_by_status = filter_fetched_flats(config, fetched_flats=fetched_flats,
|
flats_by_status = filter_fetched_flats(
|
||||||
fetch_details=(not load_from_db))
|
config,
|
||||||
|
fetched_flats=fetched_flats,
|
||||||
|
fetch_details=(not load_from_db),
|
||||||
|
past_flats=past_flats if new_only else {},
|
||||||
|
)
|
||||||
# Create database connection
|
# Create database connection
|
||||||
get_session = database.init_db(config["database"], config["search_index"])
|
get_session = database.init_db(config["database"], config["search_index"])
|
||||||
|
|
||||||
new_flats = []
|
new_flats = []
|
||||||
|
result = []
|
||||||
|
|
||||||
LOGGER.info("Merging fetched flats in database...")
|
LOGGER.info("Merging fetched flats in database...")
|
||||||
# Flatten the flats_by_status dict
|
# Flatten the flats_by_status dict
|
||||||
@ -155,12 +164,14 @@ def import_and_filter(config, load_from_db=False):
|
|||||||
flatten_flats_by_status[status].extend(flats_list)
|
flatten_flats_by_status[status].extend(flats_list)
|
||||||
|
|
||||||
with get_session() as session:
|
with get_session() as session:
|
||||||
|
# Set is_expired to true for all existing flats.
|
||||||
|
# This will be set back to false if we find them during importing.
|
||||||
|
for flat in session.query(flat_model.Flat).all():
|
||||||
|
flat.is_expired = True
|
||||||
|
|
||||||
for status, flats_list in flatten_flats_by_status.items():
|
for status, flats_list in flatten_flats_by_status.items():
|
||||||
# Build SQLAlchemy Flat model objects for every available flat
|
# Build SQLAlchemy Flat model objects for every available flat
|
||||||
flats_objects = {
|
flats_objects = {flat_dict["id"]: flat_model.Flat.from_dict(flat_dict) for flat_dict in flats_list}
|
||||||
flat_dict["id"]: flat_model.Flat.from_dict(flat_dict)
|
|
||||||
for flat_dict in flats_list
|
|
||||||
}
|
|
||||||
|
|
||||||
if flats_objects:
|
if flats_objects:
|
||||||
# If there are some flats, try to merge them with the ones in
|
# If there are some flats, try to merge them with the ones in
|
||||||
@ -173,11 +184,13 @@ def import_and_filter(config, load_from_db=False):
|
|||||||
# status if the user defined it
|
# status if the user defined it
|
||||||
flat_object = flats_objects[each.id]
|
flat_object = flats_objects[each.id]
|
||||||
if each.status in flat_model.AUTOMATED_STATUSES:
|
if each.status in flat_model.AUTOMATED_STATUSES:
|
||||||
flat_object.status = getattr(
|
flat_object.status = getattr(flat_model.FlatStatus, status)
|
||||||
flat_model.FlatStatus, status
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
flat_object.status = each.status
|
flat_object.status = each.status
|
||||||
|
|
||||||
|
# Every flat we fetched isn't expired
|
||||||
|
flat_object.is_expired = False
|
||||||
|
|
||||||
# For each flat already in the db, merge it (UPDATE)
|
# For each flat already in the db, merge it (UPDATE)
|
||||||
# instead of adding it
|
# instead of adding it
|
||||||
session.merge(flats_objects.pop(each.id))
|
session.merge(flats_objects.pop(each.id))
|
||||||
@ -188,13 +201,22 @@ def import_and_filter(config, load_from_db=False):
|
|||||||
flat.status = getattr(flat_model.FlatStatus, status)
|
flat.status = getattr(flat_model.FlatStatus, status)
|
||||||
if flat.status == flat_model.FlatStatus.new:
|
if flat.status == flat_model.FlatStatus.new:
|
||||||
new_flats.append(flat)
|
new_flats.append(flat)
|
||||||
|
result.append(flat.id)
|
||||||
|
|
||||||
session.add_all(flats_objects.values())
|
session.add_all(flats_objects.values())
|
||||||
|
|
||||||
if config["send_email"]:
|
if config["send_email"]:
|
||||||
email.send_notification(config, new_flats)
|
email.send_notification(config, new_flats)
|
||||||
|
|
||||||
|
LOGGER.info(f"Found {len(result)} new flats.")
|
||||||
|
|
||||||
|
# Touch a file to indicate last update timestamp
|
||||||
|
ts_file = os.path.join(config["data_directory"], "timestamp")
|
||||||
|
with open(ts_file, "w"):
|
||||||
|
os.utime(ts_file, None)
|
||||||
|
|
||||||
LOGGER.info("Done!")
|
LOGGER.info("Done!")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def purge_db(config):
|
def purge_db(config):
|
||||||
@ -234,4 +256,5 @@ def serve(config):
|
|||||||
# standard logging
|
# standard logging
|
||||||
server = web_app.QuietWSGIRefServer
|
server = web_app.QuietWSGIRefServer
|
||||||
|
|
||||||
|
print("Launching web viewer running on http://%s:%s" % (config["host"], config["port"]))
|
||||||
app.run(host=config["host"], port=config["port"], server=server)
|
app.run(host=config["host"], port=config["port"], server=server)
|
||||||
|
@ -15,13 +15,17 @@ import sys
|
|||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
import appdirs
|
import appdirs
|
||||||
from weboob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES
|
from woob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES
|
||||||
|
|
||||||
from flatisfy import data
|
from flatisfy import data
|
||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
|
from flatisfy.constants import TimeToModes
|
||||||
from flatisfy.models.postal_code import PostalCode
|
from flatisfy.models.postal_code import PostalCode
|
||||||
|
|
||||||
|
|
||||||
|
DIRPATH = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
|
||||||
# Default configuration
|
# Default configuration
|
||||||
DEFAULT_CONFIG = {
|
DEFAULT_CONFIG = {
|
||||||
# Constraints to match
|
# Constraints to match
|
||||||
@ -29,17 +33,26 @@ DEFAULT_CONFIG = {
|
|||||||
"default": {
|
"default": {
|
||||||
"type": None, # RENT, SALE, SHARING
|
"type": None, # RENT, SALE, SHARING
|
||||||
"house_types": [], # List of house types, must be in APART, HOUSE,
|
"house_types": [], # List of house types, must be in APART, HOUSE,
|
||||||
# PARKING, LAND, OTHER or UNKNOWN
|
# PARKING, LAND, OTHER or UNKNOWN
|
||||||
"postal_codes": [], # List of postal codes
|
"postal_codes": [], # List of postal codes
|
||||||
|
"insees": [], # List of postal codes
|
||||||
"area": (None, None), # (min, max) in m^2
|
"area": (None, None), # (min, max) in m^2
|
||||||
"cost": (None, None), # (min, max) in currency unit
|
"cost": (None, None), # (min, max) in currency unit
|
||||||
"rooms": (None, None), # (min, max)
|
"rooms": (None, None), # (min, max)
|
||||||
"bedrooms": (None, None), # (min, max)
|
"bedrooms": (None, None), # (min, max)
|
||||||
"minimum_nb_photos": None, # min number of photos
|
"minimum_nb_photos": None, # min number of photos
|
||||||
"description_should_contain": [], # list of terms
|
"description_should_contain": [], # list of terms (str) or list
|
||||||
|
# (acting as an or)
|
||||||
|
"description_should_not_contain": [
|
||||||
|
"vendu",
|
||||||
|
"Vendu",
|
||||||
|
"VENDU",
|
||||||
|
"recherche",
|
||||||
|
],
|
||||||
"time_to": {} # Dict mapping names to {"gps": [lat, lng],
|
"time_to": {} # Dict mapping names to {"gps": [lat, lng],
|
||||||
# "time": (min, max) }
|
# "time": (min, max),
|
||||||
# Time is in seconds
|
# "mode": Valid mode }
|
||||||
|
# Time is in seconds
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
# Whether or not to store personal data from housing posts (phone number
|
# Whether or not to store personal data from housing posts (phone number
|
||||||
@ -56,16 +69,16 @@ DEFAULT_CONFIG = {
|
|||||||
"serve_images_locally": True,
|
"serve_images_locally": True,
|
||||||
# Navitia API key
|
# Navitia API key
|
||||||
"navitia_api_key": None,
|
"navitia_api_key": None,
|
||||||
|
# Mapbox API key
|
||||||
|
"mapbox_api_key": None,
|
||||||
# Number of filtering passes to run
|
# Number of filtering passes to run
|
||||||
"passes": 3,
|
"passes": 3,
|
||||||
# Maximum number of entries to fetch
|
# Maximum number of entries to fetch
|
||||||
"max_entries": None,
|
"max_entries": None,
|
||||||
# Directory in wich data will be put. ``None`` is XDG default location.
|
# Directory in wich data will be put. ``None`` is XDG default location.
|
||||||
"data_directory": None,
|
"data_directory": None,
|
||||||
# Path to the modules directory containing all Weboob modules. ``None`` if
|
# Path to the modules directory containing all Woob modules.
|
||||||
# ``weboob_modules`` package is pip-installed, and you want to use
|
"modules_path": os.path.join(DIRPATH, '..', 'modules'),
|
||||||
# ``pkgresource`` to automatically find it.
|
|
||||||
"modules_path": None,
|
|
||||||
# SQLAlchemy URI to the database to use
|
# SQLAlchemy URI to the database to use
|
||||||
"database": None,
|
"database": None,
|
||||||
# Path to the Whoosh search index file. Use ``None`` to put it in
|
# Path to the Whoosh search index file. Use ``None`` to put it in
|
||||||
@ -77,17 +90,21 @@ DEFAULT_CONFIG = {
|
|||||||
"host": "127.0.0.1",
|
"host": "127.0.0.1",
|
||||||
# Web server to use to serve the webapp (see Bottle deployment doc)
|
# Web server to use to serve the webapp (see Bottle deployment doc)
|
||||||
"webserver": None,
|
"webserver": None,
|
||||||
# List of Weboob backends to use (default to any backend available)
|
# List of Woob backends to use (default to any backend available)
|
||||||
"backends": None,
|
"backends": None,
|
||||||
# Should email notifications be sent?
|
# Should email notifications be sent?
|
||||||
"send_email": False,
|
"send_email": False,
|
||||||
"smtp_server": 'localhost',
|
"smtp_server": "localhost",
|
||||||
"smtp_port": 25,
|
"smtp_port": 25,
|
||||||
|
"smtp_username": None,
|
||||||
|
"smtp_password": None,
|
||||||
"smtp_from": "noreply@flatisfy.org",
|
"smtp_from": "noreply@flatisfy.org",
|
||||||
"smtp_to": [],
|
"smtp_to": [],
|
||||||
|
"notification_lang": "en",
|
||||||
# The web site url, to be used in email notifications. (doesn't matter
|
# The web site url, to be used in email notifications. (doesn't matter
|
||||||
# whether the trailing slash is present or not)
|
# whether the trailing slash is present or not)
|
||||||
"website_url": "http://127.0.0.1:8080"
|
"website_url": "http://127.0.0.1:8080",
|
||||||
|
"ignore_station": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
@ -102,20 +119,14 @@ def validate_config(config, check_with_data):
|
|||||||
check the config values.
|
check the config values.
|
||||||
:return: ``True`` if the configuration is valid, ``False`` otherwise.
|
:return: ``True`` if the configuration is valid, ``False`` otherwise.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _check_constraints_bounds(bounds):
|
def _check_constraints_bounds(bounds):
|
||||||
"""
|
"""
|
||||||
Check the bounds for numeric constraints.
|
Check the bounds for numeric constraints.
|
||||||
"""
|
"""
|
||||||
assert isinstance(bounds, list)
|
assert isinstance(bounds, list)
|
||||||
assert len(bounds) == 2
|
assert len(bounds) == 2
|
||||||
assert all(
|
assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
|
||||||
x is None or
|
|
||||||
(
|
|
||||||
isinstance(x, (float, int)) and
|
|
||||||
x >= 0
|
|
||||||
)
|
|
||||||
for x in bounds
|
|
||||||
)
|
|
||||||
if bounds[0] is not None and bounds[1] is not None:
|
if bounds[0] is not None and bounds[1] is not None:
|
||||||
assert bounds[1] > bounds[0]
|
assert bounds[1] > bounds[0]
|
||||||
|
|
||||||
@ -127,7 +138,9 @@ def validate_config(config, check_with_data):
|
|||||||
# pylint: disable=locally-disabled,line-too-long
|
# pylint: disable=locally-disabled,line-too-long
|
||||||
|
|
||||||
assert config["passes"] in [0, 1, 2, 3]
|
assert config["passes"] in [0, 1, 2, 3]
|
||||||
assert config["max_entries"] is None or (isinstance(config["max_entries"], int) and config["max_entries"] > 0) # noqa: E501
|
assert config["max_entries"] is None or (
|
||||||
|
isinstance(config["max_entries"], int) and config["max_entries"] > 0
|
||||||
|
) # noqa: E501
|
||||||
|
|
||||||
assert config["data_directory"] is None or isinstance(config["data_directory"], str) # noqa: E501
|
assert config["data_directory"] is None or isinstance(config["data_directory"], str) # noqa: E501
|
||||||
assert os.path.isdir(config["data_directory"])
|
assert os.path.isdir(config["data_directory"])
|
||||||
@ -144,19 +157,28 @@ def validate_config(config, check_with_data):
|
|||||||
assert isinstance(config["send_email"], bool)
|
assert isinstance(config["send_email"], bool)
|
||||||
assert config["smtp_server"] is None or isinstance(config["smtp_server"], str) # noqa: E501
|
assert config["smtp_server"] is None or isinstance(config["smtp_server"], str) # noqa: E501
|
||||||
assert config["smtp_port"] is None or isinstance(config["smtp_port"], int) # noqa: E501
|
assert config["smtp_port"] is None or isinstance(config["smtp_port"], int) # noqa: E501
|
||||||
|
assert config["smtp_username"] is None or isinstance(config["smtp_username"], str) # noqa: E501
|
||||||
|
assert config["smtp_password"] is None or isinstance(config["smtp_password"], str) # noqa: E501
|
||||||
assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
|
assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
|
||||||
|
assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)
|
||||||
|
|
||||||
assert isinstance(config["store_personal_data"], bool)
|
assert isinstance(config["store_personal_data"], bool)
|
||||||
assert isinstance(config["max_distance_housing_station"], (int, float))
|
assert isinstance(config["max_distance_housing_station"], (int, float))
|
||||||
assert isinstance(config["duplicate_threshold"], int)
|
assert isinstance(config["duplicate_threshold"], int)
|
||||||
assert isinstance(config["duplicate_image_hash_threshold"], int)
|
assert isinstance(config["duplicate_image_hash_threshold"], int)
|
||||||
|
|
||||||
|
# API keys
|
||||||
|
assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str) # noqa: E501
|
||||||
|
assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str) # noqa: E501
|
||||||
|
|
||||||
|
assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool) # noqa: E501
|
||||||
|
|
||||||
# Ensure constraints are ok
|
# Ensure constraints are ok
|
||||||
assert config["constraints"]
|
assert config["constraints"]
|
||||||
for constraint in config["constraints"].values():
|
for constraint in config["constraints"].values():
|
||||||
assert "type" in constraint
|
assert "type" in constraint
|
||||||
assert isinstance(constraint["type"], str)
|
assert isinstance(constraint["type"], str)
|
||||||
assert constraint["type"].upper() in POSTS_TYPES.keys
|
assert constraint["type"].upper() in POSTS_TYPES.__members__
|
||||||
|
|
||||||
assert "minimum_nb_photos" in constraint
|
assert "minimum_nb_photos" in constraint
|
||||||
if constraint["minimum_nb_photos"]:
|
if constraint["minimum_nb_photos"]:
|
||||||
@ -167,26 +189,42 @@ def validate_config(config, check_with_data):
|
|||||||
assert isinstance(constraint["description_should_contain"], list)
|
assert isinstance(constraint["description_should_contain"], list)
|
||||||
if constraint["description_should_contain"]:
|
if constraint["description_should_contain"]:
|
||||||
for term in constraint["description_should_contain"]:
|
for term in constraint["description_should_contain"]:
|
||||||
|
try:
|
||||||
|
assert isinstance(term, str)
|
||||||
|
except AssertionError:
|
||||||
|
assert isinstance(term, list)
|
||||||
|
assert all(isinstance(x, str) for x in term)
|
||||||
|
|
||||||
|
assert "description_should_not_contain" in constraint
|
||||||
|
assert isinstance(constraint["description_should_not_contain"], list)
|
||||||
|
if constraint["description_should_not_contain"]:
|
||||||
|
for term in constraint["description_should_not_contain"]:
|
||||||
assert isinstance(term, str)
|
assert isinstance(term, str)
|
||||||
|
|
||||||
assert "house_types" in constraint
|
assert "house_types" in constraint
|
||||||
assert constraint["house_types"]
|
assert constraint["house_types"]
|
||||||
for house_type in constraint["house_types"]:
|
for house_type in constraint["house_types"]:
|
||||||
assert house_type.upper() in HOUSE_TYPES.keys
|
assert house_type.upper() in HOUSE_TYPES.__members__
|
||||||
|
|
||||||
assert "postal_codes" in constraint
|
assert "postal_codes" in constraint
|
||||||
assert constraint["postal_codes"]
|
assert constraint["postal_codes"]
|
||||||
assert all(isinstance(x, str) for x in constraint["postal_codes"])
|
assert all(isinstance(x, str) for x in constraint["postal_codes"])
|
||||||
|
if "insee_codes" in constraint:
|
||||||
|
assert constraint["insee_codes"]
|
||||||
|
assert all(isinstance(x, str) for x in constraint["insee_codes"])
|
||||||
|
|
||||||
if check_with_data:
|
if check_with_data:
|
||||||
# Ensure data is built into db
|
# Ensure data is built into db
|
||||||
data.preprocess_data(config, force=False)
|
data.preprocess_data(config, force=False)
|
||||||
# Check postal codes
|
# Check postal codes
|
||||||
opendata_postal_codes = [
|
opendata = data.load_data(PostalCode, constraint, config)
|
||||||
x.postal_code
|
opendata_postal_codes = [x.postal_code for x in opendata]
|
||||||
for x in data.load_data(PostalCode, constraint, config)
|
opendata_insee_codes = [x.insee_code for x in opendata]
|
||||||
]
|
|
||||||
for postal_code in constraint["postal_codes"]:
|
for postal_code in constraint["postal_codes"]:
|
||||||
assert postal_code in opendata_postal_codes # noqa: E501
|
assert postal_code in opendata_postal_codes # noqa: E501
|
||||||
|
if "insee_codes" in constraint:
|
||||||
|
for insee in constraint["insee_codes"]:
|
||||||
|
assert insee in opendata_insee_codes # noqa: E501
|
||||||
|
|
||||||
assert "area" in constraint
|
assert "area" in constraint
|
||||||
_check_constraints_bounds(constraint["area"])
|
_check_constraints_bounds(constraint["area"])
|
||||||
@ -209,6 +247,8 @@ def validate_config(config, check_with_data):
|
|||||||
assert len(item["gps"]) == 2
|
assert len(item["gps"]) == 2
|
||||||
assert "time" in item
|
assert "time" in item
|
||||||
_check_constraints_bounds(item["time"])
|
_check_constraints_bounds(item["time"])
|
||||||
|
if "mode" in item:
|
||||||
|
TimeToModes[item["mode"]]
|
||||||
|
|
||||||
return True
|
return True
|
||||||
except (AssertionError, KeyError):
|
except (AssertionError, KeyError):
|
||||||
@ -237,22 +277,18 @@ def load_config(args=None, check_with_data=True):
|
|||||||
config_data.update(json.load(fh))
|
config_data.update(json.load(fh))
|
||||||
except (IOError, ValueError) as exc:
|
except (IOError, ValueError) as exc:
|
||||||
LOGGER.error(
|
LOGGER.error(
|
||||||
"Unable to load configuration from file, "
|
"Unable to load configuration from file, using default configuration: %s.",
|
||||||
"using default configuration: %s.",
|
exc,
|
||||||
exc
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Overload config with arguments
|
# Overload config with arguments
|
||||||
if args and getattr(args, "passes", None) is not None:
|
if args and getattr(args, "passes", None) is not None:
|
||||||
LOGGER.debug(
|
LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
|
||||||
"Overloading number of passes from CLI arguments: %d.",
|
|
||||||
args.passes
|
|
||||||
)
|
|
||||||
config_data["passes"] = args.passes
|
config_data["passes"] = args.passes
|
||||||
if args and getattr(args, "max_entries", None) is not None:
|
if args and getattr(args, "max_entries", None) is not None:
|
||||||
LOGGER.debug(
|
LOGGER.debug(
|
||||||
"Overloading maximum number of entries from CLI arguments: %d.",
|
"Overloading maximum number of entries from CLI arguments: %d.",
|
||||||
args.max_entries
|
args.max_entries,
|
||||||
)
|
)
|
||||||
config_data["max_entries"] = args.max_entries
|
config_data["max_entries"] = args.max_entries
|
||||||
if args and getattr(args, "port", None) is not None:
|
if args and getattr(args, "port", None) is not None:
|
||||||
@ -267,49 +303,39 @@ def load_config(args=None, check_with_data=True):
|
|||||||
LOGGER.debug("Overloading data directory from CLI arguments.")
|
LOGGER.debug("Overloading data directory from CLI arguments.")
|
||||||
config_data["data_directory"] = args.data_dir
|
config_data["data_directory"] = args.data_dir
|
||||||
elif config_data["data_directory"] is None:
|
elif config_data["data_directory"] is None:
|
||||||
config_data["data_directory"] = appdirs.user_data_dir(
|
config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
|
||||||
"flatisfy",
|
LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])
|
||||||
"flatisfy"
|
|
||||||
)
|
|
||||||
LOGGER.debug("Using default XDG data directory: %s.",
|
|
||||||
config_data["data_directory"])
|
|
||||||
|
|
||||||
if not os.path.isdir(config_data["data_directory"]):
|
if not os.path.isdir(config_data["data_directory"]):
|
||||||
LOGGER.info("Creating data directory according to config: %s",
|
LOGGER.info(
|
||||||
config_data["data_directory"])
|
"Creating data directory according to config: %s",
|
||||||
|
config_data["data_directory"],
|
||||||
|
)
|
||||||
os.makedirs(config_data["data_directory"])
|
os.makedirs(config_data["data_directory"])
|
||||||
os.makedirs(os.path.join(config_data["data_directory"], "images"))
|
os.makedirs(os.path.join(config_data["data_directory"], "images"))
|
||||||
|
|
||||||
if config_data["database"] is None:
|
if config_data["database"] is None:
|
||||||
config_data["database"] = "sqlite:///" + os.path.join(
|
config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")
|
||||||
config_data["data_directory"],
|
|
||||||
"flatisfy.db"
|
|
||||||
)
|
|
||||||
|
|
||||||
if config_data["search_index"] is None:
|
if config_data["search_index"] is None:
|
||||||
config_data["search_index"] = os.path.join(
|
config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")
|
||||||
config_data["data_directory"],
|
|
||||||
"search_index"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Handle constraints filtering
|
# Handle constraints filtering
|
||||||
if args and getattr(args, "constraints", None) is not None:
|
if args and getattr(args, "constraints", None) is not None:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
("Filtering constraints from config according to CLI argument. "
|
(
|
||||||
"Using only the following constraints: %s."),
|
"Filtering constraints from config according to CLI argument. "
|
||||||
args.constraints.replace(",", ", ")
|
"Using only the following constraints: %s."
|
||||||
|
),
|
||||||
|
args.constraints.replace(",", ", "),
|
||||||
)
|
)
|
||||||
constraints_filter = args.constraints.split(",")
|
constraints_filter = args.constraints.split(",")
|
||||||
config_data["constraints"] = {
|
config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}
|
||||||
k: v
|
|
||||||
for k, v in config_data["constraints"].items()
|
|
||||||
if k in constraints_filter
|
|
||||||
}
|
|
||||||
|
|
||||||
# Sanitize website url
|
# Sanitize website url
|
||||||
if config_data["website_url"] is not None:
|
if config_data["website_url"] is not None:
|
||||||
if config_data["website_url"][-1] != '/':
|
if config_data["website_url"][-1] != "/":
|
||||||
config_data["website_url"] += '/'
|
config_data["website_url"] += "/"
|
||||||
|
|
||||||
config_validation = validate_config(config_data, check_with_data)
|
config_validation = validate_config(config_data, check_with_data)
|
||||||
if config_validation is True:
|
if config_validation is True:
|
||||||
|
@ -4,6 +4,8 @@ Constants used across the app.
|
|||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
# Some backends give more infos than others. Here is the precedence we want to
|
# Some backends give more infos than others. Here is the precedence we want to
|
||||||
# use. First is most important one, last is the one that will always be
|
# use. First is most important one, last is the one that will always be
|
||||||
# considered as less trustable if two backends have similar info about a
|
# considered as less trustable if two backends have similar info about a
|
||||||
@ -14,5 +16,12 @@ BACKENDS_BY_PRECEDENCE = [
|
|||||||
"pap",
|
"pap",
|
||||||
"leboncoin",
|
"leboncoin",
|
||||||
"explorimmo",
|
"explorimmo",
|
||||||
"logicimmo"
|
"logicimmo",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class TimeToModes(Enum):
|
||||||
|
PUBLIC_TRANSPORT = -1
|
||||||
|
WALK = 1
|
||||||
|
BIKE = 2
|
||||||
|
CAR = 3
|
||||||
|
@ -24,11 +24,13 @@ except ImportError:
|
|||||||
try:
|
try:
|
||||||
from functools32 import lru_cache
|
from functools32 import lru_cache
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|
||||||
def lru_cache(maxsize=None): # pylint: disable=unused-argument
|
def lru_cache(maxsize=None): # pylint: disable=unused-argument
|
||||||
"""
|
"""
|
||||||
Identity implementation of ``lru_cache`` for fallback.
|
Identity implementation of ``lru_cache`` for fallback.
|
||||||
"""
|
"""
|
||||||
return lambda func: func
|
return lambda func: func
|
||||||
|
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"`functools.lru_cache` is not available on your system. Consider "
|
"`functools.lru_cache` is not available on your system. Consider "
|
||||||
"installing `functools32` Python module if using Python2 for "
|
"installing `functools32` Python module if using Python2 for "
|
||||||
@ -48,10 +50,7 @@ def preprocess_data(config, force=False):
|
|||||||
# Check if a build is required
|
# Check if a build is required
|
||||||
get_session = database.init_db(config["database"], config["search_index"])
|
get_session = database.init_db(config["database"], config["search_index"])
|
||||||
with get_session() as session:
|
with get_session() as session:
|
||||||
is_built = (
|
is_built = session.query(PublicTransport).count() > 0 and session.query(PostalCode).count() > 0
|
||||||
session.query(PublicTransport).count() > 0 and
|
|
||||||
session.query(PostalCode).count() > 0
|
|
||||||
)
|
|
||||||
if is_built and not force:
|
if is_built and not force:
|
||||||
# No need to rebuild the database, skip
|
# No need to rebuild the database, skip
|
||||||
return False
|
return False
|
||||||
@ -64,9 +63,7 @@ def preprocess_data(config, force=False):
|
|||||||
for preprocess in data_files.PREPROCESSING_FUNCTIONS:
|
for preprocess in data_files.PREPROCESSING_FUNCTIONS:
|
||||||
data_objects = preprocess()
|
data_objects = preprocess()
|
||||||
if not data_objects:
|
if not data_objects:
|
||||||
raise flatisfy.exceptions.DataBuildError(
|
raise flatisfy.exceptions.DataBuildError("Error with %s." % preprocess.__name__)
|
||||||
"Error with %s." % preprocess.__name__
|
|
||||||
)
|
|
||||||
with get_session() as session:
|
with get_session() as session:
|
||||||
session.add_all(data_objects)
|
session.add_all(data_objects)
|
||||||
LOGGER.info("Done building data!")
|
LOGGER.info("Done building data!")
|
||||||
@ -96,10 +93,7 @@ def load_data(model, constraint, config):
|
|||||||
# Load data for each area
|
# Load data for each area
|
||||||
areas = list(set(areas))
|
areas = list(set(areas))
|
||||||
for area in areas:
|
for area in areas:
|
||||||
results.extend(
|
results.extend(session.query(model).filter(model.area == area).all())
|
||||||
session.query(model)
|
|
||||||
.filter(model.area == area).all()
|
|
||||||
)
|
|
||||||
# Expunge loaded data from the session to be able to use them
|
# Expunge loaded data from the session to be able to use them
|
||||||
# afterwards
|
# afterwards
|
||||||
session.expunge_all()
|
session.expunge_all()
|
||||||
|
@ -16,10 +16,7 @@ from flatisfy.models.postal_code import PostalCode
|
|||||||
from flatisfy.models.public_transport import PublicTransport
|
from flatisfy.models.public_transport import PublicTransport
|
||||||
from flatisfy.tools import normalize_string
|
from flatisfy.tools import normalize_string
|
||||||
|
|
||||||
if sys.version_info >= (3, 0):
|
import csv
|
||||||
import csv
|
|
||||||
else:
|
|
||||||
from backports import csv
|
|
||||||
|
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
@ -27,8 +24,8 @@ MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
|
|||||||
|
|
||||||
titlecase.set_small_word_list(
|
titlecase.set_small_word_list(
|
||||||
# Add French small words
|
# Add French small words
|
||||||
r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|" +
|
r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|"
|
||||||
titlecase.SMALL
|
+ titlecase.SMALL
|
||||||
)
|
)
|
||||||
|
|
||||||
TRANSPORT_DATA_FILES = {
|
TRANSPORT_DATA_FILES = {
|
||||||
@ -36,7 +33,7 @@ TRANSPORT_DATA_FILES = {
|
|||||||
"FR-NW": "stops_fr-nw.txt",
|
"FR-NW": "stops_fr-nw.txt",
|
||||||
"FR-NE": "stops_fr-ne.txt",
|
"FR-NE": "stops_fr-ne.txt",
|
||||||
"FR-SW": "stops_fr-sw.txt",
|
"FR-SW": "stops_fr-sw.txt",
|
||||||
"FR-SE": "stops_fr-se.txt"
|
"FR-SE": "stops_fr-se.txt",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -54,8 +51,20 @@ def french_postal_codes_to_quarter(postal_code):
|
|||||||
# French departements
|
# French departements
|
||||||
# Taken from Wikipedia data.
|
# Taken from Wikipedia data.
|
||||||
department_to_subdivision = {
|
department_to_subdivision = {
|
||||||
"FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43", "63", "69",
|
"FR-ARA": [
|
||||||
"73", "74"],
|
"01",
|
||||||
|
"03",
|
||||||
|
"07",
|
||||||
|
"15",
|
||||||
|
"26",
|
||||||
|
"38",
|
||||||
|
"42",
|
||||||
|
"43",
|
||||||
|
"63",
|
||||||
|
"69",
|
||||||
|
"73",
|
||||||
|
"74",
|
||||||
|
],
|
||||||
"FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
|
"FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
|
||||||
"FR-BRE": ["22", "29", "35", "44", "56"],
|
"FR-BRE": ["22", "29", "35", "44", "56"],
|
||||||
"FR-CVL": ["18", "28", "36", "37", "41", "45"],
|
"FR-CVL": ["18", "28", "36", "37", "41", "45"],
|
||||||
@ -64,36 +73,53 @@ def french_postal_codes_to_quarter(postal_code):
|
|||||||
"FR-HDF": ["02", "59", "60", "62", "80"],
|
"FR-HDF": ["02", "59", "60", "62", "80"],
|
||||||
"FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
|
"FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
|
||||||
"FR-NOR": ["14", "27", "50", "61", "76"],
|
"FR-NOR": ["14", "27", "50", "61", "76"],
|
||||||
"FR-NAQ": ["16", "17", "19", "23", "24", "33", "40", "47", "64", "79",
|
"FR-NAQ": [
|
||||||
"86", "87"],
|
"16",
|
||||||
"FR-OCC": ["09", "11", "12", "30", "31", "32", "34", "46", "48", "65",
|
"17",
|
||||||
"66", "81", "82"],
|
"19",
|
||||||
|
"23",
|
||||||
|
"24",
|
||||||
|
"33",
|
||||||
|
"40",
|
||||||
|
"47",
|
||||||
|
"64",
|
||||||
|
"79",
|
||||||
|
"86",
|
||||||
|
"87",
|
||||||
|
],
|
||||||
|
"FR-OCC": [
|
||||||
|
"09",
|
||||||
|
"11",
|
||||||
|
"12",
|
||||||
|
"30",
|
||||||
|
"31",
|
||||||
|
"32",
|
||||||
|
"34",
|
||||||
|
"46",
|
||||||
|
"48",
|
||||||
|
"65",
|
||||||
|
"66",
|
||||||
|
"81",
|
||||||
|
"82",
|
||||||
|
],
|
||||||
"FR-PDL": ["44", "49", "53", "72", "85"],
|
"FR-PDL": ["44", "49", "53", "72", "85"],
|
||||||
"FR-PAC": ["04", "05", "06", "13", "83", "84"]
|
"FR-PAC": ["04", "05", "06", "13", "83", "84"],
|
||||||
}
|
}
|
||||||
subdivision_to_quarters = {
|
subdivision_to_quarters = {
|
||||||
'FR-IDF': ['FR-IDF'],
|
"FR-IDF": ["FR-IDF"],
|
||||||
'FR-NW': ['FR-BRE', 'FR-CVL', 'FR-NOR', 'FR-PDL'],
|
"FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
|
||||||
'FR-NE': ['FR-BFC', 'FR-GES', 'FR-HDF'],
|
"FR-NE": ["FR-BFC", "FR-GES", "FR-HDF"],
|
||||||
'FR-SE': ['FR-ARA', 'FR-COR', 'FR-PAC', 'FR-OCC'],
|
"FR-SE": ["FR-ARA", "FR-COR", "FR-PAC", "FR-OCC"],
|
||||||
'FR-SW': ['FR-NAQ']
|
"FR-SW": ["FR-NAQ"],
|
||||||
}
|
}
|
||||||
|
|
||||||
subdivision = next(
|
subdivision = next(
|
||||||
(
|
(i for i, departments in department_to_subdivision.items() if departement in departments),
|
||||||
i
|
None,
|
||||||
for i, departments in department_to_subdivision.items()
|
|
||||||
if departement in departments
|
|
||||||
),
|
|
||||||
None
|
|
||||||
)
|
)
|
||||||
return next(
|
return next(
|
||||||
(
|
(i for i, subdivisions in subdivision_to_quarters.items() if subdivision in subdivisions),
|
||||||
i
|
None,
|
||||||
for i, subdivisions in subdivision_to_quarters.items()
|
|
||||||
if subdivision in subdivisions
|
|
||||||
),
|
|
||||||
None
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -109,9 +135,7 @@ def _preprocess_laposte():
|
|||||||
raw_laposte_data = []
|
raw_laposte_data = []
|
||||||
# Load opendata file
|
# Load opendata file
|
||||||
try:
|
try:
|
||||||
with io.open(
|
with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
|
||||||
os.path.join(MODULE_DIR, data_file), "r", encoding='utf-8'
|
|
||||||
) as fh:
|
|
||||||
raw_laposte_data = json.load(fh)
|
raw_laposte_data = json.load(fh)
|
||||||
except (IOError, ValueError):
|
except (IOError, ValueError):
|
||||||
LOGGER.error("Invalid raw LaPoste opendata file.")
|
LOGGER.error("Invalid raw LaPoste opendata file.")
|
||||||
@ -127,31 +151,30 @@ def _preprocess_laposte():
|
|||||||
try:
|
try:
|
||||||
area = french_postal_codes_to_quarter(fields["code_postal"])
|
area = french_postal_codes_to_quarter(fields["code_postal"])
|
||||||
if area is None:
|
if area is None:
|
||||||
LOGGER.info(
|
LOGGER.debug(
|
||||||
"No matching area found for postal code %s, skipping it.",
|
"No matching area found for postal code %s, skipping it.",
|
||||||
fields["code_postal"]
|
fields["code_postal"],
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
name = normalize_string(
|
name = normalize_string(titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False)
|
||||||
titlecase.titlecase(fields["nom_de_la_commune"]),
|
|
||||||
lowercase=False
|
|
||||||
)
|
|
||||||
|
|
||||||
if (fields["code_postal"], name) in seen_postal_codes:
|
if (fields["code_postal"], name) in seen_postal_codes:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
seen_postal_codes.append((fields["code_postal"], name))
|
seen_postal_codes.append((fields["code_postal"], name))
|
||||||
postal_codes_data.append(PostalCode(
|
postal_codes_data.append(
|
||||||
area=area,
|
PostalCode(
|
||||||
postal_code=fields["code_postal"],
|
area=area,
|
||||||
name=name,
|
postal_code=fields["code_postal"],
|
||||||
lat=fields["coordonnees_gps"][0],
|
insee_code=fields["code_commune_insee"],
|
||||||
lng=fields["coordonnees_gps"][1]
|
name=name,
|
||||||
))
|
lat=fields["coordonnees_gps"][0],
|
||||||
|
lng=fields["coordonnees_gps"][1],
|
||||||
|
)
|
||||||
|
)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
LOGGER.info("Missing data for postal code %s, skipping it.",
|
LOGGER.debug("Missing data for postal code %s, skipping it.", fields["code_postal"])
|
||||||
fields["code_postal"])
|
|
||||||
|
|
||||||
return postal_codes_data
|
return postal_codes_data
|
||||||
|
|
||||||
@ -167,17 +190,11 @@ def _preprocess_public_transport():
|
|||||||
for area, data_file in TRANSPORT_DATA_FILES.items():
|
for area, data_file in TRANSPORT_DATA_FILES.items():
|
||||||
LOGGER.info("Building from public transport data %s.", data_file)
|
LOGGER.info("Building from public transport data %s.", data_file)
|
||||||
try:
|
try:
|
||||||
with io.open(os.path.join(MODULE_DIR, data_file), "r",
|
with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
|
||||||
encoding='utf-8') as fh:
|
|
||||||
filereader = csv.reader(fh)
|
filereader = csv.reader(fh)
|
||||||
next(filereader, None) # Skip first row (headers)
|
next(filereader, None) # Skip first row (headers)
|
||||||
for row in filereader:
|
for row in filereader:
|
||||||
public_transport_data.append(PublicTransport(
|
public_transport_data.append(PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4]))
|
||||||
name=row[2],
|
|
||||||
area=area,
|
|
||||||
lat=row[3],
|
|
||||||
lng=row[4]
|
|
||||||
))
|
|
||||||
except (IOError, IndexError):
|
except (IOError, IndexError):
|
||||||
LOGGER.error("Invalid raw opendata file: %s.", data_file)
|
LOGGER.error("Invalid raw opendata file: %s.", data_file)
|
||||||
return []
|
return []
|
||||||
@ -186,7 +203,4 @@ def _preprocess_public_transport():
|
|||||||
|
|
||||||
|
|
||||||
# List of all the available preprocessing functions. Order can be important.
|
# List of all the available preprocessing functions. Order can be important.
|
||||||
PREPROCESSING_FUNCTIONS = [
|
PREPROCESSING_FUNCTIONS = [_preprocess_laposte, _preprocess_public_transport]
|
||||||
_preprocess_laposte,
|
|
||||||
_preprocess_public_transport
|
|
||||||
]
|
|
||||||
|
File diff suppressed because one or more lines are too long
@ -13,12 +13,7 @@ from sqlalchemy.engine import Engine
|
|||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
from sqlalchemy.exc import OperationalError, SQLAlchemyError
|
from sqlalchemy.exc import OperationalError, SQLAlchemyError
|
||||||
|
|
||||||
# Import models
|
|
||||||
import flatisfy.models.postal_code # noqa: F401
|
|
||||||
import flatisfy.models.public_transport # noqa: F401
|
|
||||||
import flatisfy.models.flat # noqa: F401
|
import flatisfy.models.flat # noqa: F401
|
||||||
import flatisfy.models.constraint # noqa: F401
|
|
||||||
|
|
||||||
from flatisfy.database.base import BASE
|
from flatisfy.database.base import BASE
|
||||||
from flatisfy.database.whooshalchemy import IndexService
|
from flatisfy.database.whooshalchemy import IndexService
|
||||||
|
|
||||||
@ -52,9 +47,7 @@ def init_db(database_uri=None, search_db_uri=None):
|
|||||||
Session = sessionmaker(bind=engine) # pylint: disable=locally-disabled,invalid-name
|
Session = sessionmaker(bind=engine) # pylint: disable=locally-disabled,invalid-name
|
||||||
|
|
||||||
if search_db_uri:
|
if search_db_uri:
|
||||||
index_service = IndexService(
|
index_service = IndexService(whoosh_base=search_db_uri)
|
||||||
whoosh_base=search_db_uri
|
|
||||||
)
|
|
||||||
index_service.register_class(flatisfy.models.flat.Flat)
|
index_service.register_class(flatisfy.models.flat.Flat)
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
|
53
flatisfy/database/types.py
Normal file
53
flatisfy/database/types.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
"""
|
||||||
|
This modules implements custom types in SQLAlchemy.
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
import sqlalchemy.types as types
|
||||||
|
|
||||||
|
|
||||||
|
class StringyJSON(types.TypeDecorator):
|
||||||
|
"""
|
||||||
|
Stores and retrieves JSON as TEXT for SQLite.
|
||||||
|
|
||||||
|
From
|
||||||
|
https://avacariu.me/articles/2016/compiling-json-as-text-for-sqlite-with-sqlalchemy.
|
||||||
|
|
||||||
|
.. note ::
|
||||||
|
|
||||||
|
The associated field is immutable. That is, changes to the data
|
||||||
|
(typically, changing the value of a dict field) will not trigger an
|
||||||
|
update on the SQL side upon ``commit`` as the reference to the object
|
||||||
|
will not have been updated. One should force the update by forcing an
|
||||||
|
update of the reference (by performing a ``copy`` operation on the dict
|
||||||
|
for instance).
|
||||||
|
"""
|
||||||
|
|
||||||
|
impl = types.TEXT
|
||||||
|
|
||||||
|
def process_bind_param(self, value, dialect):
|
||||||
|
"""
|
||||||
|
Process the bound param, serialize the object to JSON before saving
|
||||||
|
into database.
|
||||||
|
"""
|
||||||
|
if value is not None:
|
||||||
|
value = json.dumps(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def process_result_value(self, value, dialect):
|
||||||
|
"""
|
||||||
|
Process the value fetched from the database, deserialize the JSON
|
||||||
|
string before returning the object.
|
||||||
|
"""
|
||||||
|
if value is not None:
|
||||||
|
value = json.loads(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
# TypeEngine.with_variant says "use StringyJSON instead when
|
||||||
|
# connecting to 'sqlite'"
|
||||||
|
# pylint: disable=locally-disabled,invalid-name
|
||||||
|
MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")
|
@ -30,7 +30,6 @@ from whoosh.qparser import MultifieldParser
|
|||||||
|
|
||||||
|
|
||||||
class IndexService(object):
|
class IndexService(object):
|
||||||
|
|
||||||
def __init__(self, config=None, whoosh_base=None):
|
def __init__(self, config=None, whoosh_base=None):
|
||||||
if not whoosh_base and config:
|
if not whoosh_base and config:
|
||||||
whoosh_base = config.get("WHOOSH_BASE")
|
whoosh_base = config.get("WHOOSH_BASE")
|
||||||
@ -84,8 +83,7 @@ class IndexService(object):
|
|||||||
primary = field.name
|
primary = field.name
|
||||||
continue
|
continue
|
||||||
if field.name in model_class.__searchable__:
|
if field.name in model_class.__searchable__:
|
||||||
schema[field.name] = whoosh.fields.TEXT(
|
schema[field.name] = whoosh.fields.TEXT(analyzer=StemmingAnalyzer())
|
||||||
analyzer=StemmingAnalyzer())
|
|
||||||
return Schema(**schema), primary
|
return Schema(**schema), primary
|
||||||
|
|
||||||
def before_commit(self, session):
|
def before_commit(self, session):
|
||||||
@ -93,21 +91,18 @@ class IndexService(object):
|
|||||||
|
|
||||||
for model in session.new:
|
for model in session.new:
|
||||||
model_class = model.__class__
|
model_class = model.__class__
|
||||||
if hasattr(model_class, '__searchable__'):
|
if hasattr(model_class, "__searchable__"):
|
||||||
self.to_update.setdefault(model_class.__name__, []).append(
|
self.to_update.setdefault(model_class.__name__, []).append(("new", model))
|
||||||
("new", model))
|
|
||||||
|
|
||||||
for model in session.deleted:
|
for model in session.deleted:
|
||||||
model_class = model.__class__
|
model_class = model.__class__
|
||||||
if hasattr(model_class, '__searchable__'):
|
if hasattr(model_class, "__searchable__"):
|
||||||
self.to_update.setdefault(model_class.__name__, []).append(
|
self.to_update.setdefault(model_class.__name__, []).append(("deleted", model))
|
||||||
("deleted", model))
|
|
||||||
|
|
||||||
for model in session.dirty:
|
for model in session.dirty:
|
||||||
model_class = model.__class__
|
model_class = model.__class__
|
||||||
if hasattr(model_class, '__searchable__'):
|
if hasattr(model_class, "__searchable__"):
|
||||||
self.to_update.setdefault(model_class.__name__, []).append(
|
self.to_update.setdefault(model_class.__name__, []).append(("changed", model))
|
||||||
("changed", model))
|
|
||||||
|
|
||||||
def after_commit(self, session):
|
def after_commit(self, session):
|
||||||
"""
|
"""
|
||||||
@ -128,16 +123,11 @@ class IndexService(object):
|
|||||||
# added as a new doc. Could probably replace this with a whoosh
|
# added as a new doc. Could probably replace this with a whoosh
|
||||||
# update.
|
# update.
|
||||||
|
|
||||||
writer.delete_by_term(
|
writer.delete_by_term(primary_field, text_type(getattr(model, primary_field)))
|
||||||
primary_field, text_type(getattr(model, primary_field)))
|
|
||||||
|
|
||||||
if change_type in ("new", "changed"):
|
if change_type in ("new", "changed"):
|
||||||
attrs = dict((key, getattr(model, key))
|
attrs = dict((key, getattr(model, key)) for key in searchable)
|
||||||
for key in searchable)
|
attrs = {attr: text_type(getattr(model, attr)) for attr in attrs.keys()}
|
||||||
attrs = {
|
|
||||||
attr: text_type(getattr(model, attr))
|
|
||||||
for attr in attrs.keys()
|
|
||||||
}
|
|
||||||
attrs[primary_field] = text_type(getattr(model, primary_field))
|
attrs[primary_field] = text_type(getattr(model, primary_field))
|
||||||
writer.add_document(**attrs)
|
writer.add_document(**attrs)
|
||||||
|
|
||||||
@ -158,8 +148,7 @@ class Searcher(object):
|
|||||||
self.parser = MultifieldParser(list(fields), index.schema)
|
self.parser = MultifieldParser(list(fields), index.schema)
|
||||||
|
|
||||||
def __call__(self, session, query, limit=None):
|
def __call__(self, session, query, limit=None):
|
||||||
results = self.index.searcher().search(
|
results = self.index.searcher().search(self.parser.parse(query), limit=limit)
|
||||||
self.parser.parse(query), limit=limit)
|
|
||||||
|
|
||||||
keys = [x[self.primary] for x in results]
|
keys = [x[self.primary] for x in results]
|
||||||
primary_column = getattr(self.model_class, self.primary)
|
primary_column = getattr(self.model_class, self.primary)
|
||||||
|
@ -8,7 +8,7 @@ from builtins import str
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
import smtplib
|
import smtplib
|
||||||
|
from money import Money
|
||||||
from email.mime.multipart import MIMEMultipart
|
from email.mime.multipart import MIMEMultipart
|
||||||
from email.mime.text import MIMEText
|
from email.mime.text import MIMEText
|
||||||
from email.utils import formatdate, make_msgid
|
from email.utils import formatdate, make_msgid
|
||||||
@ -16,7 +16,7 @@ from email.utils import formatdate, make_msgid
|
|||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def send_email(server, port, subject, _from, _to, txt, html):
|
def send_email(server, port, subject, _from, _to, txt, html, username=None, password=None):
|
||||||
"""
|
"""
|
||||||
Send an email
|
Send an email
|
||||||
|
|
||||||
@ -33,16 +33,18 @@ def send_email(server, port, subject, _from, _to, txt, html):
|
|||||||
return
|
return
|
||||||
|
|
||||||
server = smtplib.SMTP(server, port)
|
server = smtplib.SMTP(server, port)
|
||||||
|
if username or password:
|
||||||
|
server.login(username or "", password or "")
|
||||||
|
|
||||||
msg = MIMEMultipart('alternative')
|
msg = MIMEMultipart("alternative")
|
||||||
msg['Subject'] = subject
|
msg["Subject"] = subject
|
||||||
msg['From'] = _from
|
msg["From"] = _from
|
||||||
msg['To'] = ', '.join(_to)
|
msg["To"] = ", ".join(_to)
|
||||||
msg['Date'] = formatdate()
|
msg["Date"] = formatdate()
|
||||||
msg['Message-ID'] = make_msgid()
|
msg["Message-ID"] = make_msgid()
|
||||||
|
|
||||||
msg.attach(MIMEText(txt, 'plain', 'utf-8'))
|
msg.attach(MIMEText(txt, "plain", "utf-8"))
|
||||||
msg.attach(MIMEText(html, 'html', 'utf-8'))
|
msg.attach(MIMEText(html, "html", "utf-8"))
|
||||||
|
|
||||||
server.sendmail(_from, _to, msg.as_string())
|
server.sendmail(_from, _to, msg.as_string())
|
||||||
server.quit()
|
server.quit()
|
||||||
@ -59,13 +61,33 @@ def send_notification(config, flats):
|
|||||||
if not flats:
|
if not flats:
|
||||||
return
|
return
|
||||||
|
|
||||||
txt = u'Hello dear user,\n\nThe following new flats have been found:\n\n'
|
i18n = {
|
||||||
html = """
|
"en": {
|
||||||
|
"subject": f"{len(flats)} new flats found!",
|
||||||
|
"hello": "Hello dear user",
|
||||||
|
"following_new_flats": "The following new flats have been found:",
|
||||||
|
"area": "area",
|
||||||
|
"cost": "cost",
|
||||||
|
"signature": "Hope you'll find what you were looking for.",
|
||||||
|
},
|
||||||
|
"fr": {
|
||||||
|
"subject": f"{len(flats)} nouvelles annonces disponibles !",
|
||||||
|
"hello": "Bonjour cher utilisateur",
|
||||||
|
"following_new_flats": "Voici les nouvelles annonces :",
|
||||||
|
"area": "surface",
|
||||||
|
"cost": "coût",
|
||||||
|
"signature": "Bonne recherche",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
trs = i18n.get(config["notification_lang"], "en")
|
||||||
|
|
||||||
|
txt = trs["hello"] + ",\n\n\n\n"
|
||||||
|
html = f"""
|
||||||
<html>
|
<html>
|
||||||
<head></head>
|
<head></head>
|
||||||
<body>
|
<body>
|
||||||
<p>Hello dear user!</p>
|
<p>{trs["hello"]}!</p>
|
||||||
<p>The following new flats have been found:
|
<p>{trs["following_new_flats"]}
|
||||||
|
|
||||||
<ul>
|
<ul>
|
||||||
"""
|
"""
|
||||||
@ -75,39 +97,58 @@ def send_notification(config, flats):
|
|||||||
for flat in flats:
|
for flat in flats:
|
||||||
title = str(flat.title)
|
title = str(flat.title)
|
||||||
flat_id = str(flat.id)
|
flat_id = str(flat.id)
|
||||||
area = str(flat.area)
|
try:
|
||||||
cost = str(flat.cost)
|
area = str(int(flat.area))
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
area = None
|
||||||
|
try:
|
||||||
|
cost = int(flat.cost)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
cost = None
|
||||||
currency = str(flat.currency)
|
currency = str(flat.currency)
|
||||||
|
|
||||||
txt += (
|
txt += f"- {title}: {website_url}#/flat/{flat_id} "
|
||||||
'- {}: {}#/flat/{} (area: {}, cost: {} {})\n'.format(
|
html += f"""
|
||||||
title, website_url, flat_id, area, cost, currency
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
html += """
|
|
||||||
<li>
|
<li>
|
||||||
<a href="{}#/flat/{}">{}</a>
|
<a href="{website_url}#/flat/{flat_id}">{title}</a>
|
||||||
(area: {}, cost: {} {})
|
"""
|
||||||
</li>
|
|
||||||
""".format(website_url, flat_id, title, area, cost, currency)
|
fields = []
|
||||||
|
if area:
|
||||||
|
fields.append(f"{trs['area']}: {area}m²")
|
||||||
|
if cost:
|
||||||
|
if currency == '$':
|
||||||
|
currency = 'USD'
|
||||||
|
if currency == '€':
|
||||||
|
currency = 'EUR'
|
||||||
|
money = Money(cost, currency).format(config["notification_lang"])
|
||||||
|
fields.append(f"{trs['cost']}: {money.format()}")
|
||||||
|
|
||||||
|
if len(fields):
|
||||||
|
txt += f'({", ".join(fields)})'
|
||||||
|
html += f'({", ".join(fields)})'
|
||||||
|
|
||||||
|
html += "</li>"
|
||||||
|
txt += "\n"
|
||||||
|
|
||||||
html += "</ul>"
|
html += "</ul>"
|
||||||
|
|
||||||
signature = (
|
signature = f"\n{trs['signature']}\n\nBye!\nFlatisfy"
|
||||||
u"\nHope you'll find what you were looking for.\n\nBye!\nFlatisfy"
|
|
||||||
)
|
|
||||||
txt += signature
|
txt += signature
|
||||||
html += signature.replace('\n', '<br>')
|
html += signature.replace("\n", "<br>")
|
||||||
|
|
||||||
html += """</p>
|
html += """</p>
|
||||||
</body>
|
</body>
|
||||||
</html>"""
|
</html>"""
|
||||||
|
|
||||||
send_email(config["smtp_server"],
|
send_email(
|
||||||
config["smtp_port"],
|
config["smtp_server"],
|
||||||
"New flats found!",
|
config["smtp_port"],
|
||||||
config["smtp_from"],
|
trs["subject"],
|
||||||
config["smtp_to"],
|
config["smtp_from"],
|
||||||
txt,
|
config["smtp_to"],
|
||||||
html)
|
txt,
|
||||||
|
html,
|
||||||
|
config.get("smtp_username"),
|
||||||
|
config.get("smtp_password"),
|
||||||
|
)
|
||||||
|
@ -10,4 +10,5 @@ class DataBuildError(Exception):
|
|||||||
"""
|
"""
|
||||||
Error occurring on building a data file.
|
Error occurring on building a data file.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
@ -9,6 +9,7 @@ import collections
|
|||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
from ratelimit import limits
|
||||||
|
|
||||||
from flatisfy import database
|
from flatisfy import database
|
||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
@ -19,27 +20,27 @@ LOGGER = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from weboob.capabilities.housing import Query, POSTS_TYPES, HOUSE_TYPES
|
from woob.capabilities.housing import Query, POSTS_TYPES, HOUSE_TYPES
|
||||||
from weboob.core.bcall import CallErrors
|
from woob.core.bcall import CallErrors
|
||||||
from weboob.core.ouiboube import WebNip
|
from woob.core.ouiboube import WebNip
|
||||||
from weboob.tools.json import WeboobEncoder
|
from woob.tools.json import WoobEncoder
|
||||||
except ImportError:
|
except ImportError:
|
||||||
LOGGER.error("Weboob is not available on your system. Make sure you "
|
LOGGER.error("Woob is not available on your system. Make sure you installed it.")
|
||||||
"installed it.")
|
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
class WeboobProxy(object):
|
class WoobProxy(object):
|
||||||
"""
|
"""
|
||||||
Wrapper around Weboob ``WebNip`` class, to fetch housing posts without
|
Wrapper around Woob ``WebNip`` class, to fetch housing posts without
|
||||||
having to spawn a subprocess.
|
having to spawn a subprocess.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def version():
|
def version():
|
||||||
"""
|
"""
|
||||||
Get Weboob version.
|
Get Woob version.
|
||||||
|
|
||||||
:return: The installed Weboob version.
|
:return: The installed Woob version.
|
||||||
"""
|
"""
|
||||||
return WebNip.VERSION
|
return WebNip.VERSION
|
||||||
|
|
||||||
@ -63,7 +64,7 @@ class WeboobProxy(object):
|
|||||||
|
|
||||||
def __init__(self, config):
|
def __init__(self, config):
|
||||||
"""
|
"""
|
||||||
Create a Weboob handle and try to load the modules.
|
Create a Woob handle and try to load the modules.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
"""
|
"""
|
||||||
@ -77,14 +78,14 @@ class WeboobProxy(object):
|
|||||||
self.webnip = WebNip(modules_path=config["modules_path"])
|
self.webnip = WebNip(modules_path=config["modules_path"])
|
||||||
|
|
||||||
# Create backends
|
# Create backends
|
||||||
self.backends = [
|
self.backends = []
|
||||||
self.webnip.load_backend(
|
for module in backends:
|
||||||
module,
|
try:
|
||||||
module,
|
self.backends.append(
|
||||||
params={}
|
self.webnip.load_backend(module, module, params={})
|
||||||
)
|
)
|
||||||
for module in backends
|
except Exception as exc:
|
||||||
]
|
raise Exception('Unable to load module ' + module) from exc
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
return self
|
return self
|
||||||
@ -94,13 +95,13 @@ class WeboobProxy(object):
|
|||||||
|
|
||||||
def build_queries(self, constraints_dict):
|
def build_queries(self, constraints_dict):
|
||||||
"""
|
"""
|
||||||
Build Weboob ``weboob.capabilities.housing.Query`` objects from the
|
Build Woob ``woob.capabilities.housing.Query`` objects from the
|
||||||
constraints defined in the configuration. Each query has at most 3
|
constraints defined in the configuration. Each query has at most 3
|
||||||
cities, to comply with housing websites limitations.
|
cities, to comply with housing websites limitations.
|
||||||
|
|
||||||
:param constraints_dict: A dictionary of constraints, as defined in the
|
:param constraints_dict: A dictionary of constraints, as defined in the
|
||||||
config.
|
config.
|
||||||
:return: A list of Weboob ``weboob.capabilities.housing.Query``
|
:return: A list of Woob ``woob.capabilities.housing.Query``
|
||||||
objects. Returns ``None`` if an error occurred.
|
objects. Returns ``None`` if an error occurred.
|
||||||
"""
|
"""
|
||||||
queries = []
|
queries = []
|
||||||
@ -114,28 +115,21 @@ class WeboobProxy(object):
|
|||||||
except CallErrors as exc:
|
except CallErrors as exc:
|
||||||
# If an error occured, just log it
|
# If an error occured, just log it
|
||||||
LOGGER.error(
|
LOGGER.error(
|
||||||
(
|
("An error occured while building query for postal code %s: %s"),
|
||||||
"An error occured while building query for "
|
|
||||||
"postal code %s: %s"
|
|
||||||
),
|
|
||||||
postal_code,
|
postal_code,
|
||||||
str(exc)
|
str(exc),
|
||||||
)
|
)
|
||||||
|
|
||||||
if not matching_cities:
|
if not matching_cities:
|
||||||
# If postal code gave no match, warn the user
|
# If postal code gave no match, warn the user
|
||||||
LOGGER.warn(
|
LOGGER.warn("Postal code %s could not be matched with a city.", postal_code)
|
||||||
"Postal code %s could not be matched with a city.",
|
|
||||||
postal_code
|
|
||||||
)
|
|
||||||
|
|
||||||
# Remove "TOUTES COMMUNES" entry which are duplicates of the individual
|
# Remove "TOUTES COMMUNES" entry which are duplicates of the individual
|
||||||
# cities entries in Logicimmo module.
|
# cities entries in Logicimmo module.
|
||||||
matching_cities = [
|
matching_cities = [
|
||||||
city
|
city
|
||||||
for city in matching_cities
|
for city in matching_cities
|
||||||
if not (city.backend == 'logicimmo' and
|
if not (city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES"))
|
||||||
city.name.startswith('TOUTES COMMUNES'))
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Then, build queries by grouping cities by at most 3
|
# Then, build queries by grouping cities by at most 3
|
||||||
@ -145,21 +139,14 @@ class WeboobProxy(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
query.house_types = [
|
query.house_types = [
|
||||||
getattr(
|
getattr(HOUSE_TYPES, house_type.upper()) for house_type in constraints_dict["house_types"]
|
||||||
HOUSE_TYPES,
|
|
||||||
house_type.upper()
|
|
||||||
)
|
|
||||||
for house_type in constraints_dict["house_types"]
|
|
||||||
]
|
]
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
LOGGER.error("Invalid house types constraint.")
|
LOGGER.error("Invalid house types constraint.")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
query.type = getattr(
|
query.type = getattr(POSTS_TYPES, constraints_dict["type"].upper())
|
||||||
POSTS_TYPES,
|
|
||||||
constraints_dict["type"].upper()
|
|
||||||
)
|
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
LOGGER.error("Invalid post type constraint.")
|
LOGGER.error("Invalid post type constraint.")
|
||||||
return None
|
return None
|
||||||
@ -176,37 +163,43 @@ class WeboobProxy(object):
|
|||||||
|
|
||||||
def query(self, query, max_entries=None, store_personal_data=False):
|
def query(self, query, max_entries=None, store_personal_data=False):
|
||||||
"""
|
"""
|
||||||
Fetch the housings posts matching a given Weboob query.
|
Fetch the housings posts matching a given Woob query.
|
||||||
|
|
||||||
:param query: A Weboob `weboob.capabilities.housing.Query`` object.
|
:param query: A Woob `woob.capabilities.housing.Query`` object.
|
||||||
:param max_entries: Maximum number of entries to fetch.
|
:param max_entries: Maximum number of entries to fetch.
|
||||||
:param store_personal_data: Whether personal data should be fetched
|
:param store_personal_data: Whether personal data should be fetched
|
||||||
from housing posts (phone number etc).
|
from housing posts (phone number etc).
|
||||||
:return: The matching housing posts, dumped as a list of JSON objects.
|
:return: The matching housing posts, dumped as a list of JSON objects.
|
||||||
"""
|
"""
|
||||||
housings = []
|
housings = []
|
||||||
|
# List the useful backends for this specific query
|
||||||
|
useful_backends = [x.backend for x in query.cities]
|
||||||
# TODO: Handle max_entries better
|
# TODO: Handle max_entries better
|
||||||
try:
|
try:
|
||||||
for housing in itertools.islice(
|
for housing in itertools.islice(
|
||||||
self.webnip.do('search_housings', query),
|
self.webnip.do(
|
||||||
max_entries
|
"search_housings",
|
||||||
|
query,
|
||||||
|
# Only run the call on the required backends.
|
||||||
|
# Otherwise, Woob is doing weird stuff and returning
|
||||||
|
# nonsense.
|
||||||
|
backends=[x for x in self.backends if x.name in useful_backends],
|
||||||
|
),
|
||||||
|
max_entries,
|
||||||
):
|
):
|
||||||
if not store_personal_data:
|
if not store_personal_data:
|
||||||
housing.phone = None
|
housing.phone = None
|
||||||
housings.append(json.dumps(housing, cls=WeboobEncoder))
|
housings.append(json.dumps(housing, cls=WoobEncoder))
|
||||||
except CallErrors as exc:
|
except CallErrors as exc:
|
||||||
# If an error occured, just log it
|
# If an error occured, just log it
|
||||||
LOGGER.error(
|
LOGGER.error("An error occured while fetching the housing posts: %s", str(exc))
|
||||||
"An error occured while fetching the housing posts: %s",
|
|
||||||
str(exc)
|
|
||||||
)
|
|
||||||
return housings
|
return housings
|
||||||
|
|
||||||
def info(self, full_flat_id, store_personal_data=False):
|
def info(self, full_flat_id, store_personal_data=False):
|
||||||
"""
|
"""
|
||||||
Get information (details) about an housing post.
|
Get information (details) about an housing post.
|
||||||
|
|
||||||
:param full_flat_id: A Weboob housing post id, in complete form
|
:param full_flat_id: A Woob housing post id, in complete form
|
||||||
(ID@BACKEND)
|
(ID@BACKEND)
|
||||||
:param store_personal_data: Whether personal data should be fetched
|
:param store_personal_data: Whether personal data should be fetched
|
||||||
from housing posts (phone number etc).
|
from housing posts (phone number etc).
|
||||||
@ -214,40 +207,32 @@ class WeboobProxy(object):
|
|||||||
"""
|
"""
|
||||||
flat_id, backend_name = full_flat_id.rsplit("@", 1)
|
flat_id, backend_name = full_flat_id.rsplit("@", 1)
|
||||||
try:
|
try:
|
||||||
backend = next(
|
backend = next(backend for backend in self.backends if backend.name == backend_name)
|
||||||
backend
|
|
||||||
for backend in self.backends
|
|
||||||
if backend.name == backend_name
|
|
||||||
)
|
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
LOGGER.error("Backend %s is not available.", backend_name)
|
LOGGER.error("Backend %s is not available.", backend_name)
|
||||||
return "{}"
|
return "{}"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
housing = backend.get_housing(flat_id)
|
housing = backend.get_housing(flat_id)
|
||||||
# Otherwise, we miss the @backend afterwards
|
|
||||||
housing.id = full_flat_id
|
|
||||||
if not store_personal_data:
|
if not store_personal_data:
|
||||||
# Ensure phone is cleared
|
# Ensure phone is cleared
|
||||||
housing.phone = None
|
housing.phone = None
|
||||||
else:
|
else:
|
||||||
# Ensure phone is fetched
|
# Ensure phone is fetched
|
||||||
backend.fillobj(housing, 'phone')
|
backend.fillobj(housing, "phone")
|
||||||
|
# Otherwise, we miss the @backend afterwards
|
||||||
|
housing.id = full_flat_id
|
||||||
|
|
||||||
return json.dumps(housing, cls=WeboobEncoder)
|
return json.dumps(housing, cls=WoobEncoder)
|
||||||
except Exception as exc: # pylint: disable=broad-except
|
except Exception as exc: # pylint: disable=broad-except
|
||||||
# If an error occured, just log it
|
# If an error occured, just log it
|
||||||
LOGGER.error(
|
LOGGER.error("An error occured while fetching housing %s: %s", full_flat_id, str(exc))
|
||||||
"An error occured while fetching housing %s: %s",
|
|
||||||
full_flat_id,
|
|
||||||
str(exc)
|
|
||||||
)
|
|
||||||
return "{}"
|
return "{}"
|
||||||
|
|
||||||
|
|
||||||
def fetch_flats(config):
|
def fetch_flats(config):
|
||||||
"""
|
"""
|
||||||
Fetch the available flats using the Flatboob / Weboob config.
|
Fetch the available flats using the Woob config.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
:return: A dict mapping constraint in config to all available matching
|
:return: A dict mapping constraint in config to all available matching
|
||||||
@ -257,38 +242,42 @@ def fetch_flats(config):
|
|||||||
|
|
||||||
for constraint_name, constraint in config["constraints"].items():
|
for constraint_name, constraint in config["constraints"].items():
|
||||||
LOGGER.info("Loading flats for constraint %s...", constraint_name)
|
LOGGER.info("Loading flats for constraint %s...", constraint_name)
|
||||||
with WeboobProxy(config) as weboob_proxy:
|
with WoobProxy(config) as woob_proxy:
|
||||||
queries = weboob_proxy.build_queries(constraint)
|
queries = woob_proxy.build_queries(constraint)
|
||||||
housing_posts = []
|
housing_posts = []
|
||||||
for query in queries:
|
for query in queries:
|
||||||
housing_posts.extend(
|
housing_posts.extend(woob_proxy.query(query, config["max_entries"], config["store_personal_data"]))
|
||||||
weboob_proxy.query(query, config["max_entries"],
|
housing_posts = housing_posts[: config["max_entries"]]
|
||||||
config["store_personal_data"])
|
|
||||||
)
|
|
||||||
LOGGER.info("Fetched %d flats.", len(housing_posts))
|
LOGGER.info("Fetched %d flats.", len(housing_posts))
|
||||||
|
|
||||||
constraint_flats_list = [json.loads(flat) for flat in housing_posts]
|
constraint_flats_list = [json.loads(flat) for flat in housing_posts]
|
||||||
constraint_flats_list = [WeboobProxy.restore_decimal_fields(flat)
|
constraint_flats_list = [WoobProxy.restore_decimal_fields(flat) for flat in constraint_flats_list]
|
||||||
for flat in constraint_flats_list]
|
|
||||||
fetched_flats[constraint_name] = constraint_flats_list
|
fetched_flats[constraint_name] = constraint_flats_list
|
||||||
return fetched_flats
|
return fetched_flats
|
||||||
|
|
||||||
|
|
||||||
|
@limits(calls=10, period=60)
|
||||||
|
def fetch_details_rate_limited(config, flat_id):
|
||||||
|
"""
|
||||||
|
Limit flats fetching to at most 10 calls per minute to avoid rate banning
|
||||||
|
"""
|
||||||
|
return fetch_details(config, flat_id)
|
||||||
|
|
||||||
|
|
||||||
def fetch_details(config, flat_id):
|
def fetch_details(config, flat_id):
|
||||||
"""
|
"""
|
||||||
Fetch the additional details for a flat using Flatboob / Weboob.
|
Fetch the additional details for a flat using Woob.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
:param flat_id: ID of the flat to fetch details for.
|
:param flat_id: ID of the flat to fetch details for.
|
||||||
:return: A flat dict with all the available data.
|
:return: A flat dict with all the available data.
|
||||||
"""
|
"""
|
||||||
with WeboobProxy(config) as weboob_proxy:
|
with WoobProxy(config) as woob_proxy:
|
||||||
LOGGER.info("Loading additional details for flat %s.", flat_id)
|
LOGGER.info("Loading additional details for flat %s.", flat_id)
|
||||||
weboob_output = weboob_proxy.info(flat_id,
|
woob_output = woob_proxy.info(flat_id, config["store_personal_data"])
|
||||||
config["store_personal_data"])
|
|
||||||
|
|
||||||
flat_details = json.loads(weboob_output)
|
flat_details = json.loads(woob_output)
|
||||||
flat_details = WeboobProxy.restore_decimal_fields(flat_details)
|
flat_details = WoobProxy.restore_decimal_fields(flat_details)
|
||||||
LOGGER.info("Fetched details for flat %s.", flat_id)
|
LOGGER.info("Fetched details for flat %s.", flat_id)
|
||||||
|
|
||||||
return flat_details
|
return flat_details
|
||||||
@ -316,10 +305,7 @@ def load_flats_from_file(json_file, config):
|
|||||||
LOGGER.info("Found %d flats.", len(flats_list))
|
LOGGER.info("Found %d flats.", len(flats_list))
|
||||||
except (IOError, ValueError):
|
except (IOError, ValueError):
|
||||||
LOGGER.error("File %s is not a valid dump file.", json_file)
|
LOGGER.error("File %s is not a valid dump file.", json_file)
|
||||||
return {
|
return {constraint_name: flats_list for constraint_name in config["constraints"]}
|
||||||
constraint_name: flats_list
|
|
||||||
for constraint_name in config["constraints"]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def load_flats_from_db(config):
|
def load_flats_from_db(config):
|
||||||
|
@ -36,48 +36,51 @@ def refine_with_housing_criteria(flats_list, constraint):
|
|||||||
for i, flat in enumerate(flats_list):
|
for i, flat in enumerate(flats_list):
|
||||||
# Check postal code
|
# Check postal code
|
||||||
postal_code = flat["flatisfy"].get("postal_code", None)
|
postal_code = flat["flatisfy"].get("postal_code", None)
|
||||||
if (
|
if postal_code and postal_code not in constraint["postal_codes"]:
|
||||||
postal_code and
|
LOGGER.info(
|
||||||
postal_code not in constraint["postal_codes"]
|
"Postal code %s for flat %s is out of range (%s).",
|
||||||
):
|
postal_code,
|
||||||
LOGGER.info("Postal code for flat %s is out of range.", flat["id"])
|
flat["id"],
|
||||||
is_ok[i] = is_ok[i] and False
|
", ".join(constraint["postal_codes"]),
|
||||||
|
)
|
||||||
|
is_ok[i] = False
|
||||||
|
# Check insee code
|
||||||
|
insee_code = flat["flatisfy"].get("insee_code", None)
|
||||||
|
if insee_code and "insee_codes" in constraint and insee_code not in constraint["insee_codes"]:
|
||||||
|
LOGGER.info(
|
||||||
|
"insee code %s for flat %s is out of range (%s).",
|
||||||
|
insee_code,
|
||||||
|
flat["id"],
|
||||||
|
", ".join(constraint["insee_codes"]),
|
||||||
|
)
|
||||||
|
is_ok[i] = False
|
||||||
|
|
||||||
# Check time_to
|
# Check time_to
|
||||||
for place_name, time in flat["flatisfy"].get("time_to", {}).items():
|
for place_name, time in flat["flatisfy"].get("time_to", {}).items():
|
||||||
time = time["time"]
|
time = time["time"]
|
||||||
is_within_interval = tools.is_within_interval(
|
is_within_interval = tools.is_within_interval(time, *(constraint["time_to"][place_name]["time"]))
|
||||||
time,
|
|
||||||
*(constraint["time_to"][place_name]["time"])
|
|
||||||
)
|
|
||||||
if not is_within_interval:
|
if not is_within_interval:
|
||||||
LOGGER.info("Flat %s is too far from place %s: %ds.",
|
LOGGER.info(
|
||||||
flat["id"], place_name, time)
|
"Flat %s is too far from place %s: %ds.",
|
||||||
|
flat["id"],
|
||||||
|
place_name,
|
||||||
|
time,
|
||||||
|
)
|
||||||
is_ok[i] = is_ok[i] and is_within_interval
|
is_ok[i] = is_ok[i] and is_within_interval
|
||||||
|
|
||||||
# Check other fields
|
# Check other fields
|
||||||
for field in ["area", "cost", "rooms", "bedrooms"]:
|
for field in ["area", "cost", "rooms", "bedrooms"]:
|
||||||
interval = constraint[field]
|
interval = constraint[field]
|
||||||
is_within_interval = tools.is_within_interval(
|
is_within_interval = tools.is_within_interval(flat.get(field, None), *interval)
|
||||||
flat.get(field, None),
|
|
||||||
*interval
|
|
||||||
)
|
|
||||||
if not is_within_interval:
|
if not is_within_interval:
|
||||||
LOGGER.info("%s for flat %s is out of range.",
|
LOGGER.info(
|
||||||
field.capitalize(), flat["id"])
|
"%s %s for flat %s is out of range.", field.capitalize(), str(flat.get(field, None)), flat["id"]
|
||||||
|
)
|
||||||
is_ok[i] = is_ok[i] and is_within_interval
|
is_ok[i] = is_ok[i] and is_within_interval
|
||||||
|
|
||||||
return (
|
return (
|
||||||
[
|
[flat for i, flat in enumerate(flats_list) if is_ok[i]],
|
||||||
flat
|
[flat for i, flat in enumerate(flats_list) if not is_ok[i]],
|
||||||
for i, flat in enumerate(flats_list)
|
|
||||||
if is_ok[i]
|
|
||||||
],
|
|
||||||
[
|
|
||||||
flat
|
|
||||||
for i, flat in enumerate(flats_list)
|
|
||||||
if not is_ok[i]
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -103,45 +106,43 @@ def refine_with_details_criteria(flats_list, constraint):
|
|||||||
|
|
||||||
for i, flat in enumerate(flats_list):
|
for i, flat in enumerate(flats_list):
|
||||||
# Check number of pictures
|
# Check number of pictures
|
||||||
has_enough_photos = tools.is_within_interval(
|
has_enough_photos = tools.is_within_interval(len(flat.get("photos", [])), constraint["minimum_nb_photos"], None)
|
||||||
len(flat.get('photos', [])),
|
|
||||||
constraint['minimum_nb_photos'],
|
|
||||||
None
|
|
||||||
)
|
|
||||||
if not has_enough_photos:
|
if not has_enough_photos:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Flat %s only has %d photos, it should have at least %d.",
|
"Flat %s only has %d photos, it should have at least %d.",
|
||||||
flat["id"],
|
flat["id"],
|
||||||
len(flat['photos']),
|
len(flat["photos"]),
|
||||||
constraint['minimum_nb_photos']
|
constraint["minimum_nb_photos"],
|
||||||
)
|
)
|
||||||
is_ok[i] = False
|
is_ok[i] = False
|
||||||
|
|
||||||
has_terms_in_description = True
|
for term in constraint["description_should_contain"]:
|
||||||
if constraint["description_should_contain"]:
|
if isinstance(term, str) and term.lower() not in flat["text"].lower():
|
||||||
has_terms_in_description = all(
|
LOGGER.info(
|
||||||
term in flat['text']
|
("Description for flat %s does not contain required term '%s'."),
|
||||||
for term in constraint["description_should_contain"]
|
flat["id"],
|
||||||
)
|
term,
|
||||||
if not has_terms_in_description:
|
)
|
||||||
LOGGER.info(
|
is_ok[i] = False
|
||||||
("Description for flat %s does not contain all the required "
|
elif isinstance(term, list) and all(x.lower() not in flat["text"].lower() for x in term):
|
||||||
"terms."),
|
LOGGER.info(
|
||||||
flat["id"]
|
("Description for flat %s does not contain any of required terms '%s'."),
|
||||||
)
|
flat["id"],
|
||||||
is_ok[i] = False
|
term,
|
||||||
|
)
|
||||||
|
is_ok[i] = False
|
||||||
|
for term in constraint["description_should_not_contain"]:
|
||||||
|
if term.lower() in flat["text"].lower():
|
||||||
|
LOGGER.info(
|
||||||
|
("Description for flat %s contains blacklisted term '%s'."),
|
||||||
|
flat["id"],
|
||||||
|
term,
|
||||||
|
)
|
||||||
|
is_ok[i] = False
|
||||||
|
|
||||||
return (
|
return (
|
||||||
[
|
[flat for i, flat in enumerate(flats_list) if is_ok[i]],
|
||||||
flat
|
[flat for i, flat in enumerate(flats_list) if not is_ok[i]],
|
||||||
for i, flat in enumerate(flats_list)
|
|
||||||
if is_ok[i]
|
|
||||||
],
|
|
||||||
[
|
|
||||||
flat
|
|
||||||
for i, flat in enumerate(flats_list)
|
|
||||||
if not is_ok[i]
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -163,30 +164,25 @@ def first_pass(flats_list, constraint, config):
|
|||||||
|
|
||||||
# Handle duplicates based on ids
|
# Handle duplicates based on ids
|
||||||
# Just remove them (no merge) as they should be the exact same object.
|
# Just remove them (no merge) as they should be the exact same object.
|
||||||
flats_list, _ = duplicates.detect(
|
flats_list, _ = duplicates.detect(flats_list, key="id", merge=False, should_intersect=False)
|
||||||
flats_list, key="id", merge=False, should_intersect=False
|
|
||||||
)
|
|
||||||
# Also merge duplicates based on urls (these may come from different
|
# Also merge duplicates based on urls (these may come from different
|
||||||
# flatboob backends)
|
# flatboob backends)
|
||||||
# This is especially useful as some websites such as entreparticuliers
|
# This is especially useful as some websites such as entreparticuliers
|
||||||
# contains a lot of leboncoin housings posts.
|
# contains a lot of leboncoin housings posts.
|
||||||
flats_list, duplicates_by_urls = duplicates.detect(
|
flats_list, duplicates_by_urls = duplicates.detect(flats_list, key="urls", merge=True, should_intersect=True)
|
||||||
flats_list, key="urls", merge=True, should_intersect=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Guess the postal codes
|
# Guess the postal codes
|
||||||
flats_list = metadata.guess_postal_code(flats_list, constraint, config)
|
flats_list = metadata.guess_postal_code(flats_list, constraint, config)
|
||||||
# Try to match with stations
|
|
||||||
flats_list = metadata.guess_stations(flats_list, constraint, config)
|
|
||||||
# Remove returned housing posts that do not match criteria
|
|
||||||
flats_list, ignored_list = refine_with_housing_criteria(flats_list,
|
|
||||||
constraint)
|
|
||||||
|
|
||||||
return {
|
if not config["ignore_station"]:
|
||||||
"new": flats_list,
|
# Try to match with stations
|
||||||
"ignored": ignored_list,
|
flats_list = metadata.guess_stations(flats_list, constraint, config)
|
||||||
"duplicate": duplicates_by_urls
|
|
||||||
}
|
# Remove returned housing posts that do not match criteria
|
||||||
|
flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
|
||||||
|
|
||||||
|
return {"new": flats_list, "ignored": ignored_list, "duplicate": duplicates_by_urls}
|
||||||
|
|
||||||
|
|
||||||
@tools.timeit
|
@tools.timeit
|
||||||
def second_pass(flats_list, constraint, config):
|
def second_pass(flats_list, constraint, config):
|
||||||
@ -213,28 +209,24 @@ def second_pass(flats_list, constraint, config):
|
|||||||
flats_list = metadata.guess_postal_code(flats_list, constraint, config)
|
flats_list = metadata.guess_postal_code(flats_list, constraint, config)
|
||||||
|
|
||||||
# Better match with stations (confirm and check better)
|
# Better match with stations (confirm and check better)
|
||||||
flats_list = metadata.guess_stations(flats_list, constraint, config)
|
if not config["ignore_station"]:
|
||||||
|
flats_list = metadata.guess_stations(flats_list, constraint, config)
|
||||||
|
|
||||||
# Compute travel time to specified points
|
# Compute travel time to specified points
|
||||||
flats_list = metadata.compute_travel_times(flats_list, constraint, config)
|
flats_list = metadata.compute_travel_times(flats_list, constraint, config)
|
||||||
|
|
||||||
# Remove returned housing posts that do not match criteria
|
# Remove returned housing posts that do not match criteria
|
||||||
flats_list, ignored_list = refine_with_housing_criteria(flats_list,
|
flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
|
||||||
constraint)
|
|
||||||
|
|
||||||
# Remove returned housing posts which do not match criteria relying on
|
# Remove returned housing posts which do not match criteria relying on
|
||||||
# fetched details.
|
# fetched details.
|
||||||
flats_list, ignored_list = refine_with_details_criteria(flats_list,
|
flats_list, ignored_list = refine_with_details_criteria(flats_list, constraint)
|
||||||
constraint)
|
|
||||||
|
|
||||||
if config["serve_images_locally"]:
|
if config["serve_images_locally"]:
|
||||||
images.download_images(flats_list, config)
|
images.download_images(flats_list, config)
|
||||||
|
|
||||||
return {
|
return {"new": flats_list, "ignored": ignored_list, "duplicate": []}
|
||||||
"new": flats_list,
|
|
||||||
"ignored": ignored_list,
|
|
||||||
"duplicate": []
|
|
||||||
}
|
|
||||||
|
|
||||||
@tools.timeit
|
@tools.timeit
|
||||||
def third_pass(flats_list, config):
|
def third_pass(flats_list, config):
|
||||||
@ -253,8 +245,4 @@ def third_pass(flats_list, config):
|
|||||||
# Deduplicate the list using every available data
|
# Deduplicate the list using every available data
|
||||||
flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)
|
flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)
|
||||||
|
|
||||||
return {
|
return {"new": flats_list, "ignored": [], "duplicate": duplicate_flats}
|
||||||
"new": flats_list,
|
|
||||||
"ignored": [],
|
|
||||||
"duplicate": duplicate_flats
|
|
||||||
}
|
|
||||||
|
@ -5,18 +5,23 @@ Caching function for pictures.
|
|||||||
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
|
import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
import requests
|
import requests
|
||||||
|
import logging
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
import PIL.Image
|
import PIL.Image
|
||||||
|
|
||||||
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class MemoryCache(object):
|
class MemoryCache(object):
|
||||||
"""
|
"""
|
||||||
A cache in memory.
|
A cache in memory.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def on_miss(key):
|
def on_miss(key):
|
||||||
"""
|
"""
|
||||||
@ -31,7 +36,7 @@ class MemoryCache(object):
|
|||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.hits = 0
|
self.hits = 0
|
||||||
self.misses = 0
|
self.misses = 0
|
||||||
self.map = {}
|
self.map = collections.OrderedDict()
|
||||||
|
|
||||||
def get(self, key):
|
def get(self, key):
|
||||||
"""
|
"""
|
||||||
@ -84,6 +89,7 @@ class ImageCache(MemoryCache):
|
|||||||
"""
|
"""
|
||||||
A cache for images, stored in memory.
|
A cache for images, stored in memory.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def compute_filename(url):
|
def compute_filename(url):
|
||||||
"""
|
"""
|
||||||
@ -99,24 +105,40 @@ class ImageCache(MemoryCache):
|
|||||||
"""
|
"""
|
||||||
Helper to actually retrieve photos if not already cached.
|
Helper to actually retrieve photos if not already cached.
|
||||||
"""
|
"""
|
||||||
filepath = os.path.join(
|
# If two many items in the cache, pop one
|
||||||
self.storage_dir,
|
if len(self.map.keys()) > self.max_items:
|
||||||
self.compute_filename(url)
|
self.map.popitem(last=False)
|
||||||
)
|
|
||||||
if os.path.isfile(filepath):
|
|
||||||
image = PIL.Image.open(filepath)
|
|
||||||
else:
|
|
||||||
req = requests.get(url)
|
|
||||||
try:
|
|
||||||
req.raise_for_status()
|
|
||||||
image = PIL.Image.open(BytesIO(req.content))
|
|
||||||
if self.storage_dir:
|
|
||||||
image.save(filepath, format=image.format)
|
|
||||||
except (requests.HTTPError, IOError):
|
|
||||||
return None
|
|
||||||
return image
|
|
||||||
|
|
||||||
def __init__(self, storage_dir=None):
|
if url.endswith(".svg"):
|
||||||
|
# Skip SVG photo which are unsupported and unlikely to be relevant
|
||||||
|
return None
|
||||||
|
|
||||||
|
filepath = None
|
||||||
|
# Try to load from local folder
|
||||||
|
if self.storage_dir:
|
||||||
|
filepath = os.path.join(self.storage_dir, self.compute_filename(url))
|
||||||
|
if os.path.isfile(filepath):
|
||||||
|
return PIL.Image.open(filepath)
|
||||||
|
# Otherwise, fetch it
|
||||||
|
try:
|
||||||
|
LOGGER.debug(f"Download photo from {url} to {filepath}")
|
||||||
|
req = requests.get(url)
|
||||||
|
req.raise_for_status()
|
||||||
|
image = PIL.Image.open(BytesIO(req.content))
|
||||||
|
if filepath:
|
||||||
|
image.save(filepath, format=image.format)
|
||||||
|
return image
|
||||||
|
except (requests.HTTPError, IOError) as exc:
|
||||||
|
LOGGER.info(f"Download photo from {url} failed: {exc}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def __init__(self, max_items=200, storage_dir=None):
|
||||||
|
"""
|
||||||
|
:param max_items: Max number of items in the cache, to prevent Out Of
|
||||||
|
Memory errors.
|
||||||
|
:param storage_dir: Directory in which images should be stored.
|
||||||
|
"""
|
||||||
|
self.max_items = max_items
|
||||||
self.storage_dir = storage_dir
|
self.storage_dir = storage_dir
|
||||||
if self.storage_dir and not os.path.isdir(self.storage_dir):
|
if self.storage_dir and not os.path.isdir(self.storage_dir):
|
||||||
os.makedirs(self.storage_dir)
|
os.makedirs(self.storage_dir)
|
||||||
|
@ -35,14 +35,14 @@ def homogeneize_phone_number(numbers):
|
|||||||
|
|
||||||
clean_numbers = []
|
clean_numbers = []
|
||||||
|
|
||||||
for number in numbers.split(','):
|
for number in numbers.split(","):
|
||||||
number = number.strip()
|
number = number.strip()
|
||||||
number = number.replace(".", "")
|
number = number.replace(".", "")
|
||||||
number = number.replace(" ", "")
|
number = number.replace(" ", "")
|
||||||
number = number.replace("-", "")
|
number = number.replace("-", "")
|
||||||
number = number.replace("(", "")
|
number = number.replace("(", "")
|
||||||
number = number.replace(")", "")
|
number = number.replace(")", "")
|
||||||
number = re.sub(r'^\+\d\d', "", number)
|
number = re.sub(r"^\+\d\d", "", number)
|
||||||
|
|
||||||
if not number.startswith("0"):
|
if not number.startswith("0"):
|
||||||
number = "0" + number
|
number = "0" + number
|
||||||
@ -94,12 +94,7 @@ def compare_photos(photo1, photo2, photo_cache, hash_threshold):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def find_number_common_photos(
|
def find_number_common_photos(flat1_photos, flat2_photos, photo_cache, hash_threshold):
|
||||||
flat1_photos,
|
|
||||||
flat2_photos,
|
|
||||||
photo_cache,
|
|
||||||
hash_threshold
|
|
||||||
):
|
|
||||||
"""
|
"""
|
||||||
Compute the number of common photos between the two lists of photos for the
|
Compute the number of common photos between the two lists of photos for the
|
||||||
flats.
|
flats.
|
||||||
@ -166,7 +161,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
|
|||||||
|
|
||||||
for flat_key, matching_flats in seen.items():
|
for flat_key, matching_flats in seen.items():
|
||||||
if flat_key is None:
|
if flat_key is None:
|
||||||
# If the key is None, it means Weboob could not load the data. In
|
# If the key is None, it means Woob could not load the data. In
|
||||||
# this case, we consider every matching item as being independant
|
# this case, we consider every matching item as being independant
|
||||||
# of the others, to avoid over-deduplication.
|
# of the others, to avoid over-deduplication.
|
||||||
unique_flats_list.extend(matching_flats)
|
unique_flats_list.extend(matching_flats)
|
||||||
@ -174,22 +169,21 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
|
|||||||
# Sort matching flats by backend precedence
|
# Sort matching flats by backend precedence
|
||||||
matching_flats.sort(
|
matching_flats.sort(
|
||||||
key=lambda flat: next(
|
key=lambda flat: next(
|
||||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
|
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
|
||||||
if flat["id"].endswith(backend)
|
|
||||||
),
|
),
|
||||||
reverse=True
|
reverse=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
if len(matching_flats) > 1:
|
if len(matching_flats) > 1:
|
||||||
LOGGER.info("Found duplicates using key \"%s\": %s.",
|
LOGGER.info(
|
||||||
key,
|
'Found duplicates using key "%s": %s.',
|
||||||
[flat["id"] for flat in matching_flats])
|
key,
|
||||||
|
[flat["id"] for flat in matching_flats],
|
||||||
|
)
|
||||||
# Otherwise, check the policy
|
# Otherwise, check the policy
|
||||||
if merge:
|
if merge:
|
||||||
# If a merge is requested, do the merge
|
# If a merge is requested, do the merge
|
||||||
unique_flats_list.append(
|
unique_flats_list.append(tools.merge_dicts(*matching_flats))
|
||||||
tools.merge_dicts(*matching_flats)
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
# Otherwise, just keep the most important of them
|
# Otherwise, just keep the most important of them
|
||||||
unique_flats_list.append(matching_flats[-1])
|
unique_flats_list.append(matching_flats[-1])
|
||||||
@ -203,8 +197,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
|
|||||||
if should_intersect:
|
if should_intersect:
|
||||||
# We added some flats twice with the above method, let's deduplicate on
|
# We added some flats twice with the above method, let's deduplicate on
|
||||||
# id.
|
# id.
|
||||||
unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True,
|
unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, should_intersect=False)
|
||||||
should_intersect=False)
|
|
||||||
|
|
||||||
return unique_flats_list, duplicate_flats
|
return unique_flats_list, duplicate_flats
|
||||||
|
|
||||||
@ -250,14 +243,12 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
|
|||||||
|
|
||||||
# They should have the same postal code, if available
|
# They should have the same postal code, if available
|
||||||
if (
|
if (
|
||||||
"flatisfy" in flat1 and "flatisfy" in flat2 and
|
"flatisfy" in flat1
|
||||||
flat1["flatisfy"].get("postal_code", None) and
|
and "flatisfy" in flat2
|
||||||
flat2["flatisfy"].get("postal_code", None)
|
and flat1["flatisfy"].get("postal_code", None)
|
||||||
|
and flat2["flatisfy"].get("postal_code", None)
|
||||||
):
|
):
|
||||||
assert (
|
assert flat1["flatisfy"]["postal_code"] == flat2["flatisfy"]["postal_code"]
|
||||||
flat1["flatisfy"]["postal_code"] ==
|
|
||||||
flat2["flatisfy"]["postal_code"]
|
|
||||||
)
|
|
||||||
n_common_items += 1
|
n_common_items += 1
|
||||||
|
|
||||||
# TODO: Better text comparison (one included in the other, fuzzymatch)
|
# TODO: Better text comparison (one included in the other, fuzzymatch)
|
||||||
@ -272,35 +263,23 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
|
|||||||
flat2_phone = homogeneize_phone_number(flat2["phone"])
|
flat2_phone = homogeneize_phone_number(flat2["phone"])
|
||||||
if flat1_phone and flat2_phone:
|
if flat1_phone and flat2_phone:
|
||||||
# Use an "in" test as there could be multiple phone numbers
|
# Use an "in" test as there could be multiple phone numbers
|
||||||
# returned by a weboob module
|
# returned by a Woob module
|
||||||
if flat1_phone in flat2_phone or flat2_phone in flat1_phone:
|
if flat1_phone in flat2_phone or flat2_phone in flat1_phone:
|
||||||
n_common_items += 4 # Counts much more than the rest
|
n_common_items += 4 # Counts much more than the rest
|
||||||
|
|
||||||
# If the two flats are from the same website and have a
|
# If the two flats are from the same website and have a
|
||||||
# different float part, consider they cannot be duplicates. See
|
# different float part, consider they cannot be duplicates. See
|
||||||
# https://framagit.org/phyks/Flatisfy/issues/100.
|
# https://framagit.org/phyks/Flatisfy/issues/100.
|
||||||
both_are_from_same_backend = (
|
both_are_from_same_backend = flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
|
||||||
flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
|
both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
|
||||||
)
|
both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
|
||||||
both_have_float_part = (
|
|
||||||
(flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
|
|
||||||
)
|
|
||||||
both_have_equal_float_part = (
|
|
||||||
(flat1["area"] % 1) == (flat2["area"] % 1)
|
|
||||||
)
|
|
||||||
if both_have_float_part and both_are_from_same_backend:
|
if both_have_float_part and both_are_from_same_backend:
|
||||||
assert both_have_equal_float_part
|
assert both_have_equal_float_part
|
||||||
|
|
||||||
if flat1.get("photos", []) and flat2.get("photos", []):
|
if flat1.get("photos", []) and flat2.get("photos", []):
|
||||||
n_common_photos = find_number_common_photos(
|
n_common_photos = find_number_common_photos(flat1["photos"], flat2["photos"], photo_cache, hash_threshold)
|
||||||
flat1["photos"],
|
|
||||||
flat2["photos"],
|
|
||||||
photo_cache,
|
|
||||||
hash_threshold
|
|
||||||
)
|
|
||||||
|
|
||||||
min_number_photos = min(len(flat1["photos"]),
|
min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
|
||||||
len(flat2["photos"]))
|
|
||||||
|
|
||||||
# Either all the photos are the same, or there are at least
|
# Either all the photos are the same, or there are at least
|
||||||
# three common photos.
|
# three common photos.
|
||||||
@ -332,9 +311,7 @@ def deep_detect(flats_list, config):
|
|||||||
storage_dir = os.path.join(config["data_directory"], "images")
|
storage_dir = os.path.join(config["data_directory"], "images")
|
||||||
else:
|
else:
|
||||||
storage_dir = None
|
storage_dir = None
|
||||||
photo_cache = ImageCache(
|
photo_cache = ImageCache(storage_dir=storage_dir)
|
||||||
storage_dir=storage_dir
|
|
||||||
)
|
|
||||||
|
|
||||||
LOGGER.info("Running deep duplicates detection.")
|
LOGGER.info("Running deep duplicates detection.")
|
||||||
matching_flats = collections.defaultdict(list)
|
matching_flats = collections.defaultdict(list)
|
||||||
@ -347,30 +324,26 @@ def deep_detect(flats_list, config):
|
|||||||
if flat2["id"] in matching_flats[flat1["id"]]:
|
if flat2["id"] in matching_flats[flat1["id"]]:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
n_common_items = get_duplicate_score(
|
n_common_items = get_duplicate_score(flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"])
|
||||||
flat1,
|
|
||||||
flat2,
|
|
||||||
photo_cache,
|
|
||||||
config["duplicate_image_hash_threshold"]
|
|
||||||
)
|
|
||||||
|
|
||||||
# Minimal score to consider they are duplicates
|
# Minimal score to consider they are duplicates
|
||||||
if n_common_items >= config["duplicate_threshold"]:
|
if n_common_items >= config["duplicate_threshold"]:
|
||||||
# Mark flats as duplicates
|
# Mark flats as duplicates
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
("Found duplicates using deep detection: (%s, %s). "
|
("Found duplicates using deep detection: (%s, %s). Score is %d."),
|
||||||
"Score is %d."),
|
|
||||||
flat1["id"],
|
flat1["id"],
|
||||||
flat2["id"],
|
flat2["id"],
|
||||||
n_common_items
|
n_common_items,
|
||||||
)
|
)
|
||||||
matching_flats[flat1["id"]].append(flat2["id"])
|
matching_flats[flat1["id"]].append(flat2["id"])
|
||||||
matching_flats[flat2["id"]].append(flat1["id"])
|
matching_flats[flat2["id"]].append(flat1["id"])
|
||||||
|
|
||||||
if photo_cache.total():
|
if photo_cache.total():
|
||||||
LOGGER.debug("Photo cache: hits: %d%% / misses: %d%%.",
|
LOGGER.debug(
|
||||||
photo_cache.hit_rate(),
|
"Photo cache: hits: %d%% / misses: %d%%.",
|
||||||
photo_cache.miss_rate())
|
photo_cache.hit_rate(),
|
||||||
|
photo_cache.miss_rate(),
|
||||||
|
)
|
||||||
|
|
||||||
seen_ids = []
|
seen_ids = []
|
||||||
duplicate_flats = []
|
duplicate_flats = []
|
||||||
@ -381,16 +354,11 @@ def deep_detect(flats_list, config):
|
|||||||
|
|
||||||
seen_ids.extend(matching_flats[flat_id])
|
seen_ids.extend(matching_flats[flat_id])
|
||||||
to_merge = sorted(
|
to_merge = sorted(
|
||||||
[
|
[flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
|
||||||
flat
|
|
||||||
for flat in flats_list
|
|
||||||
if flat["id"] in matching_flats[flat_id]
|
|
||||||
],
|
|
||||||
key=lambda flat: next(
|
key=lambda flat: next(
|
||||||
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE)
|
i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
|
||||||
if flat["id"].endswith(backend)
|
|
||||||
),
|
),
|
||||||
reverse=True
|
reverse=True,
|
||||||
)
|
)
|
||||||
unique_flats_list.append(tools.merge_dicts(*to_merge))
|
unique_flats_list.append(tools.merge_dicts(*to_merge))
|
||||||
# The ID of the added merged flat will be the one of the last item
|
# The ID of the added merged flat will be the one of the last item
|
||||||
|
@ -22,14 +22,8 @@ def download_images(flats_list, config):
|
|||||||
:param flats_list: A list of flats dicts.
|
:param flats_list: A list of flats dicts.
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
"""
|
"""
|
||||||
photo_cache = ImageCache(
|
photo_cache = ImageCache(storage_dir=os.path.join(config["data_directory"], "images"))
|
||||||
storage_dir=os.path.join(config["data_directory"], "images")
|
for flat in flats_list:
|
||||||
)
|
|
||||||
flats_list_length = len(flats_list)
|
|
||||||
for i, flat in enumerate(flats_list):
|
|
||||||
LOGGER.info(
|
|
||||||
"Downloading photos for flat %d/%d.", i + 1, flats_list_length
|
|
||||||
)
|
|
||||||
for photo in flat["photos"]:
|
for photo in flat["photos"]:
|
||||||
# Download photo
|
# Download photo
|
||||||
image = photo_cache.get(photo["url"])
|
image = photo_cache.get(photo["url"])
|
||||||
|
@ -12,6 +12,7 @@ import re
|
|||||||
|
|
||||||
from flatisfy import data
|
from flatisfy import data
|
||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
|
from flatisfy.constants import TimeToModes
|
||||||
from flatisfy.models.postal_code import PostalCode
|
from flatisfy.models.postal_code import PostalCode
|
||||||
from flatisfy.models.public_transport import PublicTransport
|
from flatisfy.models.public_transport import PublicTransport
|
||||||
|
|
||||||
@ -75,10 +76,10 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
|
|||||||
|
|
||||||
Example::
|
Example::
|
||||||
|
|
||||||
>>> match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
|
>>> fuzzy_match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
|
||||||
[("Paris 14", 100)
|
[("Paris 14", 100)
|
||||||
|
|
||||||
>>> match( \
|
>>> fuzzy_match( \
|
||||||
"Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \
|
"Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \
|
||||||
"Mouton-Duvernet", \
|
"Mouton-Duvernet", \
|
||||||
["saint-jacques", "denfert rochereau", "duvernet", "toto"], \
|
["saint-jacques", "denfert rochereau", "duvernet", "toto"], \
|
||||||
@ -87,8 +88,8 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
|
|||||||
[('denfert rochereau', 100), ('saint-jacques', 76)]
|
[('denfert rochereau', 100), ('saint-jacques', 76)]
|
||||||
"""
|
"""
|
||||||
# TODO: Is there a better confidence measure?
|
# TODO: Is there a better confidence measure?
|
||||||
normalized_query = tools.normalize_string(query)
|
normalized_query = tools.normalize_string(query).replace("saint", "st")
|
||||||
normalized_choices = [tools.normalize_string(choice) for choice in choices]
|
normalized_choices = [tools.normalize_string(choice).replace("saint", "st") for choice in choices]
|
||||||
|
|
||||||
# Remove duplicates in the choices list
|
# Remove duplicates in the choices list
|
||||||
unique_normalized_choices = tools.uniqify(normalized_choices)
|
unique_normalized_choices = tools.uniqify(normalized_choices)
|
||||||
@ -96,13 +97,9 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
|
|||||||
# Get the matches (normalized strings)
|
# Get the matches (normalized strings)
|
||||||
# Keep only ``limit`` matches.
|
# Keep only ``limit`` matches.
|
||||||
matches = sorted(
|
matches = sorted(
|
||||||
[
|
[(choice, len(choice)) for choice in tools.uniqify(unique_normalized_choices) if choice in normalized_query],
|
||||||
(choice, len(choice))
|
|
||||||
for choice in tools.uniqify(unique_normalized_choices)
|
|
||||||
if choice in normalized_query
|
|
||||||
],
|
|
||||||
key=lambda x: x[1],
|
key=lambda x: x[1],
|
||||||
reverse=True
|
reverse=True,
|
||||||
)
|
)
|
||||||
if limit:
|
if limit:
|
||||||
matches = matches[:limit]
|
matches = matches[:limit]
|
||||||
@ -110,22 +107,66 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
|
|||||||
# Update confidence
|
# Update confidence
|
||||||
if matches:
|
if matches:
|
||||||
max_confidence = max(match[1] for match in matches)
|
max_confidence = max(match[1] for match in matches)
|
||||||
matches = [
|
matches = [(x[0], int(x[1] / max_confidence * 100)) for x in matches]
|
||||||
(x[0], int(x[1] / max_confidence * 100))
|
|
||||||
for x in matches
|
|
||||||
]
|
|
||||||
|
|
||||||
# Convert back matches to original strings
|
# Convert back matches to original strings
|
||||||
# Also filter out matches below threshold
|
# Also filter out matches below threshold
|
||||||
matches = [
|
matches = [(choices[normalized_choices.index(x[0])], x[1]) for x in matches if x[1] >= threshold]
|
||||||
(choices[normalized_choices.index(x[0])], x[1])
|
|
||||||
for x in matches
|
|
||||||
if x[1] >= threshold
|
|
||||||
]
|
|
||||||
|
|
||||||
return matches
|
return matches
|
||||||
|
|
||||||
|
|
||||||
|
def guess_location_position(location, cities, constraint, must_match):
|
||||||
|
# try to find a city
|
||||||
|
# Find all fuzzy-matching cities
|
||||||
|
postal_code = None
|
||||||
|
insee_code = None
|
||||||
|
position = None
|
||||||
|
|
||||||
|
matched_cities = fuzzy_match(location, [x.name for x in cities], limit=None)
|
||||||
|
if matched_cities:
|
||||||
|
# Find associated postal codes
|
||||||
|
matched_postal_codes = []
|
||||||
|
for matched_city_name, _ in matched_cities:
|
||||||
|
postal_code_objects_for_city = [x for x in cities if x.name == matched_city_name]
|
||||||
|
insee_code = [pc.insee_code for pc in postal_code_objects_for_city][0]
|
||||||
|
matched_postal_codes.extend(pc.postal_code for pc in postal_code_objects_for_city)
|
||||||
|
# Try to match them with postal codes in config constraint
|
||||||
|
matched_postal_codes_in_config = set(matched_postal_codes) & set(constraint["postal_codes"])
|
||||||
|
if matched_postal_codes_in_config:
|
||||||
|
# If there are some matched postal codes which are also in
|
||||||
|
# config, use them preferentially. This avoid ignoring
|
||||||
|
# incorrectly some flats in cities with multiple postal
|
||||||
|
# codes, see #110.
|
||||||
|
postal_code = next(iter(matched_postal_codes_in_config))
|
||||||
|
else:
|
||||||
|
# Otherwise, simply take any matched postal code.
|
||||||
|
postal_code = matched_postal_codes[0]
|
||||||
|
|
||||||
|
# take the city position
|
||||||
|
for matched_city_name, _ in matched_cities:
|
||||||
|
postal_code_objects_for_city = [
|
||||||
|
x for x in cities if x.name == matched_city_name and x.postal_code == postal_code
|
||||||
|
]
|
||||||
|
if len(postal_code_objects_for_city):
|
||||||
|
position = {
|
||||||
|
"lat": postal_code_objects_for_city[0].lat,
|
||||||
|
"lng": postal_code_objects_for_city[0].lng,
|
||||||
|
}
|
||||||
|
LOGGER.debug(("Found position %s using city %s."), position, matched_city_name)
|
||||||
|
break
|
||||||
|
|
||||||
|
if not postal_code and must_match:
|
||||||
|
postal_code = cities[0].postal_code
|
||||||
|
position = {
|
||||||
|
"lat": cities[0].lat,
|
||||||
|
"lng": cities[0].lng,
|
||||||
|
}
|
||||||
|
insee_code = cities[0].insee_code
|
||||||
|
|
||||||
|
return (postal_code, insee_code, position)
|
||||||
|
|
||||||
|
|
||||||
def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
||||||
"""
|
"""
|
||||||
Try to guess the postal code from the location of the flats.
|
Try to guess the postal code from the location of the flats.
|
||||||
@ -140,24 +181,27 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
|
|
||||||
:return: An updated list of flats dict with guessed postal code.
|
:return: An updated list of flats dict with guessed postal code.
|
||||||
"""
|
"""
|
||||||
opendata = {
|
opendata = {"postal_codes": data.load_data(PostalCode, constraint, config)}
|
||||||
"postal_codes": data.load_data(PostalCode, constraint, config)
|
|
||||||
}
|
|
||||||
|
|
||||||
for flat in flats_list:
|
for flat in flats_list:
|
||||||
location = flat.get("location", None)
|
location = flat.get("location", None)
|
||||||
|
if not location:
|
||||||
|
addr = flat.get("address", None)
|
||||||
|
if addr:
|
||||||
|
location = addr["full_address"]
|
||||||
if not location:
|
if not location:
|
||||||
# Skip everything if empty location
|
# Skip everything if empty location
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
(
|
("No location field for flat %s, skipping postal code lookup. (%s)"),
|
||||||
"No location field for flat %s, skipping postal "
|
flat["id"],
|
||||||
"code lookup."
|
flat.get("address"),
|
||||||
),
|
|
||||||
flat["id"]
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
postal_code = None
|
postal_code = None
|
||||||
|
insee_code = None
|
||||||
|
position = None
|
||||||
|
|
||||||
# Try to find a postal code directly
|
# Try to find a postal code directly
|
||||||
try:
|
try:
|
||||||
postal_code = re.search(r"[0-9]{5}", location)
|
postal_code = re.search(r"[0-9]{5}", location)
|
||||||
@ -165,86 +209,51 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
postal_code = postal_code.group(0)
|
postal_code = postal_code.group(0)
|
||||||
|
|
||||||
# Check the postal code is within the db
|
# Check the postal code is within the db
|
||||||
assert postal_code in [x.postal_code
|
assert postal_code in [x.postal_code for x in opendata["postal_codes"]]
|
||||||
for x in opendata["postal_codes"]]
|
|
||||||
|
|
||||||
LOGGER.info(
|
LOGGER.debug(
|
||||||
"Found postal code in location field for flat %s: %s.",
|
"Found postal code directly in location field for flat %s: %s.",
|
||||||
flat["id"], postal_code
|
flat["id"],
|
||||||
|
postal_code,
|
||||||
)
|
)
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
postal_code = None
|
postal_code = None
|
||||||
|
|
||||||
# If not found, try to find a city
|
# Then fetch position (and postal_code is couldn't be found earlier)
|
||||||
if not postal_code:
|
cities = opendata["postal_codes"]
|
||||||
# Find all fuzzy-matching cities
|
if postal_code:
|
||||||
matched_cities = fuzzy_match(
|
cities = [x for x in cities if x.postal_code == postal_code]
|
||||||
location,
|
(postal_code, insee_code, position) = guess_location_position(
|
||||||
[x.name for x in opendata["postal_codes"]],
|
location, cities, constraint, postal_code is not None
|
||||||
limit=None
|
)
|
||||||
)
|
|
||||||
if matched_cities:
|
|
||||||
# Find associated postal codes
|
|
||||||
matched_postal_codes = []
|
|
||||||
for matched_city_name, _ in matched_cities:
|
|
||||||
postal_code_objects_for_city = [
|
|
||||||
x for x in opendata["postal_codes"]
|
|
||||||
if x.name == matched_city_name
|
|
||||||
]
|
|
||||||
matched_postal_codes.extend(
|
|
||||||
pc.postal_code
|
|
||||||
for pc in postal_code_objects_for_city
|
|
||||||
)
|
|
||||||
# Try to match them with postal codes in config constraint
|
|
||||||
matched_postal_codes_in_config = (
|
|
||||||
set(matched_postal_codes) & set(constraint["postal_codes"])
|
|
||||||
)
|
|
||||||
if matched_postal_codes_in_config:
|
|
||||||
# If there are some matched postal codes which are also in
|
|
||||||
# config, use them preferentially. This avoid ignoring
|
|
||||||
# incorrectly some flats in cities with multiple postal
|
|
||||||
# codes, see #110.
|
|
||||||
postal_code = next(iter(matched_postal_codes_in_config))
|
|
||||||
else:
|
|
||||||
# Otherwise, simply take any matched postal code.
|
|
||||||
postal_code = matched_postal_codes[0]
|
|
||||||
LOGGER.info(
|
|
||||||
("Found postal code in location field through city lookup "
|
|
||||||
"for flat %s: %s."),
|
|
||||||
flat["id"], postal_code
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check that postal code is not too far from the ones listed in config,
|
# Check that postal code is not too far from the ones listed in config,
|
||||||
# limit bad fuzzy matching
|
# limit bad fuzzy matching
|
||||||
if postal_code and distance_threshold:
|
if postal_code and distance_threshold:
|
||||||
distance = min(
|
distance = min(
|
||||||
tools.distance(
|
tools.distance(
|
||||||
next(
|
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code),
|
||||||
(x.lat, x.lng)
|
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == constraint_postal_code),
|
||||||
for x in opendata["postal_codes"]
|
|
||||||
if x.postal_code == postal_code
|
|
||||||
),
|
|
||||||
next(
|
|
||||||
(x.lat, x.lng)
|
|
||||||
for x in opendata["postal_codes"]
|
|
||||||
if x.postal_code == constraint_postal_code
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
for constraint_postal_code in constraint["postal_codes"]
|
for constraint_postal_code in constraint["postal_codes"]
|
||||||
)
|
)
|
||||||
|
|
||||||
if distance > distance_threshold:
|
if distance > distance_threshold:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
("Postal code %s found for flat %s is off-constraints "
|
(
|
||||||
"(distance is %dm > %dm). Let's consider it is an "
|
"Postal code %s found for flat %s @ %s is off-constraints "
|
||||||
"artifact match and keep the post without this postal "
|
"(distance is %dm > %dm). Let's consider it is an "
|
||||||
"code."),
|
"artifact match and keep the post without this postal "
|
||||||
|
"code."
|
||||||
|
),
|
||||||
postal_code,
|
postal_code,
|
||||||
flat["id"],
|
flat["id"],
|
||||||
|
location,
|
||||||
int(distance),
|
int(distance),
|
||||||
int(distance_threshold)
|
int(distance_threshold),
|
||||||
)
|
)
|
||||||
postal_code = None
|
postal_code = None
|
||||||
|
position = None
|
||||||
|
|
||||||
# Store it
|
# Store it
|
||||||
if postal_code:
|
if postal_code:
|
||||||
@ -252,12 +261,28 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
if existing_postal_code and existing_postal_code != postal_code:
|
if existing_postal_code and existing_postal_code != postal_code:
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Replacing previous postal code %s by %s for flat %s.",
|
"Replacing previous postal code %s by %s for flat %s.",
|
||||||
existing_postal_code, postal_code, flat["id"]
|
existing_postal_code,
|
||||||
|
postal_code,
|
||||||
|
flat["id"],
|
||||||
)
|
)
|
||||||
flat["flatisfy"]["postal_code"] = postal_code
|
flat["flatisfy"]["postal_code"] = postal_code
|
||||||
else:
|
else:
|
||||||
LOGGER.info("No postal code found for flat %s.", flat["id"])
|
LOGGER.info("No postal code found for flat %s.", flat["id"])
|
||||||
|
|
||||||
|
if insee_code:
|
||||||
|
flat["flatisfy"]["insee_code"] = insee_code
|
||||||
|
|
||||||
|
if position:
|
||||||
|
flat["flatisfy"]["position"] = position
|
||||||
|
LOGGER.debug(
|
||||||
|
"found postal_code=%s insee_code=%s position=%s for flat %s (%s).",
|
||||||
|
postal_code,
|
||||||
|
insee_code,
|
||||||
|
position,
|
||||||
|
flat["id"],
|
||||||
|
location,
|
||||||
|
)
|
||||||
|
|
||||||
return flats_list
|
return flats_list
|
||||||
|
|
||||||
|
|
||||||
@ -271,10 +296,10 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
|
|
||||||
:return: An updated list of flats dict with guessed nearby stations.
|
:return: An updated list of flats dict with guessed nearby stations.
|
||||||
"""
|
"""
|
||||||
distance_threshold = config['max_distance_housing_station']
|
distance_threshold = config["max_distance_housing_station"]
|
||||||
opendata = {
|
opendata = {
|
||||||
"postal_codes": data.load_data(PostalCode, constraint, config),
|
"postal_codes": data.load_data(PostalCode, constraint, config),
|
||||||
"stations": data.load_data(PublicTransport, constraint, config)
|
"stations": data.load_data(PublicTransport, constraint, config),
|
||||||
}
|
}
|
||||||
|
|
||||||
for flat in flats_list:
|
for flat in flats_list:
|
||||||
@ -282,18 +307,27 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
|
|
||||||
if not flat_station:
|
if not flat_station:
|
||||||
# Skip everything if empty station
|
# Skip everything if empty station
|
||||||
LOGGER.info(
|
LOGGER.info("No stations field for flat %s, skipping stations lookup.", flat["id"])
|
||||||
"No stations field for flat %s, skipping stations lookup.",
|
|
||||||
flat["id"]
|
|
||||||
)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
matched_stations = fuzzy_match(
|
# Woob modules can return several stations in a comma-separated list.
|
||||||
flat_station,
|
flat_stations = flat_station.split(",")
|
||||||
[x.name for x in opendata["stations"]],
|
# But some stations containing a comma exist, so let's add the initial
|
||||||
limit=10,
|
# value to the list of stations to check if there was one.
|
||||||
threshold=50
|
if len(flat_stations) > 1:
|
||||||
)
|
flat_stations.append(flat_station)
|
||||||
|
|
||||||
|
matched_stations = []
|
||||||
|
for tentative_station in flat_stations:
|
||||||
|
matched_stations += fuzzy_match(
|
||||||
|
tentative_station,
|
||||||
|
[x.name for x in opendata["stations"]],
|
||||||
|
limit=10,
|
||||||
|
threshold=50,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Keep only one occurrence of each station
|
||||||
|
matched_stations = list(set(matched_stations))
|
||||||
|
|
||||||
# Filter out the stations that are obviously too far and not well
|
# Filter out the stations that are obviously too far and not well
|
||||||
# guessed
|
# guessed
|
||||||
@ -302,54 +336,43 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
if postal_code:
|
if postal_code:
|
||||||
# If there is a postal code, check that the matched station is
|
# If there is a postal code, check that the matched station is
|
||||||
# closed to it
|
# closed to it
|
||||||
postal_code_gps = next(
|
postal_code_gps = next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code)
|
||||||
(x.lat, x.lng)
|
|
||||||
for x in opendata["postal_codes"]
|
|
||||||
if x.postal_code == postal_code
|
|
||||||
)
|
|
||||||
for station in matched_stations:
|
for station in matched_stations:
|
||||||
# Note that multiple stations with the same name exist in a
|
# Note that multiple stations with the same name exist in a
|
||||||
# city, hence the list of stations objects for a given matching
|
# city, hence the list of stations objects for a given matching
|
||||||
# station name.
|
# station name.
|
||||||
stations_objects = [
|
stations_objects = [x for x in opendata["stations"] if x.name == station[0]]
|
||||||
x for x in opendata["stations"] if x.name == station[0]
|
|
||||||
]
|
|
||||||
for station_data in stations_objects:
|
for station_data in stations_objects:
|
||||||
distance = tools.distance(
|
distance = tools.distance((station_data.lat, station_data.lng), postal_code_gps)
|
||||||
(station_data.lat, station_data.lng),
|
|
||||||
postal_code_gps
|
|
||||||
)
|
|
||||||
if distance < distance_threshold:
|
if distance < distance_threshold:
|
||||||
# If at least one of the coordinates for a given
|
# If at least one of the coordinates for a given
|
||||||
# station is close enough, that's ok and we can add
|
# station is close enough, that's ok and we can add
|
||||||
# the station
|
# the station
|
||||||
good_matched_stations.append({
|
good_matched_stations.append(
|
||||||
"key": station[0],
|
{
|
||||||
"name": station_data.name,
|
"key": station[0],
|
||||||
"confidence": station[1],
|
"name": station_data.name,
|
||||||
"gps": (station_data.lat, station_data.lng)
|
"confidence": station[1],
|
||||||
})
|
"gps": (station_data.lat, station_data.lng),
|
||||||
|
}
|
||||||
|
)
|
||||||
break
|
break
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
("Station %s is too far from flat %s (%dm > %dm), "
|
("Station %s is too far from flat %s (%dm > %dm), discarding this station."),
|
||||||
"discarding this station."),
|
|
||||||
station[0],
|
station[0],
|
||||||
flat["id"],
|
flat["id"],
|
||||||
int(distance),
|
int(distance),
|
||||||
int(distance_threshold)
|
int(distance_threshold),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
LOGGER.info(
|
LOGGER.info("No postal code for flat %s, skipping stations detection.", flat["id"])
|
||||||
"No postal code for flat %s, skipping stations detection.",
|
|
||||||
flat["id"]
|
|
||||||
)
|
|
||||||
|
|
||||||
if not good_matched_stations:
|
if not good_matched_stations:
|
||||||
# No stations found, log it and cotninue with next housing
|
# No stations found, log it and cotninue with next housing
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"No stations found for flat %s, matching %s.",
|
"No stations found for flat %s, matching %s.",
|
||||||
flat["id"],
|
flat["id"],
|
||||||
flat["station"]
|
flat["station"],
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -357,29 +380,20 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
"Found stations for flat %s: %s (matching %s).",
|
"Found stations for flat %s: %s (matching %s).",
|
||||||
flat["id"],
|
flat["id"],
|
||||||
", ".join(x["name"] for x in good_matched_stations),
|
", ".join(x["name"] for x in good_matched_stations),
|
||||||
flat["station"]
|
flat["station"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# If some stations were already filled in and the result is different,
|
# If some stations were already filled in and the result is different,
|
||||||
# display some warning to the user
|
# display some warning to the user
|
||||||
if (
|
if "matched_stations" in flat["flatisfy"] and (
|
||||||
"matched_stations" in flat["flatisfy"] and
|
# Do a set comparison, as ordering is not important
|
||||||
(
|
set([station["name"] for station in flat["flatisfy"]["matched_stations"]])
|
||||||
# Do a set comparison, as ordering is not important
|
!= set([station["name"] for station in good_matched_stations])
|
||||||
set([
|
|
||||||
station["name"]
|
|
||||||
for station in flat["flatisfy"]["matched_stations"]
|
|
||||||
]) !=
|
|
||||||
set([
|
|
||||||
station["name"]
|
|
||||||
for station in good_matched_stations
|
|
||||||
])
|
|
||||||
)
|
|
||||||
):
|
):
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Replacing previously fetched stations for flat %s. Found "
|
"Replacing previously fetched stations for flat %s. Found "
|
||||||
"stations differ from the previously found ones.",
|
"stations differ from the previously found ones.",
|
||||||
flat["id"]
|
flat["id"],
|
||||||
)
|
)
|
||||||
|
|
||||||
flat["flatisfy"]["matched_stations"] = good_matched_stations
|
flat["flatisfy"]["matched_stations"] = good_matched_stations
|
||||||
@ -404,9 +418,8 @@ def compute_travel_times(flats_list, constraint, config):
|
|||||||
if not flat["flatisfy"].get("matched_stations", []):
|
if not flat["flatisfy"].get("matched_stations", []):
|
||||||
# Skip any flat without matched stations
|
# Skip any flat without matched stations
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Skipping travel time computation for flat %s. No matched "
|
"Skipping travel time computation for flat %s. No matched stations.",
|
||||||
"stations.",
|
flat["id"],
|
||||||
flat["id"]
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -417,18 +430,15 @@ def compute_travel_times(flats_list, constraint, config):
|
|||||||
# For each place, loop over the stations close to the flat, and find
|
# For each place, loop over the stations close to the flat, and find
|
||||||
# the minimum travel time.
|
# the minimum travel time.
|
||||||
for place_name, place in constraint["time_to"].items():
|
for place_name, place in constraint["time_to"].items():
|
||||||
|
mode = place.get("mode", "PUBLIC_TRANSPORT")
|
||||||
time_to_place_dict = None
|
time_to_place_dict = None
|
||||||
for station in flat["flatisfy"]["matched_stations"]:
|
for station in flat["flatisfy"]["matched_stations"]:
|
||||||
# Time from station is a dict with time and route
|
# Time from station is a dict with time and route
|
||||||
time_from_station_dict = tools.get_travel_time_between(
|
time_from_station_dict = tools.get_travel_time_between(
|
||||||
station["gps"],
|
station["gps"], place["gps"], TimeToModes[mode], config
|
||||||
place["gps"],
|
|
||||||
config
|
|
||||||
)
|
)
|
||||||
if (
|
if time_from_station_dict and (
|
||||||
time_from_station_dict and
|
time_from_station_dict["time"] < time_to_place_dict or time_to_place_dict is None
|
||||||
(time_from_station_dict["time"] < time_to_place_dict or
|
|
||||||
time_to_place_dict is None)
|
|
||||||
):
|
):
|
||||||
# If starting from this station makes the route to the
|
# If starting from this station makes the route to the
|
||||||
# specified place shorter, update
|
# specified place shorter, update
|
||||||
@ -436,8 +446,11 @@ def compute_travel_times(flats_list, constraint, config):
|
|||||||
|
|
||||||
if time_to_place_dict:
|
if time_to_place_dict:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Travel time between %s and flat %s is %ds.",
|
"Travel time between %s and flat %s by %s is %ds.",
|
||||||
place_name, flat["id"], time_to_place_dict["time"]
|
place_name,
|
||||||
|
flat["id"],
|
||||||
|
mode,
|
||||||
|
time_to_place_dict["time"],
|
||||||
)
|
)
|
||||||
flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
|
flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
|
||||||
return flats_list
|
return flats_list
|
||||||
|
@ -1,100 +0,0 @@
|
|||||||
# coding: utf-8
|
|
||||||
"""
|
|
||||||
This modules defines an SQLAlchemy ORM model for a search constraint.
|
|
||||||
"""
|
|
||||||
# pylint: disable=locally-disabled,invalid-name,too-few-public-methods
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
|
||||||
from sqlalchemy import (
|
|
||||||
Column, Float, ForeignKey, Integer, String, Table
|
|
||||||
)
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from sqlalchemy_utils.types.json import JSONType
|
|
||||||
from sqlalchemy_utils.types.scalar_list import ScalarListType
|
|
||||||
|
|
||||||
import enum
|
|
||||||
from sqlalchemy_enum_list import EnumListType
|
|
||||||
|
|
||||||
from flatisfy.database.base import BASE
|
|
||||||
|
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class HouseTypes(enum.Enum):
|
|
||||||
"""
|
|
||||||
An enum of the possible house types.
|
|
||||||
"""
|
|
||||||
APART = 0
|
|
||||||
HOUSE = 1
|
|
||||||
PARKING = 2
|
|
||||||
LAND = 3
|
|
||||||
OTHER = 4
|
|
||||||
UNKNOWN = 5
|
|
||||||
|
|
||||||
|
|
||||||
class PostTypes(enum.Enum):
|
|
||||||
"""
|
|
||||||
An enum of the possible posts types.
|
|
||||||
"""
|
|
||||||
RENT = 0
|
|
||||||
SALE = 1
|
|
||||||
SHARING = 2
|
|
||||||
|
|
||||||
|
|
||||||
association_table = Table(
|
|
||||||
'constraint_postal_codes_association', BASE.metadata,
|
|
||||||
Column('constraint_id', Integer, ForeignKey('constraints.id')),
|
|
||||||
Column('postal_code_id', Integer, ForeignKey('postal_codes.id'))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Constraint(BASE):
|
|
||||||
"""
|
|
||||||
SQLAlchemy ORM model to store a search constraint.
|
|
||||||
"""
|
|
||||||
__tablename__ = "constraints"
|
|
||||||
|
|
||||||
id = Column(String, primary_key=True)
|
|
||||||
name = Column(String)
|
|
||||||
type = Column(EnumListType(PostTypes, int))
|
|
||||||
house_types = Column(EnumListType(HouseTypes, int))
|
|
||||||
# TODO: What happens when one delete a postal code?
|
|
||||||
postal_codes = relationship("PostalCode", secondary=association_table)
|
|
||||||
|
|
||||||
area_min = Column(Float, default=None) # in m^2
|
|
||||||
area_max = Column(Float, default=None) # in m^2
|
|
||||||
|
|
||||||
cost_min = Column(Float, default=None) # in currency unit
|
|
||||||
cost_max = Column(Float, default=None) # in currency unit
|
|
||||||
|
|
||||||
rooms_min = Column(Integer, default=None)
|
|
||||||
rooms_max = Column(Integer, default=None)
|
|
||||||
|
|
||||||
bedrooms_min = Column(Integer, default=None)
|
|
||||||
bedrooms_max = Column(Integer, default=None)
|
|
||||||
|
|
||||||
minimum_nb_photos = Column(Integer, default=None)
|
|
||||||
description_should_contain = Column(ScalarListType()) # list of terms
|
|
||||||
|
|
||||||
# Dict mapping names to {"gps": [lat, lng], "time": (min, max) }
|
|
||||||
# ``min`` and ``max`` are in seconds and can be ``null``.
|
|
||||||
# TODO: Use an additional time_to_places table?
|
|
||||||
time_to = Column(JSONType)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<Constraint(id=%s, name=%s)>" % (self.id, self.name)
|
|
||||||
|
|
||||||
def json_api_repr(self):
|
|
||||||
"""
|
|
||||||
Return a dict representation of this constraint object that is JSON
|
|
||||||
serializable.
|
|
||||||
"""
|
|
||||||
constraint_repr = {
|
|
||||||
k: v
|
|
||||||
for k, v in self.__dict__.items()
|
|
||||||
if not k.startswith("_")
|
|
||||||
}
|
|
||||||
|
|
||||||
return constraint_repr
|
|
@ -8,16 +8,23 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||||||
import logging
|
import logging
|
||||||
import enum
|
import enum
|
||||||
|
|
||||||
|
import arrow
|
||||||
|
|
||||||
from sqlalchemy import (
|
from sqlalchemy import (
|
||||||
Column, Enum, Float, ForeignKey, Integer, SmallInteger, String, Table,
|
Boolean,
|
||||||
Text, inspect
|
Column,
|
||||||
|
DateTime,
|
||||||
|
Enum,
|
||||||
|
Float,
|
||||||
|
SmallInteger,
|
||||||
|
String,
|
||||||
|
Text,
|
||||||
|
inspect,
|
||||||
)
|
)
|
||||||
from sqlalchemy.orm import relationship, validates
|
from sqlalchemy.orm import validates
|
||||||
from sqlalchemy_utils.types.arrow import ArrowType
|
|
||||||
from sqlalchemy_utils.types.json import JSONType
|
|
||||||
from sqlalchemy_utils.types.scalar_list import ScalarListType
|
|
||||||
|
|
||||||
from flatisfy.database.base import BASE
|
from flatisfy.database.base import BASE
|
||||||
|
from flatisfy.database.types import MagicJSON
|
||||||
|
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
@ -27,6 +34,7 @@ class FlatUtilities(enum.Enum):
|
|||||||
"""
|
"""
|
||||||
An enum of the possible utilities status for a flat entry.
|
An enum of the possible utilities status for a flat entry.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
included = 10
|
included = 10
|
||||||
unknown = 0
|
unknown = 0
|
||||||
excluded = -10
|
excluded = -10
|
||||||
@ -36,6 +44,7 @@ class FlatStatus(enum.Enum):
|
|||||||
"""
|
"""
|
||||||
An enum of the possible status for a flat entry.
|
An enum of the possible status for a flat entry.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
user_deleted = -100
|
user_deleted = -100
|
||||||
duplicate = -20
|
duplicate = -20
|
||||||
ignored = -10
|
ignored = -10
|
||||||
@ -48,74 +57,54 @@ class FlatStatus(enum.Enum):
|
|||||||
|
|
||||||
# List of statuses that are automatically handled, and which the user cannot
|
# List of statuses that are automatically handled, and which the user cannot
|
||||||
# manually set through the UI.
|
# manually set through the UI.
|
||||||
AUTOMATED_STATUSES = [
|
AUTOMATED_STATUSES = [FlatStatus.new, FlatStatus.duplicate, FlatStatus.ignored]
|
||||||
FlatStatus.new,
|
|
||||||
FlatStatus.duplicate,
|
|
||||||
FlatStatus.ignored
|
|
||||||
]
|
|
||||||
|
|
||||||
stations_association_table = Table(
|
|
||||||
'stations_flats_association', BASE.metadata,
|
|
||||||
Column(
|
|
||||||
'public_transport_id', Integer, ForeignKey('public_transports.id')
|
|
||||||
),
|
|
||||||
Column('flat_id', Integer, ForeignKey('flats.id'))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Flat(BASE):
|
class Flat(BASE):
|
||||||
"""
|
"""
|
||||||
SQLAlchemy ORM model to store a flat.
|
SQLAlchemy ORM model to store a flat.
|
||||||
"""
|
"""
|
||||||
__tablename__ = "flats"
|
|
||||||
__searchable__ = [
|
|
||||||
"title", "text", "station", "location", "details", "notes"
|
|
||||||
]
|
|
||||||
|
|
||||||
# Weboob data
|
__tablename__ = "flats"
|
||||||
|
__searchable__ = ["title", "text", "station", "location", "details", "notes"]
|
||||||
|
|
||||||
|
# Woob data
|
||||||
id = Column(String, primary_key=True)
|
id = Column(String, primary_key=True)
|
||||||
area = Column(Float)
|
area = Column(Float)
|
||||||
bedrooms = Column(Float)
|
bedrooms = Column(Float)
|
||||||
cost = Column(Float)
|
cost = Column(Float)
|
||||||
currency = Column(String)
|
currency = Column(String)
|
||||||
utilities = Column(Enum(FlatUtilities), default=FlatUtilities.unknown)
|
utilities = Column(Enum(FlatUtilities), default=FlatUtilities.unknown)
|
||||||
date = Column(ArrowType)
|
date = Column(DateTime)
|
||||||
details = Column(JSONType)
|
details = Column(MagicJSON)
|
||||||
location = Column(String)
|
location = Column(String)
|
||||||
phone = Column(String)
|
phone = Column(String)
|
||||||
photos = Column(JSONType)
|
photos = Column(MagicJSON)
|
||||||
rooms = Column(Float)
|
rooms = Column(Float)
|
||||||
station = Column(String)
|
station = Column(String)
|
||||||
text = Column(Text)
|
text = Column(Text)
|
||||||
title = Column(String)
|
title = Column(String)
|
||||||
urls = Column(ScalarListType())
|
urls = Column(MagicJSON)
|
||||||
merged_ids = Column(ScalarListType())
|
merged_ids = Column(MagicJSON)
|
||||||
notes = Column(Text)
|
notes = Column(Text)
|
||||||
notation = Column(SmallInteger, default=0)
|
notation = Column(SmallInteger, default=0)
|
||||||
|
is_expired = Column(Boolean, default=False)
|
||||||
|
|
||||||
# Flatisfy found stations
|
# Flatisfy data
|
||||||
# TODO: What happens when one deletes a station?
|
# TODO: Should be in another table with relationships
|
||||||
flatisfy_stations = relationship("PublicTransport",
|
flatisfy_stations = Column(MagicJSON)
|
||||||
secondary=stations_association_table)
|
flatisfy_postal_code = Column(String)
|
||||||
# Flatisfy found postal code
|
flatisfy_time_to = Column(MagicJSON)
|
||||||
# TODO: What happens when one deletes a postal code?
|
flatisfy_constraint = Column(String)
|
||||||
flatisfy_postal_code_id = Column(Integer, ForeignKey('postal_codes.id'))
|
flatisfy_position = Column(MagicJSON)
|
||||||
flatisfy_postal_code = relationship("PostalCode")
|
|
||||||
# Computed time to
|
|
||||||
flatisfy_time_to = Column(JSONType)
|
|
||||||
# Constraint relationship
|
|
||||||
# TODO: What happens when one deletes a constraint?
|
|
||||||
# TODO: A flat could match multiple constraints
|
|
||||||
flatisfy_constraint_id = Column(Integer, ForeignKey('constraints.id'))
|
|
||||||
flatisfy_constraint = relationship("Constraint")
|
|
||||||
|
|
||||||
# Status
|
# Status
|
||||||
status = Column(Enum(FlatStatus), default=FlatStatus.new)
|
status = Column(Enum(FlatStatus), default=FlatStatus.new)
|
||||||
|
|
||||||
# Date for visit
|
# Date for visit
|
||||||
visit_date = Column(ArrowType)
|
visit_date = Column(DateTime)
|
||||||
|
|
||||||
@validates('utilities')
|
@validates("utilities")
|
||||||
def validate_utilities(self, _, utilities):
|
def validate_utilities(self, _, utilities):
|
||||||
"""
|
"""
|
||||||
Utilities validation method
|
Utilities validation method
|
||||||
@ -140,8 +129,7 @@ class Flat(BASE):
|
|||||||
try:
|
try:
|
||||||
return getattr(FlatStatus, status)
|
return getattr(FlatStatus, status)
|
||||||
except (AttributeError, TypeError):
|
except (AttributeError, TypeError):
|
||||||
LOGGER.warn("Unkown flat status %s, ignoring it.",
|
LOGGER.warn("Unkown flat status %s, ignoring it.", status)
|
||||||
status)
|
|
||||||
return self.status.default.arg
|
return self.status.default.arg
|
||||||
|
|
||||||
@validates("notation")
|
@validates("notation")
|
||||||
@ -153,14 +141,34 @@ class Flat(BASE):
|
|||||||
notation = int(notation)
|
notation = int(notation)
|
||||||
assert notation >= 0 and notation <= 5
|
assert notation >= 0 and notation <= 5
|
||||||
except (ValueError, AssertionError):
|
except (ValueError, AssertionError):
|
||||||
raise ValueError('notation should be an integer between 0 and 5')
|
raise ValueError("notation should be an integer between 0 and 5")
|
||||||
return notation
|
return notation
|
||||||
|
|
||||||
|
@validates("date")
|
||||||
|
def validate_date(self, _, date):
|
||||||
|
"""
|
||||||
|
Date validation method
|
||||||
|
"""
|
||||||
|
if date:
|
||||||
|
return arrow.get(date).naive
|
||||||
|
return None
|
||||||
|
|
||||||
|
@validates("visit_date")
|
||||||
|
def validate_visit_date(self, _, visit_date):
|
||||||
|
"""
|
||||||
|
Visit date validation method
|
||||||
|
"""
|
||||||
|
if visit_date:
|
||||||
|
return arrow.get(visit_date).naive
|
||||||
|
return None
|
||||||
|
|
||||||
@validates("photos")
|
@validates("photos")
|
||||||
def validate_photos(self, _, photos):
|
def validate_photos(self, _, photos):
|
||||||
"""
|
"""
|
||||||
Photos validation method
|
Photos validation method
|
||||||
"""
|
"""
|
||||||
|
if not photos:
|
||||||
|
photos = []
|
||||||
for photo in photos:
|
for photo in photos:
|
||||||
try:
|
try:
|
||||||
# Remove computed hash to avoid storing it in db
|
# Remove computed hash to avoid storing it in db
|
||||||
@ -178,22 +186,14 @@ class Flat(BASE):
|
|||||||
# Handle flatisfy metadata
|
# Handle flatisfy metadata
|
||||||
flat_dict = flat_dict.copy()
|
flat_dict = flat_dict.copy()
|
||||||
if "flatisfy" in flat_dict:
|
if "flatisfy" in flat_dict:
|
||||||
flat_dict["flatisfy_stations"] = (
|
flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get("matched_stations", [])
|
||||||
flat_dict["flatisfy"].get("matched_stations", [])
|
flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get("postal_code", None)
|
||||||
)
|
flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
|
||||||
flat_dict["flatisfy_postal_code"] = (
|
flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
|
||||||
flat_dict["flatisfy"].get("postal_code", None)
|
flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get("constraint", "default")
|
||||||
)
|
|
||||||
flat_dict["flatisfy_time_to"] = (
|
|
||||||
flat_dict["flatisfy"].get("time_to", {})
|
|
||||||
)
|
|
||||||
flat_dict["flatisfy_constraint"] = (
|
|
||||||
flat_dict["flatisfy"].get("constraint", "default")
|
|
||||||
)
|
|
||||||
del flat_dict["flatisfy"]
|
del flat_dict["flatisfy"]
|
||||||
|
|
||||||
flat_dict = {k: v for k, v in flat_dict.items()
|
flat_dict = {k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()}
|
||||||
if k in inspect(Flat).columns.keys()}
|
|
||||||
return Flat(**flat_dict)
|
return Flat(**flat_dict)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
@ -204,11 +204,7 @@ class Flat(BASE):
|
|||||||
Return a dict representation of this flat object that is JSON
|
Return a dict representation of this flat object that is JSON
|
||||||
serializable.
|
serializable.
|
||||||
"""
|
"""
|
||||||
flat_repr = {
|
flat_repr = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
|
||||||
k: v
|
|
||||||
for k, v in self.__dict__.items()
|
|
||||||
if not k.startswith("_")
|
|
||||||
}
|
|
||||||
if isinstance(flat_repr["status"], FlatStatus):
|
if isinstance(flat_repr["status"], FlatStatus):
|
||||||
flat_repr["status"] = flat_repr["status"].name
|
flat_repr["status"] = flat_repr["status"].name
|
||||||
if isinstance(flat_repr["utilities"], FlatUtilities):
|
if isinstance(flat_repr["utilities"], FlatUtilities):
|
||||||
|
@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from sqlalchemy import (
|
from sqlalchemy import Column, Float, Integer, String, UniqueConstraint
|
||||||
Column, Float, Integer, String, UniqueConstraint
|
|
||||||
)
|
|
||||||
|
|
||||||
from flatisfy.database.base import BASE
|
from flatisfy.database.base import BASE
|
||||||
|
|
||||||
@ -21,6 +19,7 @@ class PostalCode(BASE):
|
|||||||
"""
|
"""
|
||||||
SQLAlchemy ORM model to store a postal code opendata.
|
SQLAlchemy ORM model to store a postal code opendata.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__tablename__ = "postal_codes"
|
__tablename__ = "postal_codes"
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
id = Column(Integer, primary_key=True)
|
||||||
@ -28,6 +27,7 @@ class PostalCode(BASE):
|
|||||||
# following ISO 3166-2.
|
# following ISO 3166-2.
|
||||||
area = Column(String, index=True)
|
area = Column(String, index=True)
|
||||||
postal_code = Column(String, index=True)
|
postal_code = Column(String, index=True)
|
||||||
|
insee_code = Column(String, index=True)
|
||||||
name = Column(String, index=True)
|
name = Column(String, index=True)
|
||||||
lat = Column(Float)
|
lat = Column(Float)
|
||||||
lng = Column(Float)
|
lng = Column(Float)
|
||||||
@ -41,8 +41,4 @@ class PostalCode(BASE):
|
|||||||
Return a dict representation of this postal code object that is JSON
|
Return a dict representation of this postal code object that is JSON
|
||||||
serializable.
|
serializable.
|
||||||
"""
|
"""
|
||||||
return {
|
return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
|
||||||
k: v
|
|
||||||
for k, v in self.__dict__.items()
|
|
||||||
if not k.startswith("_")
|
|
||||||
}
|
|
||||||
|
@ -7,9 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from sqlalchemy import (
|
from sqlalchemy import Column, Float, Integer, String
|
||||||
Column, Float, Integer, String
|
|
||||||
)
|
|
||||||
|
|
||||||
from flatisfy.database.base import BASE
|
from flatisfy.database.base import BASE
|
||||||
|
|
||||||
@ -21,6 +19,7 @@ class PublicTransport(BASE):
|
|||||||
"""
|
"""
|
||||||
SQLAlchemy ORM model to store public transport opendata.
|
SQLAlchemy ORM model to store public transport opendata.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__tablename__ = "public_transports"
|
__tablename__ = "public_transports"
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
id = Column(Integer, primary_key=True)
|
||||||
|
@ -15,42 +15,7 @@
|
|||||||
"photos": [
|
"photos": [
|
||||||
{
|
{
|
||||||
"id": "0an3yarge9y446j653dewxu0jwy33pmwar47k2qym.jpg",
|
"id": "0an3yarge9y446j653dewxu0jwy33pmwar47k2qym.jpg",
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/0/a/n/3/0an3yarge9y446j653dewxu0jwy33pmwar47k2qym.jpg",
|
"url": "flatisfy/test_files/127028739@seloger.jpg",
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "1qnz6hpffcrd1c71htbooubgb7s57d82ie1v0zyf2.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/1/q/n/z/1qnz6hpffcrd1c71htbooubgb7s57d82ie1v0zyf2.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "16bv8yqgytefa1fq57hyk6e0y6ox8t2mh8wj2dgxq.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/1/6/b/v/16bv8yqgytefa1fq57hyk6e0y6ox8t2mh8wj2dgxq.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "1o23blwk87ew95e3vcq5ygyk10z2hy82fzo5j6hha.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/1/o/2/3/1o23blwk87ew95e3vcq5ygyk10z2hy82fzo5j6hha.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "20vuxbdp160sot4ccryf6g7g4rwxrkhz3b3tmq7zy.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/2/0/v/u/20vuxbdp160sot4ccryf6g7g4rwxrkhz3b3tmq7zy.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "00d9bpezie95lqtfmoccqg1ddrld2m64c2mcod5ha.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/0/0/d/9/00d9bpezie95lqtfmoccqg1ddrld2m64c2mcod5ha.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "0lhqf881qm2j03hz5581d8ggplp1xwwchb2rtoqgu.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/0/l/h/q/0lhqf881qm2j03hz5581d8ggplp1xwwchb2rtoqgu.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "0chwbagbf8tc0qf9sd3wryzl4gm7hkswcnrtnx2bi.jpg",
|
|
||||||
"url": "https://v.seloger.com/s/width/800/visuels/0/c/h/w/0chwbagbf8tc0qf9sd3wryzl4gm7hkswcnrtnx2bi.jpg",
|
|
||||||
"data": null
|
"data": null
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@ -69,5 +34,8 @@
|
|||||||
"Etages": "30",
|
"Etages": "30",
|
||||||
"Parking": "1",
|
"Parking": "1",
|
||||||
"Salle de Séjour": ""
|
"Salle de Séjour": ""
|
||||||
|
},
|
||||||
|
"flatisfy": {
|
||||||
|
"postal_code": "35000"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -15,42 +15,7 @@
|
|||||||
"photos": [
|
"photos": [
|
||||||
{
|
{
|
||||||
"id": "f9b2da6dfa184759aa0c349edb1cd037.jpg",
|
"id": "f9b2da6dfa184759aa0c349edb1cd037.jpg",
|
||||||
"url": "http://thbr.figarocms.net/images/2qEDBqRV-QNlp4fHVNhSCWlt6rU=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/f9b2da6dfa184759aa0c349edb1cd037.jpg",
|
"url": "flatisfy/test_files/14428129@explorimmo.jpg",
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "3f2cc9dc429d4e3dbb9f4216f109d224.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/DulZQyZkkwa0ZFBT1nYD9rUD0A4=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/3f2cc9dc429d4e3dbb9f4216f109d224.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "56ae1db620f44af6b860df10eba55870.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/EpvEffLcFbBT7spEZB2dcOHaZwA=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/56ae1db620f44af6b860df10eba55870.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "5acdef1f05314fe19111a0c3d92b8fe5.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/wHtDlJMwIrMC3cWXi8ASN4I6Zl4=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/5acdef1f05314fe19111a0c3d92b8fe5.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "16c686ea91b248129fe60011d61e060b.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/SD5VT1gxRSXSlt3pAz8r_SI3rqw=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/16c686ea91b248129fe60011d61e060b.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "e6a67d42709d443481da0feb9a7e11a1.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/u8PGKXqC0CL9AyEOI5T9TFeGs-Y=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/e6a67d42709d443481da0feb9a7e11a1.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "6888cc7bc823402198205e480c8cab6c.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/-3AseFCRaleidG2vsDJpA5BLBa4=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/6888cc7bc823402198205e480c8cab6c.jpg",
|
|
||||||
"data": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "d40dbeea9e424ea2a846f5683746ea9e.jpg",
|
|
||||||
"url": "http://thbr.figarocms.net/images/TMKBtBuucYge-BgCoUGRjxZjdBE=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/d40dbeea9e424ea2a846f5683746ea9e.jpg",
|
|
||||||
"data": null
|
"data": null
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@ -73,5 +38,8 @@
|
|||||||
"agencyFees": 0,
|
"agencyFees": 0,
|
||||||
"availabilityDate": null,
|
"availabilityDate": null,
|
||||||
"guarantee": 0
|
"guarantee": 0
|
||||||
|
},
|
||||||
|
"flatisfy": {
|
||||||
|
"postal_code": "35000"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -30,6 +30,7 @@ class LocalImageCache(ImageCache):
|
|||||||
"""
|
"""
|
||||||
A local cache for images, stored in memory.
|
A local cache for images, stored in memory.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def on_miss(path):
|
def on_miss(path):
|
||||||
"""
|
"""
|
||||||
@ -46,48 +47,34 @@ class TestTexts(unittest.TestCase):
|
|||||||
"""
|
"""
|
||||||
Checks string normalizations.
|
Checks string normalizations.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_roman_numbers(self):
|
def test_roman_numbers(self):
|
||||||
"""
|
"""
|
||||||
Checks roman numbers replacement.
|
Checks roman numbers replacement.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("XIV", tools.convert_arabic_to_roman("14"))
|
||||||
"XIV",
|
|
||||||
tools.convert_arabic_to_roman("14")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("XXXIX", tools.convert_arabic_to_roman("39"))
|
||||||
"XXXIX",
|
|
||||||
tools.convert_arabic_to_roman("39")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("40", tools.convert_arabic_to_roman("40"))
|
||||||
"40",
|
|
||||||
tools.convert_arabic_to_roman("40")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("1987", tools.convert_arabic_to_roman("1987"))
|
||||||
"1987",
|
|
||||||
tools.convert_arabic_to_roman("1987")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Dans le XVe arrondissement",
|
"Dans le XVe arrondissement",
|
||||||
tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement")
|
tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr."))
|
||||||
"XXeme arr.",
|
|
||||||
tools.convert_arabic_to_roman_in_text("20eme arr.")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"A AIX EN PROVENCE",
|
"A AIX EN PROVENCE",
|
||||||
tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE")
|
tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE"),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Montigny Le Bretonneux",
|
"Montigny Le Bretonneux",
|
||||||
tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux")
|
tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux"),
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_roman_numbers_in_text(self):
|
def test_roman_numbers_in_text(self):
|
||||||
@ -97,77 +84,54 @@ class TestTexts(unittest.TestCase):
|
|||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"dans le XVe arrondissement",
|
"dans le XVe arrondissement",
|
||||||
tools.normalize_string("Dans le 15e arrondissement")
|
tools.normalize_string("Dans le 15e arrondissement"),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("paris XVe, 75005", tools.normalize_string("Paris 15e, 75005"))
|
||||||
"paris XVe, 75005",
|
|
||||||
tools.normalize_string("Paris 15e, 75005")
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual("paris xve, 75005", tools.normalize_string("Paris XVe, 75005"))
|
||||||
"paris xve, 75005",
|
|
||||||
tools.normalize_string("Paris XVe, 75005")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_multiple_whitespaces(self):
|
def test_multiple_whitespaces(self):
|
||||||
"""
|
"""
|
||||||
Checks whitespaces are collapsed.
|
Checks whitespaces are collapsed.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("avec ascenseur", tools.normalize_string("avec ascenseur"))
|
||||||
"avec ascenseur",
|
|
||||||
tools.normalize_string("avec ascenseur")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_whitespace_trim(self):
|
def test_whitespace_trim(self):
|
||||||
"""
|
"""
|
||||||
Checks that trailing and beginning whitespaces are trimmed.
|
Checks that trailing and beginning whitespaces are trimmed.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("rennes 35000", tools.normalize_string(" Rennes 35000 "))
|
||||||
"rennes 35000",
|
|
||||||
tools.normalize_string(" Rennes 35000 ")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_accents(self):
|
def test_accents(self):
|
||||||
"""
|
"""
|
||||||
Checks accents are replaced.
|
Checks accents are replaced.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("eeeaui", tools.normalize_string(u"éèêàüï"))
|
||||||
"eeeaui",
|
|
||||||
tools.normalize_string(u"éèêàüï")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestPhoneNumbers(unittest.TestCase):
|
class TestPhoneNumbers(unittest.TestCase):
|
||||||
"""
|
"""
|
||||||
Checks phone numbers normalizations.
|
Checks phone numbers normalizations.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_prefix(self):
|
def test_prefix(self):
|
||||||
"""
|
"""
|
||||||
Checks phone numbers with international prefixes.
|
Checks phone numbers with international prefixes.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("+33605040302"))
|
||||||
"0605040302",
|
|
||||||
duplicates.homogeneize_phone_number("+33605040302")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_dots_separators(self):
|
def test_dots_separators(self):
|
||||||
"""
|
"""
|
||||||
Checks phone numbers with dots.
|
Checks phone numbers with dots.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02"))
|
||||||
"0605040302",
|
|
||||||
duplicates.homogeneize_phone_number("06.05.04.03.02")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_spaces_separators(self):
|
def test_spaces_separators(self):
|
||||||
"""
|
"""
|
||||||
Checks phone numbers with spaces.
|
Checks phone numbers with spaces.
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02"))
|
||||||
"0605040302",
|
|
||||||
duplicates.homogeneize_phone_number("06 05 04 03 02")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestPhotos(unittest.TestCase):
|
class TestPhotos(unittest.TestCase):
|
||||||
@ -183,96 +147,104 @@ class TestPhotos(unittest.TestCase):
|
|||||||
"""
|
"""
|
||||||
Compares a photo against itself.
|
Compares a photo against itself.
|
||||||
"""
|
"""
|
||||||
photo = {
|
photo = {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}
|
||||||
"url": TESTS_DATA_DIR + "127028739@seloger.jpg"
|
|
||||||
}
|
|
||||||
|
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(duplicates.compare_photos(photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD))
|
||||||
photo,
|
|
||||||
photo,
|
|
||||||
self.IMAGE_CACHE,
|
|
||||||
self.HASH_THRESHOLD
|
|
||||||
))
|
|
||||||
|
|
||||||
def test_different_photos(self):
|
def test_different_photos(self):
|
||||||
"""
|
"""
|
||||||
Compares two different photos.
|
Compares two different photos.
|
||||||
"""
|
"""
|
||||||
self.assertFalse(duplicates.compare_photos(
|
self.assertFalse(
|
||||||
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self.assertFalse(duplicates.compare_photos(
|
self.assertFalse(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def test_matching_photos(self):
|
def test_matching_photos(self):
|
||||||
"""
|
"""
|
||||||
Compares two matching photos with different size and source.
|
Compares two matching photos with different size and source.
|
||||||
"""
|
"""
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
|
{"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
|
||||||
self.HASH_THRESHOLD
|
self.IMAGE_CACHE,
|
||||||
))
|
self.HASH_THRESHOLD,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def test_matching_cropped_photos(self):
|
def test_matching_cropped_photos(self):
|
||||||
"""
|
"""
|
||||||
Compares two matching photos with one being cropped.
|
Compares two matching photos with one being cropped.
|
||||||
"""
|
"""
|
||||||
# Fixme: the image hash treshold should be 10 ideally
|
# Fixme: the image hash treshold should be 10 ideally
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "vertical.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
|
{"url": TESTS_DATA_DIR + "vertical.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
|
||||||
20
|
self.IMAGE_CACHE,
|
||||||
))
|
20,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# Fixme: the image hash treshold should be 10 ideally
|
# Fixme: the image hash treshold should be 10 ideally
|
||||||
self.assertTrue(duplicates.compare_photos(
|
self.assertTrue(
|
||||||
{"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
|
duplicates.compare_photos(
|
||||||
{"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
|
{"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
|
||||||
self.IMAGE_CACHE,
|
{"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
|
||||||
20
|
self.IMAGE_CACHE,
|
||||||
))
|
20,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TestImageCache(unittest.TestCase):
|
class TestImageCache(unittest.TestCase):
|
||||||
"""
|
"""
|
||||||
Checks image cache is working as expected.
|
Checks image cache is working as expected.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self.IMAGE_CACHE = ImageCache( # pylint: disable=invalid-name
|
self.IMAGE_CACHE = ImageCache(storage_dir=tempfile.mkdtemp(prefix="flatisfy-")) # pylint: disable=invalid-name
|
||||||
storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
|
|
||||||
)
|
|
||||||
super(TestImageCache, self).__init__(*args, **kwargs)
|
super(TestImageCache, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
def test_invalid_url(self):
|
def test_invalid_url(self):
|
||||||
@ -280,33 +252,28 @@ class TestImageCache(unittest.TestCase):
|
|||||||
Check that it returns nothing on an invalid URL.
|
Check that it returns nothing on an invalid URL.
|
||||||
"""
|
"""
|
||||||
# See https://framagit.org/phyks/Flatisfy/issues/116.
|
# See https://framagit.org/phyks/Flatisfy/issues/116.
|
||||||
self.assertIsNone(
|
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/404"))
|
||||||
self.IMAGE_CACHE.get("https://httpbin.org/status/404")
|
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/500"))
|
||||||
)
|
|
||||||
self.assertIsNone(
|
|
||||||
self.IMAGE_CACHE.get("https://httpbin.org/status/500")
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_invalid_data(self):
|
def test_invalid_data(self):
|
||||||
"""
|
"""
|
||||||
Check that it returns nothing on an invalid data.
|
Check that it returns nothing on an invalid data.
|
||||||
"""
|
"""
|
||||||
# See https://framagit.org/phyks/Flatisfy/issues/116.
|
# See https://framagit.org/phyks/Flatisfy/issues/116.
|
||||||
self.assertIsNone(
|
self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/"))
|
||||||
self.IMAGE_CACHE.get("https://httpbin.org/")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestDuplicates(unittest.TestCase):
|
class TestDuplicates(unittest.TestCase):
|
||||||
"""
|
"""
|
||||||
Checks duplicates detection.
|
Checks duplicates detection.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8 # pylint: disable=invalid-name
|
DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8 # pylint: disable=invalid-name
|
||||||
DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15 # pylint: disable=invalid-name
|
DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15 # pylint: disable=invalid-name
|
||||||
HASH_THRESHOLD = 10 # pylint: disable=invalid-name
|
HASH_THRESHOLD = 10 # pylint: disable=invalid-name
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self.IMAGE_CACHE = ImageCache( # pylint: disable=invalid-name
|
self.IMAGE_CACHE = LocalImageCache( # pylint: disable=invalid-name
|
||||||
storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
|
storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
|
||||||
)
|
)
|
||||||
super(TestDuplicates, self).__init__(*args, **kwargs)
|
super(TestDuplicates, self).__init__(*args, **kwargs)
|
||||||
@ -316,9 +283,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
"""
|
"""
|
||||||
Generates a fake flat post.
|
Generates a fake flat post.
|
||||||
"""
|
"""
|
||||||
backend = BACKENDS_BY_PRECEDENCE[
|
backend = BACKENDS_BY_PRECEDENCE[random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)]
|
||||||
random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)
|
|
||||||
]
|
|
||||||
return {
|
return {
|
||||||
"id": str(random.randint(100000, 199999)) + "@" + backend,
|
"id": str(random.randint(100000, 199999)) + "@" + backend,
|
||||||
"phone": "0607080910",
|
"phone": "0607080910",
|
||||||
@ -326,7 +291,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
"utilities": "",
|
"utilities": "",
|
||||||
"area": random.randint(200, 1500) / 10,
|
"area": random.randint(200, 1500) / 10,
|
||||||
"cost": random.randint(100000, 300000),
|
"cost": random.randint(100000, 300000),
|
||||||
"bedrooms": random.randint(1, 4)
|
"bedrooms": random.randint(1, 4),
|
||||||
}
|
}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -350,10 +315,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
"""
|
"""
|
||||||
flat1 = self.generate_fake_flat()
|
flat1 = self.generate_fake_flat()
|
||||||
flat2 = copy.deepcopy(flat1)
|
flat2 = copy.deepcopy(flat1)
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_different_prices(self):
|
def test_different_prices(self):
|
||||||
@ -364,10 +326,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
flat2 = copy.deepcopy(flat1)
|
flat2 = copy.deepcopy(flat1)
|
||||||
flat2["cost"] += 1000
|
flat2["cost"] += 1000
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_different_rooms(self):
|
def test_different_rooms(self):
|
||||||
@ -379,10 +338,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
flat2 = copy.deepcopy(flat1)
|
flat2 = copy.deepcopy(flat1)
|
||||||
flat2["rooms"] += 1
|
flat2["rooms"] += 1
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_different_areas(self):
|
def test_different_areas(self):
|
||||||
@ -393,10 +349,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
flat2 = copy.deepcopy(flat1)
|
flat2 = copy.deepcopy(flat1)
|
||||||
flat2["area"] += 10
|
flat2["area"] += 10
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_different_areas_decimals(self):
|
def test_different_areas_decimals(self):
|
||||||
@ -409,10 +362,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
flat1["area"] = 50.65
|
flat1["area"] = 50.65
|
||||||
flat2["area"] = 50.37
|
flat2["area"] = 50.37
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_different_phones(self):
|
def test_different_phones(self):
|
||||||
@ -424,10 +374,7 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
flat2 = copy.deepcopy(flat1)
|
flat2 = copy.deepcopy(flat1)
|
||||||
flat2["phone"] = "0708091011"
|
flat2["phone"] = "0708091011"
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flat1, flat2,
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)
|
||||||
|
|
||||||
def test_real_duplicates(self):
|
def test_real_duplicates(self):
|
||||||
@ -435,64 +382,59 @@ class TestDuplicates(unittest.TestCase):
|
|||||||
Two flats with same price, area and rooms quantity should be detected
|
Two flats with same price, area and rooms quantity should be detected
|
||||||
as duplicates.
|
as duplicates.
|
||||||
"""
|
"""
|
||||||
flats = self.load_files(
|
flats = self.load_files("127028739@seloger", "14428129@explorimmo")
|
||||||
"127028739@seloger",
|
|
||||||
"14428129@explorimmo"
|
|
||||||
)
|
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
score = duplicates.get_duplicate_score(flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD)
|
||||||
flats[0], flats[1],
|
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
|
||||||
)
|
|
||||||
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
||||||
|
|
||||||
flats = self.load_files(
|
# TODO: fixme, find new testing examples
|
||||||
"128358415@seloger",
|
# flats = self.load_files(
|
||||||
"14818297@explorimmo"
|
# "128358415@seloger",
|
||||||
)
|
# "14818297@explorimmo"
|
||||||
|
# )
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
# score = duplicates.get_duplicate_score(
|
||||||
flats[0], flats[1],
|
# flats[0], flats[1],
|
||||||
self.IMAGE_CACHE, 20
|
# self.IMAGE_CACHE, 20
|
||||||
)
|
# )
|
||||||
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
# self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
||||||
|
|
||||||
# Different number of photos, and some are cropped
|
# # Different number of photos, and some are cropped
|
||||||
flats = self.load_files(
|
# flats = self.load_files(
|
||||||
"124910113@seloger",
|
# "124910113@seloger",
|
||||||
"13783671@explorimmo"
|
# "13783671@explorimmo"
|
||||||
)
|
# )
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
# score = duplicates.get_duplicate_score(
|
||||||
flats[0], flats[1],
|
# flats[0], flats[1],
|
||||||
self.IMAGE_CACHE, 20
|
# self.IMAGE_CACHE, 20
|
||||||
)
|
# )
|
||||||
self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
# self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
||||||
|
|
||||||
# Same flat, different agencies, texts and photos
|
# # Same flat, different agencies, texts and photos
|
||||||
flats = self.load_files(
|
# flats = self.load_files(
|
||||||
"122509451@seloger",
|
# "122509451@seloger",
|
||||||
"127963747@seloger"
|
# "127963747@seloger"
|
||||||
)
|
# )
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
# score = duplicates.get_duplicate_score(
|
||||||
flats[0], flats[1],
|
# flats[0], flats[1],
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
# self.IMAGE_CACHE, self.HASH_THRESHOLD
|
||||||
)
|
# )
|
||||||
# Fix me : should be TestDuplicates.DUPLICATES_MIN_SCORE_WITH_PHOTOS
|
# # Fix me : should be TestDuplicates.DUPLICATES_MIN_SCORE_WITH_PHOTOS
|
||||||
self.assertGreaterEqual(score, 4)
|
# self.assertGreaterEqual(score, 4)
|
||||||
|
|
||||||
# Really similar flats, but different
|
# # Really similar flats, but different
|
||||||
flats = self.load_files(
|
# flats = self.load_files(
|
||||||
"123312807@seloger",
|
# "123312807@seloger",
|
||||||
"123314207@seloger"
|
# "123314207@seloger"
|
||||||
)
|
# )
|
||||||
|
|
||||||
score = duplicates.get_duplicate_score(
|
# score = duplicates.get_duplicate_score(
|
||||||
flats[0], flats[1],
|
# flats[0], flats[1],
|
||||||
self.IMAGE_CACHE, self.HASH_THRESHOLD
|
# self.IMAGE_CACHE, self.HASH_THRESHOLD
|
||||||
)
|
# )
|
||||||
self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
# self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)
|
||||||
|
|
||||||
|
|
||||||
def run():
|
def run():
|
||||||
@ -501,8 +443,13 @@ def run():
|
|||||||
"""
|
"""
|
||||||
LOGGER.info("Running tests…")
|
LOGGER.info("Running tests…")
|
||||||
try:
|
try:
|
||||||
for testsuite in [TestTexts, TestPhoneNumbers, TestImageCache,
|
for testsuite in [
|
||||||
TestDuplicates, TestPhotos]:
|
TestTexts,
|
||||||
|
TestPhoneNumbers,
|
||||||
|
TestImageCache,
|
||||||
|
TestDuplicates,
|
||||||
|
TestPhotos,
|
||||||
|
]:
|
||||||
suite = unittest.TestLoader().loadTestsFromTestCase(testsuite)
|
suite = unittest.TestLoader().loadTestsFromTestCase(testsuite)
|
||||||
result = unittest.TextTestRunner(verbosity=2).run(suite)
|
result = unittest.TextTestRunner(verbosity=2).run(suite)
|
||||||
assert result.wasSuccessful()
|
assert result.wasSuccessful()
|
||||||
|
@ -3,9 +3,7 @@
|
|||||||
This module contains basic utility functions, such as pretty printing of JSON
|
This module contains basic utility functions, such as pretty printing of JSON
|
||||||
output, checking that a value is within a given interval etc.
|
output, checking that a value is within a given interval etc.
|
||||||
"""
|
"""
|
||||||
from __future__ import (
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import itertools
|
import itertools
|
||||||
@ -15,9 +13,13 @@ import math
|
|||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
import imagehash
|
||||||
|
import mapbox
|
||||||
import requests
|
import requests
|
||||||
import unidecode
|
import unidecode
|
||||||
|
|
||||||
|
from flatisfy.constants import TimeToModes
|
||||||
|
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -25,6 +27,23 @@ LOGGER = logging.getLogger(__name__)
|
|||||||
NAVITIA_ENDPOINT = "https://api.navitia.io/v1/coverage/fr-idf/journeys"
|
NAVITIA_ENDPOINT = "https://api.navitia.io/v1/coverage/fr-idf/journeys"
|
||||||
|
|
||||||
|
|
||||||
|
def next_weekday(d, weekday):
|
||||||
|
"""
|
||||||
|
Find datetime object for next given weekday.
|
||||||
|
|
||||||
|
From
|
||||||
|
https://stackoverflow.com/questions/6558535/find-the-date-for-the-first-monday-after-a-given-a-date.
|
||||||
|
|
||||||
|
:param d: Datetime to search from.
|
||||||
|
:param weekday: Weekday (0 for Monday, etc)
|
||||||
|
:returns: The datetime object for the next given weekday.
|
||||||
|
"""
|
||||||
|
days_ahead = weekday - d.weekday()
|
||||||
|
if days_ahead <= 0: # Target day already happened this week
|
||||||
|
days_ahead += 7
|
||||||
|
return d + datetime.timedelta(days_ahead)
|
||||||
|
|
||||||
|
|
||||||
def convert_arabic_to_roman(arabic):
|
def convert_arabic_to_roman(arabic):
|
||||||
"""
|
"""
|
||||||
Convert an arabic literal to a roman one. Limits to 39, which is a rough
|
Convert an arabic literal to a roman one. Limits to 39, which is a rough
|
||||||
@ -40,8 +59,18 @@ def convert_arabic_to_roman(arabic):
|
|||||||
return arabic
|
return arabic
|
||||||
|
|
||||||
to_roman = {
|
to_roman = {
|
||||||
1: 'I', 2: 'II', 3: 'III', 4: 'IV', 5: 'V', 6: 'VI', 7: 'VII',
|
1: "I",
|
||||||
8: 'VIII', 9: 'IX', 10: 'X', 20: 'XX', 30: 'XXX'
|
2: "II",
|
||||||
|
3: "III",
|
||||||
|
4: "IV",
|
||||||
|
5: "V",
|
||||||
|
6: "VI",
|
||||||
|
7: "VII",
|
||||||
|
8: "VIII",
|
||||||
|
9: "IX",
|
||||||
|
10: "X",
|
||||||
|
20: "XX",
|
||||||
|
30: "XXX",
|
||||||
}
|
}
|
||||||
roman_chars_list = []
|
roman_chars_list = []
|
||||||
count = 1
|
count = 1
|
||||||
@ -50,7 +79,7 @@ def convert_arabic_to_roman(arabic):
|
|||||||
if digit != 0:
|
if digit != 0:
|
||||||
roman_chars_list.append(to_roman[digit * count])
|
roman_chars_list.append(to_roman[digit * count])
|
||||||
count *= 10
|
count *= 10
|
||||||
return ''.join(roman_chars_list[::-1])
|
return "".join(roman_chars_list[::-1])
|
||||||
|
|
||||||
|
|
||||||
def convert_arabic_to_roman_in_text(text):
|
def convert_arabic_to_roman_in_text(text):
|
||||||
@ -61,11 +90,7 @@ def convert_arabic_to_roman_in_text(text):
|
|||||||
:returns: The corresponding text with roman literals converted to
|
:returns: The corresponding text with roman literals converted to
|
||||||
arabic.
|
arabic.
|
||||||
"""
|
"""
|
||||||
return re.sub(
|
return re.sub(r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text)
|
||||||
r'(\d+)',
|
|
||||||
lambda matchobj: convert_arabic_to_roman(matchobj.group(0)),
|
|
||||||
text
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def hash_dict(func):
|
def hash_dict(func):
|
||||||
@ -75,11 +100,13 @@ def hash_dict(func):
|
|||||||
|
|
||||||
From https://stackoverflow.com/a/44776960.
|
From https://stackoverflow.com/a/44776960.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
class HDict(dict):
|
class HDict(dict):
|
||||||
"""
|
"""
|
||||||
Transform mutable dictionnary into immutable. Useful to be compatible
|
Transform mutable dictionnary into immutable. Useful to be compatible
|
||||||
with lru_cache
|
with lru_cache
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
return hash(json.dumps(self))
|
return hash(json.dumps(self))
|
||||||
|
|
||||||
@ -87,17 +114,10 @@ def hash_dict(func):
|
|||||||
"""
|
"""
|
||||||
The wrapped function
|
The wrapped function
|
||||||
"""
|
"""
|
||||||
args = tuple(
|
args = tuple([HDict(arg) if isinstance(arg, dict) else arg for arg in args])
|
||||||
[
|
kwargs = {k: HDict(v) if isinstance(v, dict) else v for k, v in kwargs.items()}
|
||||||
HDict(arg) if isinstance(arg, dict) else arg
|
|
||||||
for arg in args
|
|
||||||
]
|
|
||||||
)
|
|
||||||
kwargs = {
|
|
||||||
k: HDict(v) if isinstance(v, dict) else v
|
|
||||||
for k, v in kwargs.items()
|
|
||||||
}
|
|
||||||
return func(*args, **kwargs)
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
return wrapped
|
return wrapped
|
||||||
|
|
||||||
|
|
||||||
@ -105,10 +125,17 @@ class DateAwareJSONEncoder(json.JSONEncoder):
|
|||||||
"""
|
"""
|
||||||
Extend the default JSON encoder to serialize datetimes to iso strings.
|
Extend the default JSON encoder to serialize datetimes to iso strings.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def default(self, o): # pylint: disable=locally-disabled,E0202
|
def default(self, o): # pylint: disable=locally-disabled,E0202
|
||||||
if isinstance(o, (datetime.date, datetime.datetime)):
|
if isinstance(o, (datetime.date, datetime.datetime)):
|
||||||
return o.isoformat()
|
return o.isoformat()
|
||||||
return json.JSONEncoder.default(self, o)
|
try:
|
||||||
|
return json.JSONEncoder.default(self, o)
|
||||||
|
except TypeError:
|
||||||
|
# Discard image hashes
|
||||||
|
if isinstance(o, imagehash.ImageHash):
|
||||||
|
return None
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
def pretty_json(data):
|
def pretty_json(data):
|
||||||
@ -126,9 +153,7 @@ def pretty_json(data):
|
|||||||
"toto": "ok"
|
"toto": "ok"
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
return json.dumps(data, cls=DateAwareJSONEncoder,
|
return json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True)
|
||||||
indent=4, separators=(',', ': '),
|
|
||||||
sort_keys=True)
|
|
||||||
|
|
||||||
|
|
||||||
def batch(iterable, size):
|
def batch(iterable, size):
|
||||||
@ -142,7 +167,10 @@ def batch(iterable, size):
|
|||||||
sourceiter = iter(iterable)
|
sourceiter = iter(iterable)
|
||||||
while True:
|
while True:
|
||||||
batchiter = itertools.islice(sourceiter, size)
|
batchiter = itertools.islice(sourceiter, size)
|
||||||
yield itertools.chain([next(batchiter)], batchiter)
|
try:
|
||||||
|
yield itertools.chain([next(batchiter)], batchiter)
|
||||||
|
except StopIteration:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
def is_within_interval(value, min_value=None, max_value=None):
|
def is_within_interval(value, min_value=None, max_value=None):
|
||||||
@ -264,10 +292,7 @@ def distance(gps1, gps2):
|
|||||||
long2 = math.radians(gps2[1])
|
long2 = math.radians(gps2[1])
|
||||||
|
|
||||||
# pylint: disable=locally-disabled,invalid-name
|
# pylint: disable=locally-disabled,invalid-name
|
||||||
a = (
|
a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
|
||||||
math.sin((lat2 - lat1) / 2.0)**2 +
|
|
||||||
math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0)**2
|
|
||||||
)
|
|
||||||
c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
|
c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
|
||||||
earth_radius = 6371000
|
earth_radius = 6371000
|
||||||
|
|
||||||
@ -322,7 +347,7 @@ def merge_dicts(*args):
|
|||||||
return merge_dicts(merged_flat, *args[2:])
|
return merge_dicts(merged_flat, *args[2:])
|
||||||
|
|
||||||
|
|
||||||
def get_travel_time_between(latlng_from, latlng_to, config):
|
def get_travel_time_between(latlng_from, latlng_to, mode, config):
|
||||||
"""
|
"""
|
||||||
Query the Navitia API to get the travel time between two points identified
|
Query the Navitia API to get the travel time between two points identified
|
||||||
by their latitude and longitude.
|
by their latitude and longitude.
|
||||||
@ -330,6 +355,7 @@ def get_travel_time_between(latlng_from, latlng_to, config):
|
|||||||
:param latlng_from: A tuple of (latitude, longitude) for the starting
|
:param latlng_from: A tuple of (latitude, longitude) for the starting
|
||||||
point.
|
point.
|
||||||
:param latlng_to: A tuple of (latitude, longitude) for the destination.
|
:param latlng_to: A tuple of (latitude, longitude) for the destination.
|
||||||
|
:param mode: A TimeToMode enum value for the mode of transportation to use.
|
||||||
:return: A dict of the travel time in seconds and sections of the journey
|
:return: A dict of the travel time in seconds and sections of the journey
|
||||||
with GeoJSON paths. Returns ``None`` if it could not fetch it.
|
with GeoJSON paths. Returns ``None`` if it could not fetch it.
|
||||||
|
|
||||||
@ -338,63 +364,114 @@ def get_travel_time_between(latlng_from, latlng_to, config):
|
|||||||
Uses the Navitia API. Requires a ``navitia_api_key`` field to be
|
Uses the Navitia API. Requires a ``navitia_api_key`` field to be
|
||||||
filled-in in the ``config``.
|
filled-in in the ``config``.
|
||||||
"""
|
"""
|
||||||
|
sections = []
|
||||||
travel_time = None
|
travel_time = None
|
||||||
|
|
||||||
# Check that Navitia API key is available
|
if mode == TimeToModes.PUBLIC_TRANSPORT:
|
||||||
if config["navitia_api_key"]:
|
# Check that Navitia API key is available
|
||||||
payload = {
|
if config["navitia_api_key"]:
|
||||||
"from": "%s;%s" % (latlng_from[1], latlng_from[0]),
|
# Search route for next Monday at 8am to avoid looking for a route
|
||||||
"to": "%s;%s" % (latlng_to[1], latlng_to[0]),
|
# in the middle of the night if the fetch is done by night.
|
||||||
"datetime": datetime.datetime.now().isoformat(),
|
date_from = next_weekday(datetime.datetime.now(), 0).replace(
|
||||||
"count": 1
|
hour=8,
|
||||||
}
|
minute=0,
|
||||||
try:
|
|
||||||
# Do the query to Navitia API
|
|
||||||
req = requests.get(
|
|
||||||
NAVITIA_ENDPOINT, params=payload,
|
|
||||||
auth=(config["navitia_api_key"], "")
|
|
||||||
)
|
)
|
||||||
req.raise_for_status()
|
payload = {
|
||||||
|
"from": "%s;%s" % (latlng_from[1], latlng_from[0]),
|
||||||
|
"to": "%s;%s" % (latlng_to[1], latlng_to[0]),
|
||||||
|
"datetime": date_from.isoformat(),
|
||||||
|
"count": 1,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
# Do the query to Navitia API
|
||||||
|
req = requests.get(
|
||||||
|
NAVITIA_ENDPOINT,
|
||||||
|
params=payload,
|
||||||
|
auth=(config["navitia_api_key"], ""),
|
||||||
|
)
|
||||||
|
req.raise_for_status()
|
||||||
|
|
||||||
journeys = req.json()["journeys"][0]
|
journeys = req.json()["journeys"][0]
|
||||||
travel_time = journeys["durations"]["total"]
|
travel_time = journeys["durations"]["total"]
|
||||||
sections = []
|
for section in journeys["sections"]:
|
||||||
for section in journeys["sections"]:
|
if section["type"] == "public_transport":
|
||||||
if section["type"] == "public_transport":
|
# Public transport
|
||||||
# Public transport
|
sections.append(
|
||||||
sections.append({
|
{
|
||||||
"geojson": section["geojson"],
|
"geojson": section["geojson"],
|
||||||
"color": (
|
"color": (section["display_informations"].get("color", None)),
|
||||||
section["display_informations"].get("color", None)
|
}
|
||||||
)
|
)
|
||||||
})
|
elif section["type"] == "street_network":
|
||||||
elif section["type"] == "street_network":
|
# Walking
|
||||||
# Walking
|
sections.append({"geojson": section["geojson"], "color": None})
|
||||||
sections.append({
|
else:
|
||||||
"geojson": section["geojson"],
|
# Skip anything else
|
||||||
"color": None
|
continue
|
||||||
})
|
except (
|
||||||
else:
|
requests.exceptions.RequestException,
|
||||||
# Skip anything else
|
ValueError,
|
||||||
continue
|
IndexError,
|
||||||
except (requests.exceptions.RequestException,
|
KeyError,
|
||||||
ValueError, IndexError, KeyError) as exc:
|
) as exc:
|
||||||
# Ignore any possible exception
|
# Ignore any possible exception
|
||||||
|
LOGGER.warning(
|
||||||
|
"An exception occurred during travel time lookup on Navitia: %s.",
|
||||||
|
str(exc),
|
||||||
|
)
|
||||||
|
else:
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"An exception occurred during travel time lookup on "
|
"No API key available for travel time lookup. Please provide "
|
||||||
"Navitia: %s.",
|
"a Navitia API key. Skipping travel time lookup."
|
||||||
str(exc)
|
|
||||||
)
|
)
|
||||||
else:
|
elif mode in [TimeToModes.WALK, TimeToModes.BIKE, TimeToModes.CAR]:
|
||||||
LOGGER.warning(
|
MAPBOX_MODES = {
|
||||||
"No API key available for travel time lookup. Please provide "
|
TimeToModes.WALK: "mapbox/walking",
|
||||||
"a Navitia API key. Skipping travel time lookup."
|
TimeToModes.BIKE: "mapbox/cycling",
|
||||||
)
|
TimeToModes.CAR: "mapbox/driving",
|
||||||
if travel_time:
|
|
||||||
return {
|
|
||||||
"time": travel_time,
|
|
||||||
"sections": sections
|
|
||||||
}
|
}
|
||||||
|
# Check that Mapbox API key is available
|
||||||
|
if config["mapbox_api_key"]:
|
||||||
|
try:
|
||||||
|
service = mapbox.Directions(access_token=config["mapbox_api_key"])
|
||||||
|
origin = {
|
||||||
|
"type": "Feature",
|
||||||
|
"properties": {"name": "Start"},
|
||||||
|
"geometry": {
|
||||||
|
"type": "Point",
|
||||||
|
"coordinates": [latlng_from[1], latlng_from[0]],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
destination = {
|
||||||
|
"type": "Feature",
|
||||||
|
"properties": {"name": "End"},
|
||||||
|
"geometry": {
|
||||||
|
"type": "Point",
|
||||||
|
"coordinates": [latlng_to[1], latlng_to[0]],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
response = service.directions([origin, destination], MAPBOX_MODES[mode])
|
||||||
|
response.raise_for_status()
|
||||||
|
route = response.geojson()["features"][0]
|
||||||
|
# Fix longitude/latitude inversion in geojson output
|
||||||
|
geometry = route["geometry"]
|
||||||
|
geometry["coordinates"] = [(x[1], x[0]) for x in geometry["coordinates"]]
|
||||||
|
sections = [{"geojson": geometry, "color": "000"}]
|
||||||
|
travel_time = route["properties"]["duration"]
|
||||||
|
except (requests.exceptions.RequestException, IndexError, KeyError) as exc:
|
||||||
|
# Ignore any possible exception
|
||||||
|
LOGGER.warning(
|
||||||
|
"An exception occurred during travel time lookup on Mapbox: %s.",
|
||||||
|
str(exc),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
LOGGER.warning(
|
||||||
|
"No API key available for travel time lookup. Please provide "
|
||||||
|
"a Mapbox API key. Skipping travel time lookup."
|
||||||
|
)
|
||||||
|
|
||||||
|
if travel_time:
|
||||||
|
return {"time": travel_time, "sections": sections}
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@ -402,6 +479,7 @@ def timeit(func):
|
|||||||
"""
|
"""
|
||||||
A decorator that logs how much time was spent in the function.
|
A decorator that logs how much time was spent in the function.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def wrapped(*args, **kwargs):
|
def wrapped(*args, **kwargs):
|
||||||
"""
|
"""
|
||||||
The wrapped function
|
The wrapped function
|
||||||
@ -411,4 +489,5 @@ def timeit(func):
|
|||||||
runtime = time.time() - before
|
runtime = time.time() - before
|
||||||
LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime)
|
LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
return wrapped
|
return wrapped
|
||||||
|
@ -2,9 +2,7 @@
|
|||||||
"""
|
"""
|
||||||
This module contains the definition of the Bottle web app.
|
This module contains the definition of the Bottle web app.
|
||||||
"""
|
"""
|
||||||
from __future__ import (
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
import json
|
import json
|
||||||
@ -25,14 +23,12 @@ class QuietWSGIRefServer(bottle.WSGIRefServer):
|
|||||||
Quiet implementation of Bottle built-in WSGIRefServer, as `Canister` is
|
Quiet implementation of Bottle built-in WSGIRefServer, as `Canister` is
|
||||||
handling the logging through standard Python logging.
|
handling the logging through standard Python logging.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# pylint: disable=locally-disabled,too-few-public-methods
|
# pylint: disable=locally-disabled,too-few-public-methods
|
||||||
quiet = True
|
quiet = True
|
||||||
|
|
||||||
def run(self, app):
|
def run(self, app):
|
||||||
app.log.info(
|
app.log.info("Server is now up and ready! Listening on %s:%s." % (self.host, self.port))
|
||||||
'Server is now up and ready! Listening on %s:%s.' %
|
|
||||||
(self.host, self.port)
|
|
||||||
)
|
|
||||||
super(QuietWSGIRefServer, self).run(app)
|
super(QuietWSGIRefServer, self).run(app)
|
||||||
|
|
||||||
|
|
||||||
@ -42,12 +38,10 @@ def _serve_static_file(filename):
|
|||||||
"""
|
"""
|
||||||
return bottle.static_file(
|
return bottle.static_file(
|
||||||
filename,
|
filename,
|
||||||
root=os.path.join(
|
root=os.path.join(os.path.dirname(os.path.realpath(__file__)), "static"),
|
||||||
os.path.dirname(os.path.realpath(__file__)),
|
|
||||||
"static"
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_app(config):
|
def get_app(config):
|
||||||
"""
|
"""
|
||||||
Get a Bottle app instance with all the routes set-up.
|
Get a Bottle app instance with all the routes set-up.
|
||||||
@ -65,61 +59,69 @@ def get_app(config):
|
|||||||
app.install(canister.Canister())
|
app.install(canister.Canister())
|
||||||
# Use DateAwareJSONEncoder to dump JSON strings
|
# Use DateAwareJSONEncoder to dump JSON strings
|
||||||
# From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long
|
# From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long
|
||||||
app.install(
|
app.install(bottle.JSONPlugin(json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)))
|
||||||
bottle.JSONPlugin(
|
|
||||||
json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Enable CORS
|
# Enable CORS
|
||||||
@app.hook('after_request')
|
@app.hook("after_request")
|
||||||
def enable_cors():
|
def enable_cors():
|
||||||
"""
|
"""
|
||||||
Add CORS headers at each request.
|
Add CORS headers at each request.
|
||||||
"""
|
"""
|
||||||
# The str() call is required as we import unicode_literal and WSGI
|
# The str() call is required as we import unicode_literal and WSGI
|
||||||
# headers list should have plain str type.
|
# headers list should have plain str type.
|
||||||
bottle.response.headers[str('Access-Control-Allow-Origin')] = str('*')
|
bottle.response.headers[str("Access-Control-Allow-Origin")] = str("*")
|
||||||
bottle.response.headers[str('Access-Control-Allow-Methods')] = str(
|
bottle.response.headers[str("Access-Control-Allow-Methods")] = str("PUT, GET, POST, DELETE, OPTIONS, PATCH")
|
||||||
'PUT, GET, POST, DELETE, OPTIONS, PATCH'
|
bottle.response.headers[str("Access-Control-Allow-Headers")] = str(
|
||||||
)
|
"Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token"
|
||||||
bottle.response.headers[str('Access-Control-Allow-Headers')] = str(
|
|
||||||
'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# API v1 routes
|
# API v1 routes
|
||||||
app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)
|
app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)
|
||||||
|
|
||||||
app.route("/api/v1/time_to_places", ["GET", "OPTIONS"],
|
app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1)
|
||||||
api_routes.time_to_places_v1)
|
|
||||||
|
|
||||||
app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
|
app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
|
||||||
app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
|
app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
|
||||||
app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"],
|
app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"], api_routes.update_flat_v1)
|
||||||
api_routes.update_flat_v1)
|
|
||||||
|
|
||||||
app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"],
|
app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"], api_routes.ics_feed_v1)
|
||||||
api_routes.ics_feed_v1)
|
|
||||||
|
|
||||||
app.route("/api/v1/search", "POST", api_routes.search_v1)
|
app.route("/api/v1/search", ["POST", "OPTIONS"], api_routes.search_v1)
|
||||||
|
|
||||||
app.route("/api/v1/opendata", "GET", api_routes.opendata_index_v1)
|
app.route("/api/v1/opendata", ["GET", "OPTIONS"], api_routes.opendata_index_v1)
|
||||||
app.route("/api/v1/opendata/postal_codes", "GET",
|
app.route(
|
||||||
api_routes.opendata_postal_codes_v1)
|
"/api/v1/opendata/postal_codes",
|
||||||
|
["GET", "OPTIONS"],
|
||||||
|
api_routes.opendata_postal_codes_v1,
|
||||||
|
)
|
||||||
|
|
||||||
|
app.route("/api/v1/metadata", ["GET", "OPTIONS"], api_routes.metadata_v1)
|
||||||
|
app.route("/api/v1/import", ["GET", "OPTIONS"], api_routes.import_v1)
|
||||||
|
|
||||||
# Index
|
# Index
|
||||||
app.route("/", "GET", lambda: _serve_static_file("index.html"))
|
app.route("/", "GET", lambda: _serve_static_file("index.html"))
|
||||||
|
|
||||||
# Static files
|
# Static files
|
||||||
app.route("/favicon.ico", "GET",
|
app.route("/favicon.ico", "GET", lambda: _serve_static_file("favicon.ico"))
|
||||||
lambda: _serve_static_file("favicon.ico"))
|
|
||||||
app.route(
|
app.route(
|
||||||
"/assets/<filename:path>", "GET",
|
"/assets/<filename:path>",
|
||||||
lambda filename: _serve_static_file("/assets/{}".format(filename))
|
"GET",
|
||||||
|
lambda filename: _serve_static_file("/assets/{}".format(filename)),
|
||||||
)
|
)
|
||||||
app.route(
|
app.route(
|
||||||
"/img/<filename:path>", "GET",
|
"/img/<filename:path>",
|
||||||
lambda filename: _serve_static_file("/img/{}".format(filename))
|
"GET",
|
||||||
|
lambda filename: _serve_static_file("/img/{}".format(filename)),
|
||||||
|
)
|
||||||
|
app.route(
|
||||||
|
"/.well-known/<filename:path>",
|
||||||
|
"GET",
|
||||||
|
lambda filename: _serve_static_file("/.well-known/{}".format(filename)),
|
||||||
|
)
|
||||||
|
app.route(
|
||||||
|
"/data/img/<filename:path>",
|
||||||
|
"GET",
|
||||||
|
lambda filename: bottle.static_file(filename, root=os.path.join(config["data_directory"], "images")),
|
||||||
)
|
)
|
||||||
|
|
||||||
return app
|
return app
|
||||||
|
@ -7,9 +7,7 @@ This module is heavily based on code from
|
|||||||
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
|
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
|
||||||
licensed under MIT license.
|
licensed under MIT license.
|
||||||
"""
|
"""
|
||||||
from __future__ import (
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
import inspect
|
import inspect
|
||||||
@ -22,7 +20,8 @@ class ConfigPlugin(object):
|
|||||||
A Bottle plugin to automatically pass the config object to the routes
|
A Bottle plugin to automatically pass the config object to the routes
|
||||||
specifying they need it.
|
specifying they need it.
|
||||||
"""
|
"""
|
||||||
name = 'config'
|
|
||||||
|
name = "config"
|
||||||
api = 2
|
api = 2
|
||||||
KEYWORD = "config"
|
KEYWORD = "config"
|
||||||
|
|
||||||
@ -41,9 +40,7 @@ class ConfigPlugin(object):
|
|||||||
if not isinstance(other, ConfigPlugin):
|
if not isinstance(other, ConfigPlugin):
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
raise bottle.PluginError(
|
raise bottle.PluginError("Found another conflicting Config plugin.")
|
||||||
"Found another conflicting Config plugin."
|
|
||||||
)
|
|
||||||
|
|
||||||
def apply(self, callback, route):
|
def apply(self, callback, route):
|
||||||
"""
|
"""
|
||||||
|
@ -7,9 +7,7 @@ This module is heavily based on code from
|
|||||||
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
|
[Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
|
||||||
licensed under MIT license.
|
licensed under MIT license.
|
||||||
"""
|
"""
|
||||||
from __future__ import (
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import inspect
|
import inspect
|
||||||
|
|
||||||
@ -21,7 +19,8 @@ class DatabasePlugin(object):
|
|||||||
A Bottle plugin to automatically pass an SQLAlchemy database session object
|
A Bottle plugin to automatically pass an SQLAlchemy database session object
|
||||||
to the routes specifying they need it.
|
to the routes specifying they need it.
|
||||||
"""
|
"""
|
||||||
name = 'database'
|
|
||||||
|
name = "database"
|
||||||
api = 2
|
api = 2
|
||||||
KEYWORD = "db"
|
KEYWORD = "db"
|
||||||
|
|
||||||
@ -41,9 +40,7 @@ class DatabasePlugin(object):
|
|||||||
if not isinstance(other, DatabasePlugin):
|
if not isinstance(other, DatabasePlugin):
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
raise bottle.PluginError(
|
raise bottle.PluginError("Found another conflicting Database plugin.")
|
||||||
"Found another conflicting Database plugin."
|
|
||||||
)
|
|
||||||
|
|
||||||
def apply(self, callback, route):
|
def apply(self, callback, route):
|
||||||
"""
|
"""
|
||||||
@ -64,6 +61,7 @@ class DatabasePlugin(object):
|
|||||||
if self.KEYWORD not in callback_args:
|
if self.KEYWORD not in callback_args:
|
||||||
# If no need for a db session, call the route callback
|
# If no need for a db session, call the route callback
|
||||||
return callback
|
return callback
|
||||||
|
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
"""
|
"""
|
||||||
Wrap the callback in a call to get_session.
|
Wrap the callback in a call to get_session.
|
||||||
@ -72,6 +70,7 @@ class DatabasePlugin(object):
|
|||||||
# Get a db session and pass it to the callback
|
# Get a db session and pass it to the callback
|
||||||
kwargs[self.KEYWORD] = session
|
kwargs[self.KEYWORD] = session
|
||||||
return callback(*args, **kwargs)
|
return callback(*args, **kwargs)
|
||||||
|
|
||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
@ -161,3 +161,10 @@ export const doSearch = function (query, callback) {
|
|||||||
console.error('Unable to perform search: ' + ex)
|
console.error('Unable to perform search: ' + ex)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const getMetadata = function (callback) {
|
||||||
|
fetch('/api/v1/metadata', { credentials: 'same-origin' })
|
||||||
|
.then(response => response.json())
|
||||||
|
.then(json => callback(json.data))
|
||||||
|
.catch(ex => console.error('Unable to fetch application metadata: ' + ex))
|
||||||
|
}
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
<style>
|
<style>
|
||||||
body {
|
body {
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
max-width: 75em;
|
/* max-width: 75em; */
|
||||||
font-family: "Helvetica", "Arial", sans-serif;
|
font-family: "Helvetica", "Arial", sans-serif;
|
||||||
line-height: 1.5;
|
line-height: 1.5;
|
||||||
padding: 4em 1em;
|
padding: 4em 1em;
|
||||||
|
398
flatisfy/web/js_src/components/flat.vue
Normal file
398
flatisfy/web/js_src/components/flat.vue
Normal file
@ -0,0 +1,398 @@
|
|||||||
|
<template>
|
||||||
|
<div>
|
||||||
|
<template v-if="isLoading">
|
||||||
|
<p>{{ $t("common.loading") }}</p>
|
||||||
|
</template>
|
||||||
|
<div class="grid" v-else-if="flat && timeToPlaces">
|
||||||
|
<div class="left-panel">
|
||||||
|
<h2>
|
||||||
|
(<!--
|
||||||
|
--><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
|
||||||
|
-->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
|
||||||
|
--></router-link><!--
|
||||||
|
-->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
|
||||||
|
<span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
|
||||||
|
</h2>
|
||||||
|
<div class="grid">
|
||||||
|
<div class="left-panel">
|
||||||
|
<span>
|
||||||
|
{{ flat.cost | cost(flat.currency) }}
|
||||||
|
<template v-if="flat.utilities === 'included'">
|
||||||
|
{{ $t("flatsDetails.utilities_included") }}
|
||||||
|
</template>
|
||||||
|
<template v-else-if="flat.utilities === 'excluded'">
|
||||||
|
{{ $t("flatsDetails.utilities_excluded") }}
|
||||||
|
</template>
|
||||||
|
</span>
|
||||||
|
<span v-if="flat.flatisfy_postal_code.postal_code">
|
||||||
|
à {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<span class="right-panel right">
|
||||||
|
<template v-if="flat.area"><span>{{flat.area}} m<sup>2</sup></span></template><template v-if="flat.rooms"><span>, {{flat.rooms}} {{ $tc("flatsDetails.rooms", flat.rooms) }}</span></template><template v-if="flat.bedrooms"><span>/ {{flat.bedrooms}} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}</span></template>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<template v-if="flat.photos && flat.photos.length > 0">
|
||||||
|
<Slider :photos="flat.photos"></Slider>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3>{{ $t("flatsDetails.Description") }}</h3>
|
||||||
|
<p>{{ flat.text }}</p>
|
||||||
|
<p class="right">{{ flat.location }}</p>
|
||||||
|
<p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3>{{ $t("flatsDetails.Details") }}</h3>
|
||||||
|
<table>
|
||||||
|
<tr v-for="(value, key) in flat.details">
|
||||||
|
<th>{{ key }}</th>
|
||||||
|
<td>{{ value }}</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3>{{ $t("flatsDetails.Metadata") }}</h3>
|
||||||
|
<table>
|
||||||
|
<tr>
|
||||||
|
<th>
|
||||||
|
{{ $t("flatsDetails.postal_code") }}
|
||||||
|
</th>
|
||||||
|
<td>
|
||||||
|
<template v-if="flat.flatisfy_postal_code.postal_code">
|
||||||
|
{{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
|
||||||
|
</template>
|
||||||
|
<template v-else>
|
||||||
|
?
|
||||||
|
</template>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<tr v-if="displayedStations">
|
||||||
|
<th>
|
||||||
|
{{ $t("flatsDetails.nearby_stations") }}
|
||||||
|
</th>
|
||||||
|
<td>
|
||||||
|
{{ displayedStations }}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr v-if="Object.keys(flat.flatisfy_time_to).length">
|
||||||
|
<th>
|
||||||
|
{{ $t("flatsDetails.Times_to") }}
|
||||||
|
</th>
|
||||||
|
<td>
|
||||||
|
<ul class="time_to_list">
|
||||||
|
<li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
|
||||||
|
{{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<th>
|
||||||
|
{{ $t("flatsDetails.SquareMeterCost") }}
|
||||||
|
</th>
|
||||||
|
<td>
|
||||||
|
{{ flat.sqCost }} {{ flat.currency }}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3>{{ $t("flatsDetails.Location") }}</h3>
|
||||||
|
|
||||||
|
<FlatsMap :flats="flatMarker" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3>{{ $t("flatsDetails.Notes") }}</h3>
|
||||||
|
|
||||||
|
<form v-on:submit="updateFlatNotes">
|
||||||
|
<textarea ref="notesTextarea" rows="10" :v-model="flat.notes"></textarea>
|
||||||
|
<p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="right-panel">
|
||||||
|
<h3>{{ $t("flatsDetails.Contact") }}</h3>
|
||||||
|
<div class="contact">
|
||||||
|
<template v-if="flat.phone">
|
||||||
|
<p v-for="phoneNumber in flat.phone.split(',')">
|
||||||
|
<a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a>
|
||||||
|
</p>
|
||||||
|
</template>
|
||||||
|
<template v-if="flat.urls.length == 1">
|
||||||
|
<a :href="flat.urls[0]" target="_blank">
|
||||||
|
{{ $tc("common.Original_post", 1) }}
|
||||||
|
<i class="fa fa-external-link" aria-hidden="true"></i>
|
||||||
|
</a>
|
||||||
|
</template>
|
||||||
|
<template v-else-if="flat.urls.length > 1">
|
||||||
|
<p>{{ $tc("common.Original_post", flat.urls.length) }}
|
||||||
|
<ul>
|
||||||
|
<li v-for="(url, index) in flat.urls">
|
||||||
|
<a :href="url" target="_blank">
|
||||||
|
{{ $tc("common.Original_post", 1) }} {{ index + 1 }}
|
||||||
|
<i class="fa fa-external-link" aria-hidden="true"></i>
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</p>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<h3>{{ $t("flatsDetails.Visit") }}</h3>
|
||||||
|
<div class="visit">
|
||||||
|
<flat-pickr
|
||||||
|
:value="flatpickrValue"
|
||||||
|
:config="flatpickrConfig"
|
||||||
|
:placeholder="$t('flatsDetails.setDateOfVisit')"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<h3>{{ $t("common.Actions") }}</h3>
|
||||||
|
|
||||||
|
<nav>
|
||||||
|
<ul>
|
||||||
|
<template v-if="flat.status !== 'user_deleted'">
|
||||||
|
<Notation :flat="flat"></Notation>
|
||||||
|
<li>
|
||||||
|
<button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
|
||||||
|
<i class="fa fa-trash" aria-hidden="true"></i>
|
||||||
|
{{ $t("common.Remove") }}
|
||||||
|
</button>
|
||||||
|
</li>
|
||||||
|
</template>
|
||||||
|
<template v-else>
|
||||||
|
<li>
|
||||||
|
<button v-on:click="updateFlatStatus('new')" class="fullButton">
|
||||||
|
<i class="fa fa-undo" aria-hidden="true"></i>
|
||||||
|
{{ $t("common.Restore") }}
|
||||||
|
</button>
|
||||||
|
</li>
|
||||||
|
</template>
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
import flatPickr from 'vue-flatpickr-component'
|
||||||
|
import moment from 'moment'
|
||||||
|
import 'font-awesome-webpack'
|
||||||
|
import 'flatpickr/dist/flatpickr.css'
|
||||||
|
|
||||||
|
import FlatsMap from '../components/flatsmap.vue'
|
||||||
|
import Slider from '../components/slider.vue'
|
||||||
|
import Notation from '../components/notation.vue'
|
||||||
|
|
||||||
|
import { capitalize } from '../tools'
|
||||||
|
|
||||||
|
export default {
|
||||||
|
components: {
|
||||||
|
FlatsMap,
|
||||||
|
Slider,
|
||||||
|
flatPickr,
|
||||||
|
Notation
|
||||||
|
},
|
||||||
|
|
||||||
|
created () {
|
||||||
|
this.fetchData()
|
||||||
|
},
|
||||||
|
|
||||||
|
data () {
|
||||||
|
return {
|
||||||
|
// TODO: Flatpickr locale
|
||||||
|
'overloadNotation': null,
|
||||||
|
'flatpickrConfig': {
|
||||||
|
static: true,
|
||||||
|
altFormat: 'h:i K, M j, Y',
|
||||||
|
altInput: true,
|
||||||
|
enableTime: true,
|
||||||
|
onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
props: ['flat'],
|
||||||
|
|
||||||
|
computed: {
|
||||||
|
isLoading () {
|
||||||
|
return this.$store.getters.isLoading
|
||||||
|
},
|
||||||
|
flatMarker () {
|
||||||
|
return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.flat.id)
|
||||||
|
},
|
||||||
|
'flatpickrValue' () {
|
||||||
|
if (this.flat && this.flat.visit_date) {
|
||||||
|
return this.flat.visit_date.local().format()
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
},
|
||||||
|
timeToPlaces () {
|
||||||
|
return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
|
||||||
|
},
|
||||||
|
notation () {
|
||||||
|
if (this.overloadNotation) {
|
||||||
|
return this.overloadNotation
|
||||||
|
}
|
||||||
|
return this.flat.notation
|
||||||
|
},
|
||||||
|
journeys () {
|
||||||
|
if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
|
||||||
|
const journeys = []
|
||||||
|
for (const place in this.flat.flatisfy_time_to) {
|
||||||
|
this.flat.flatisfy_time_to[place].sections.forEach(
|
||||||
|
section => journeys.push({
|
||||||
|
geojson: section.geojson,
|
||||||
|
options: {
|
||||||
|
color: section.color ? ('#' + section.color) : '#2196f3',
|
||||||
|
dashArray: section.color ? 'none' : '2, 10'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return journeys
|
||||||
|
}
|
||||||
|
return []
|
||||||
|
},
|
||||||
|
displayedStations () {
|
||||||
|
if (this.flat.flatisfy_stations.length > 0) {
|
||||||
|
const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
|
||||||
|
return stationsNames.join(', ')
|
||||||
|
} else {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
watch: {
|
||||||
|
flat: 'fetchData'
|
||||||
|
},
|
||||||
|
|
||||||
|
methods: {
|
||||||
|
fetchData () {
|
||||||
|
this.$store.dispatch('getAllTimeToPlaces')
|
||||||
|
},
|
||||||
|
|
||||||
|
updateFlatStatus (status) {
|
||||||
|
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: status })
|
||||||
|
},
|
||||||
|
|
||||||
|
updateFlatNotes () {
|
||||||
|
const notes = this.$refs.notesTextarea.value
|
||||||
|
this.$store.dispatch(
|
||||||
|
'updateFlatNotes',
|
||||||
|
{ flatId: this.flat.id, newNotes: notes }
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
updateFlatVisitDate (date) {
|
||||||
|
if (date) {
|
||||||
|
date = moment(date).utc().format()
|
||||||
|
}
|
||||||
|
this.$store.dispatch(
|
||||||
|
'updateFlatVisitDate',
|
||||||
|
{ flatId: this.flat.id, newVisitDate: date }
|
||||||
|
)
|
||||||
|
},
|
||||||
|
|
||||||
|
humanizeTimeTo (time) {
|
||||||
|
const minutes = Math.floor(time.as('minutes'))
|
||||||
|
return minutes + ' ' + this.$tc('common.mins', minutes)
|
||||||
|
},
|
||||||
|
|
||||||
|
normalizePhoneNumber (phoneNumber) {
|
||||||
|
phoneNumber = phoneNumber.replace(/ /g, '')
|
||||||
|
phoneNumber = phoneNumber.replace(/\./g, '')
|
||||||
|
return phoneNumber
|
||||||
|
},
|
||||||
|
|
||||||
|
capitalize: capitalize
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<style scoped>
|
||||||
|
.expired {
|
||||||
|
font-weight: bold;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media screen and (min-width: 768px) {
|
||||||
|
.grid {
|
||||||
|
display: grid;
|
||||||
|
grid-gap: 50px;
|
||||||
|
grid-template-columns: 75fr 25fr;
|
||||||
|
}
|
||||||
|
|
||||||
|
.left-panel {
|
||||||
|
grid-column: 1;
|
||||||
|
grid-row: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.right-panel {
|
||||||
|
grid-column: 2;
|
||||||
|
grid-row: 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.left-panel textarea {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.right {
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav ul {
|
||||||
|
list-style-type: none;
|
||||||
|
padding-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.contact {
|
||||||
|
padding-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.right-panel li {
|
||||||
|
margin-bottom: 1em;
|
||||||
|
margin-top: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
button {
|
||||||
|
cursor: pointer;
|
||||||
|
width: 75%;
|
||||||
|
padding: 0.3em;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table {
|
||||||
|
table-layout: fixed;
|
||||||
|
}
|
||||||
|
|
||||||
|
td {
|
||||||
|
word-wrap: break-word;
|
||||||
|
word-break: break-all;
|
||||||
|
white-space: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.time_to_list {
|
||||||
|
margin: 0;
|
||||||
|
padding-left: 0;
|
||||||
|
list-style-position: outside;
|
||||||
|
list-style-type: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media screen and (max-width: 767px) {
|
||||||
|
.right-panel nav {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fullButton {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
@ -1,21 +1,26 @@
|
|||||||
<template lang="html">
|
<template lang="html">
|
||||||
<div class="full">
|
<div class="full">
|
||||||
<v-map :zoom="zoom.defaultZoom" :center="center" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom">
|
<v-map v-if="bounds" :zoom="zoom.defaultZoom" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom" v-on:click="$emit('select-flat', null)" @update:bounds="bounds = $event">
|
||||||
<v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer>
|
<v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer>
|
||||||
<template v-for="marker in flats">
|
<v-marker-cluster>
|
||||||
<v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat">
|
<template v-for="marker in flats">
|
||||||
<v-popup :content="marker.content"></v-popup>
|
<v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat" v-on:click="$emit('select-flat', marker.flatId)">
|
||||||
</v-marker>
|
<!-- <v-popup :content="marker.content"></v-popup> -->
|
||||||
</template>
|
</v-marker>
|
||||||
<template v-for="(place_gps, place_name) in places">
|
</template>
|
||||||
<v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
|
</v-marker-cluster>
|
||||||
<v-tooltip :content="place_name"></v-tooltip>
|
<v-marker-cluster>
|
||||||
</v-marker>
|
<template v-for="(place_gps, place_name) in places">
|
||||||
</template>
|
<v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
|
||||||
|
<v-tooltip :content="place_name"></v-tooltip>
|
||||||
|
</v-marker>
|
||||||
|
</template>
|
||||||
|
</v-marker-cluster>
|
||||||
<template v-for="journey in journeys">
|
<template v-for="journey in journeys">
|
||||||
<v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer>
|
<v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer>
|
||||||
</template>
|
</template>
|
||||||
</v-map>
|
</v-map>
|
||||||
|
<div v-else>Nothing to display yet</div>
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
@ -31,10 +36,13 @@ L.Icon.Default.mergeOptions({
|
|||||||
})
|
})
|
||||||
|
|
||||||
import 'leaflet/dist/leaflet.css'
|
import 'leaflet/dist/leaflet.css'
|
||||||
|
import 'leaflet.markercluster/dist/MarkerCluster.css'
|
||||||
|
import 'leaflet.markercluster/dist/MarkerCluster.Default.css'
|
||||||
|
|
||||||
require('leaflet.icon.glyph')
|
require('leaflet.icon.glyph')
|
||||||
|
|
||||||
import Vue2Leaflet from 'vue2-leaflet'
|
import { LMap, LTileLayer, LMarker, LTooltip, LPopup, LGeoJson } from 'vue2-leaflet'
|
||||||
|
import Vue2LeafletMarkerCluster from 'vue2-leaflet-markercluster'
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
data () {
|
data () {
|
||||||
@ -46,11 +54,11 @@ export default {
|
|||||||
fillColor: '#e4ce7f',
|
fillColor: '#e4ce7f',
|
||||||
fillOpacity: 1
|
fillOpacity: 1
|
||||||
},
|
},
|
||||||
center: null,
|
bounds: [[40.91351257612758, -7.580566406250001], [51.65892664880053, 12.0849609375]],
|
||||||
zoom: {
|
zoom: {
|
||||||
defaultZoom: 13,
|
defaultZoom: 6,
|
||||||
minZoom: 5,
|
minZoom: 5,
|
||||||
maxZoom: 17
|
maxZoom: 20
|
||||||
},
|
},
|
||||||
tiles: {
|
tiles: {
|
||||||
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
|
||||||
@ -67,25 +75,27 @@ export default {
|
|||||||
},
|
},
|
||||||
|
|
||||||
components: {
|
components: {
|
||||||
'v-map': Vue2Leaflet.Map,
|
'v-map': LMap,
|
||||||
'v-tilelayer': Vue2Leaflet.TileLayer,
|
'v-tilelayer': LTileLayer,
|
||||||
'v-marker': Vue2Leaflet.Marker,
|
'v-marker': LMarker,
|
||||||
'v-tooltip': Vue2Leaflet.Tooltip,
|
'v-marker-cluster': Vue2LeafletMarkerCluster,
|
||||||
'v-popup': Vue2Leaflet.Popup,
|
'v-tooltip': LTooltip,
|
||||||
'v-geojson-layer': Vue2Leaflet.GeoJSON
|
'v-popup': LPopup,
|
||||||
|
'v-geojson-layer': LGeoJson
|
||||||
},
|
},
|
||||||
|
|
||||||
computed: {
|
watch: {
|
||||||
bounds () {
|
flats: 'computeBounds',
|
||||||
let bounds = []
|
places: 'computeBounds'
|
||||||
this.flats.forEach(flat => bounds.push(flat.gps))
|
},
|
||||||
Object.keys(this.places).forEach(place => bounds.push(this.places[place]))
|
|
||||||
|
|
||||||
if (bounds.length > 0) {
|
methods: {
|
||||||
bounds = L.latLngBounds(bounds)
|
computeBounds (newData, oldData) {
|
||||||
return bounds
|
if (this.flats.length && JSON.stringify(newData) !== JSON.stringify(oldData)) {
|
||||||
} else {
|
const allBounds = []
|
||||||
return null
|
this.flats.forEach(flat => allBounds.push(flat.gps))
|
||||||
|
Object.keys(this.places).forEach(place => allBounds.push(this.places[place]))
|
||||||
|
this.bounds = allBounds.length ? L.latLngBounds(allBounds) : undefined
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -48,62 +48,7 @@
|
|||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr v-for="flat in sortedFlats" :key="flat.id">
|
<FlatsTableLine :flat="flat" :showNotationColumn="showNotationColumn" :showNotes="showNotes" v-for="flat in sortedFlats" :key="flat.id"></FlatsTableLine>
|
||||||
<td v-if="showNotationColumn">
|
|
||||||
<template v-for="n in range(flat.notation)">
|
|
||||||
<i class="fa fa-star" aria-hidden="true" :title="capitalize($t('status.followed'))"></i>
|
|
||||||
</template>
|
|
||||||
</td>
|
|
||||||
<td class="no-padding">
|
|
||||||
<router-link class="fill" :to="{name: 'details', params: {id: flat.id}}">
|
|
||||||
<template v-if="!showNotationColumn" v-for="n in range(flat.notation)">
|
|
||||||
<i class="fa fa-star" aria-hidden="true" :title="capitalize($t('status.followed'))"></i>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
[{{ flat.id.split("@")[1] }}] {{ flat.title }}
|
|
||||||
|
|
||||||
<template v-if="flat.photos && flat.photos.length > 0">
|
|
||||||
<br/>
|
|
||||||
<img :src="flat.photos[0].url"/>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<template v-if="showNotes">
|
|
||||||
<br/>
|
|
||||||
<pre>{{ flat.notes }}</pre>
|
|
||||||
</template>
|
|
||||||
</router-link>
|
|
||||||
</td>
|
|
||||||
<td>{{ flat.area }} m²</td>
|
|
||||||
<td>
|
|
||||||
{{ flat.rooms ? flat.rooms : '?'}}
|
|
||||||
</td>
|
|
||||||
<td>
|
|
||||||
{{ flat.cost }} {{ flat.currency }}
|
|
||||||
<template v-if="flat.utilities == 'included'">
|
|
||||||
{{ $t("flatsDetails.utilities_included") }}
|
|
||||||
</template>
|
|
||||||
<template v-else-if="flat.utilities == 'excluded'">
|
|
||||||
{{ $t("flatsDetails.utilities_excluded") }}
|
|
||||||
</template>
|
|
||||||
</td>
|
|
||||||
<td>
|
|
||||||
{{ flat.sqCost }} {{ flat.currency }}
|
|
||||||
</td>
|
|
||||||
<td>
|
|
||||||
<router-link :to="{name: 'details', params: {id: flat.id}}" :aria-label="$t('common.More_about') + ' ' + flat.id" :title="$t('common.More_about') + ' ' + flat.id">
|
|
||||||
<i class="fa fa-eye" aria-hidden="true"></i>
|
|
||||||
</router-link>
|
|
||||||
<a :href="flat.urls[0]" :aria-label="$t('common.Original_post_for') + ' ' + flat.id" :title="$t('common.Original_post_for') + ' ' + flat.id" target="_blank">
|
|
||||||
<i class="fa fa-external-link" aria-hidden="true"></i>
|
|
||||||
</a>
|
|
||||||
<button v-if="flat.status !== 'user_deleted'" v-on:click="updateFlatStatus(flat.id, 'user_deleted')" :aria-label="$t('common.Remove') + ' ' + flat.id" :title="$t('common.Remove') + ' ' + flat.id">
|
|
||||||
<i class="fa fa-trash" aria-hidden="true"></i>
|
|
||||||
</button>
|
|
||||||
<button v-else v-on:click="updateFlatStatus(flat.id, 'new')" :aria-label="$t('common.Restore') + ' ' + flat.id" :title="$t('common.Restore') + ' ' + flat.id">
|
|
||||||
<i class="fa fa-undo" aria-hidden="true"></i>
|
|
||||||
</button>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
</template>
|
</template>
|
||||||
@ -111,9 +56,15 @@
|
|||||||
<script>
|
<script>
|
||||||
import 'font-awesome-webpack'
|
import 'font-awesome-webpack'
|
||||||
|
|
||||||
import { capitalize, range } from '../tools'
|
import FlatsTableLine from './flatstableline.vue'
|
||||||
|
|
||||||
|
import { capitalize } from '../tools'
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
|
components: {
|
||||||
|
FlatsTableLine
|
||||||
|
},
|
||||||
|
|
||||||
data () {
|
data () {
|
||||||
return {
|
return {
|
||||||
sortBy: this.initialSortBy,
|
sortBy: this.initialSortBy,
|
||||||
@ -167,9 +118,6 @@ export default {
|
|||||||
},
|
},
|
||||||
|
|
||||||
methods: {
|
methods: {
|
||||||
updateFlatStatus (id, status) {
|
|
||||||
this.$store.dispatch('updateFlatStatus', { flatId: id, newStatus: status })
|
|
||||||
},
|
|
||||||
updateSortBy (field) {
|
updateSortBy (field) {
|
||||||
if (this.sortBy === field) {
|
if (this.sortBy === field) {
|
||||||
if (this.sortOrder === 'up') {
|
if (this.sortOrder === 'up') {
|
||||||
@ -181,8 +129,7 @@ export default {
|
|||||||
this.sortBy = field
|
this.sortBy = field
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
capitalize: capitalize,
|
capitalize: capitalize
|
||||||
range: range
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
100
flatisfy/web/js_src/components/flatstableline.vue
Normal file
100
flatisfy/web/js_src/components/flatstableline.vue
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
<template>
|
||||||
|
<tr>
|
||||||
|
<td v-if="showNotationColumn">
|
||||||
|
<Notation :flat="flat" :title="capitalizedStatus"></Notation>
|
||||||
|
</td>
|
||||||
|
<td class="no-padding">
|
||||||
|
<Notation v-if="!showNotationColumn" :flat="flat" :title="capitalizedStatus"></Notation>
|
||||||
|
<router-link class="fill" :to="{name: 'details', params: {id: flat.id}}">
|
||||||
|
[{{ flat.id.split("@")[1] }}]
|
||||||
|
<span class="expired">{{ flat.is_expired ? "[" + $t("common.expired") + "]" : null }}</span>
|
||||||
|
{{ flat.title }}
|
||||||
|
|
||||||
|
<template v-if="photo">
|
||||||
|
<br/>
|
||||||
|
<img :src="photo" height="200" style="max-width: 25vw" />
|
||||||
|
</template>
|
||||||
|
|
||||||
|
<template v-if="showNotes">
|
||||||
|
<br/>
|
||||||
|
<pre>{{ flat.notes }}</pre>
|
||||||
|
</template>
|
||||||
|
</router-link>
|
||||||
|
</td>
|
||||||
|
<td>{{ flat.area }} m²</td>
|
||||||
|
<td>
|
||||||
|
{{ flat.rooms ? flat.rooms : '?'}}
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
{{ flat.cost | cost(flat.currency) }}
|
||||||
|
<template v-if="flat.utilities == 'included'">
|
||||||
|
{{ $t("flatsDetails.utilities_included") }}
|
||||||
|
</template>
|
||||||
|
<template v-else-if="flat.utilities == 'excluded'">
|
||||||
|
{{ $t("flatsDetails.utilities_excluded") }}
|
||||||
|
</template>
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
{{ flat.sqCost }} {{ flat.currency }}
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
<router-link :to="{name: 'details', params: {id: flat.id}}" :aria-label="$t('common.More_about') + ' ' + flat.id" :title="$t('common.More_about') + ' ' + flat.id">
|
||||||
|
<i class="fa fa-eye" aria-hidden="true"></i>
|
||||||
|
</router-link>
|
||||||
|
<a :href="flat.urls[0]" :aria-label="$t('common.Original_post_for') + ' ' + flat.id" :title="$t('common.Original_post_for') + ' ' + flat.id" target="_blank">
|
||||||
|
<i class="fa fa-external-link" aria-hidden="true"></i>
|
||||||
|
</a>
|
||||||
|
<button v-if="flat.status !== 'user_deleted'" v-on:click="updateFlatStatus(flat.id, 'user_deleted')" :aria-label="$t('common.Remove') + ' ' + flat.id" :title="$t('common.Remove') + ' ' + flat.id">
|
||||||
|
<i class="fa fa-trash" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
<button v-else v-on:click="updateFlatStatus(flat.id, 'new')" :aria-label="$t('common.Restore') + ' ' + flat.id" :title="$t('common.Restore') + ' ' + flat.id">
|
||||||
|
<i class="fa fa-undo" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</template>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
import { capitalize } from '../tools'
|
||||||
|
import Notation from '../components/notation.vue'
|
||||||
|
|
||||||
|
export default {
|
||||||
|
props: {
|
||||||
|
flat: Object,
|
||||||
|
showNotationColumn: Boolean,
|
||||||
|
showNotes: Boolean
|
||||||
|
},
|
||||||
|
|
||||||
|
components: {
|
||||||
|
Notation
|
||||||
|
},
|
||||||
|
|
||||||
|
computed: {
|
||||||
|
capitalizedStatus () {
|
||||||
|
return capitalize(this.$t('status.followed'))
|
||||||
|
},
|
||||||
|
photo () {
|
||||||
|
if (this.flat.photos && this.flat.photos.length > 0) {
|
||||||
|
if (this.flat.photos[0].local) {
|
||||||
|
return `/data/img/${this.flat.photos[0].local}`
|
||||||
|
}
|
||||||
|
return this.flat.photos[0].url
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
methods: {
|
||||||
|
updateFlatStatus (id, status) {
|
||||||
|
this.$store.dispatch('updateFlatStatus', { flatId: id, newStatus: status })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<style scoped>
|
||||||
|
.expired {
|
||||||
|
font-weight: bold;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
</style>
|
68
flatisfy/web/js_src/components/notation.vue
Normal file
68
flatisfy/web/js_src/components/notation.vue
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
<template>
|
||||||
|
<div>
|
||||||
|
<template v-for="n in range(5)">
|
||||||
|
<button v-bind:key="n" v-on:mouseover="handleHover(n)" v-on:mouseout="handleOut()" v-on:click="updateNotation(n)">
|
||||||
|
<i class="fa" v-bind:class="{'fa-star': n < notation, 'fa-star-o': n >= notation}" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
|
||||||
|
import { range } from '../tools'
|
||||||
|
import 'flatpickr/dist/flatpickr.css'
|
||||||
|
|
||||||
|
export default {
|
||||||
|
data () {
|
||||||
|
return {
|
||||||
|
'overloadNotation': null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
props: ['flat'],
|
||||||
|
|
||||||
|
computed: {
|
||||||
|
notation () {
|
||||||
|
if (this.overloadNotation) {
|
||||||
|
return this.overloadNotation
|
||||||
|
}
|
||||||
|
return this.flat.notation
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
methods: {
|
||||||
|
updateNotation (notation) {
|
||||||
|
notation = notation + 1
|
||||||
|
|
||||||
|
if (notation === this.flat.notation) {
|
||||||
|
this.flat.notation = 0
|
||||||
|
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: 0 })
|
||||||
|
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'new' })
|
||||||
|
} else {
|
||||||
|
this.flat.notation = notation
|
||||||
|
this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: notation })
|
||||||
|
this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'followed' })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
handleHover (n) {
|
||||||
|
this.overloadNotation = n + 1
|
||||||
|
},
|
||||||
|
|
||||||
|
handleOut () {
|
||||||
|
this.overloadNotation = null
|
||||||
|
},
|
||||||
|
|
||||||
|
range: range
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<style scoped>
|
||||||
|
button {
|
||||||
|
border: none;
|
||||||
|
width: auto;
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
</style>
|
@ -1,15 +1,15 @@
|
|||||||
<template>
|
<template>
|
||||||
<div @keydown="closeModal">
|
<div @keydown="closeModal">
|
||||||
<isotope ref="cpt" :options="isotopeOptions" v-images-loaded:on.progress="layout" :list="photos">
|
<isotope ref="cpt" :options="isotopeOptions" v-images-loaded:on.progress="layout" :list="photos">
|
||||||
<div v-for="(photo, index) in photos" :key="photo.url">
|
<div v-for="(photo, index) in photosURLOrLocal" :key="photo">
|
||||||
<img :src="photo.url" v-on:click="openModal(index)"/>
|
<img :src="photo" v-on:click="openModal(index)"/>
|
||||||
</div>
|
</div>
|
||||||
</isotope>
|
</isotope>
|
||||||
|
|
||||||
<div class="modal" ref="modal" :aria-label="$t('slider.Fullscreen_photo')" role="dialog">
|
<div class="modal" ref="modal" :aria-label="$t('slider.Fullscreen_photo')" role="dialog">
|
||||||
<span class="close"><button v-on:click="closeModal" :title="$t('common.Close')" :aria-label="$t('common.Close')">×</button></span>
|
<span class="close"><button v-on:click="closeModal" :title="$t('common.Close')" :aria-label="$t('common.Close')">×</button></span>
|
||||||
|
|
||||||
<img class="modal-content" :src="photos[modalImgIndex].url">
|
<img class="modal-content" :src="photosURLOrLocal[modalImgIndex]">
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
@ -27,6 +27,17 @@ export default {
|
|||||||
isotope
|
isotope
|
||||||
},
|
},
|
||||||
|
|
||||||
|
computed: {
|
||||||
|
photosURLOrLocal () {
|
||||||
|
return this.photos.map(photo => {
|
||||||
|
if (photo.local) {
|
||||||
|
return `/data/img/${photo.local}`
|
||||||
|
}
|
||||||
|
return photo.url
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
created () {
|
created () {
|
||||||
window.addEventListener('keydown', event => {
|
window.addEventListener('keydown', event => {
|
||||||
if (!this.isModalOpen) {
|
if (!this.isModalOpen) {
|
||||||
|
@ -13,10 +13,14 @@ export default {
|
|||||||
'Close': 'Close',
|
'Close': 'Close',
|
||||||
'sortUp': 'Sort in ascending order',
|
'sortUp': 'Sort in ascending order',
|
||||||
'sortDown': 'Sort in descending order',
|
'sortDown': 'Sort in descending order',
|
||||||
'mins': 'min | mins'
|
'mins': 'min | mins',
|
||||||
|
'Unknown': 'Unknown',
|
||||||
|
'expired': 'expired'
|
||||||
},
|
},
|
||||||
home: {
|
home: {
|
||||||
'new_available_flats': 'New available flats'
|
'new_available_flats': 'New available flats',
|
||||||
|
'Last_update': 'Last update:',
|
||||||
|
'show_expired_flats': 'Show expired flats'
|
||||||
},
|
},
|
||||||
flatListing: {
|
flatListing: {
|
||||||
'no_available_flats': 'No available flats.',
|
'no_available_flats': 'No available flats.',
|
||||||
@ -38,12 +42,15 @@ export default {
|
|||||||
'utilities_included': '(utilities included)',
|
'utilities_included': '(utilities included)',
|
||||||
'utilities_excluded': '(utilities excluded)',
|
'utilities_excluded': '(utilities excluded)',
|
||||||
'Description': 'Description',
|
'Description': 'Description',
|
||||||
|
'First_posted': 'First posted',
|
||||||
'Details': 'Details',
|
'Details': 'Details',
|
||||||
'Metadata': 'Metadata',
|
'Metadata': 'Metadata',
|
||||||
'postal_code': 'Postal code',
|
'postal_code': 'Postal code',
|
||||||
'nearby_stations': 'Nearby stations',
|
'nearby_stations': 'Nearby stations',
|
||||||
'Times_to': 'Times to',
|
'Times_to': 'Times to',
|
||||||
'Location': 'Location',
|
'Location': 'Location',
|
||||||
|
'Notes': 'Notes',
|
||||||
|
'Save': 'Save',
|
||||||
'Contact': 'Contact',
|
'Contact': 'Contact',
|
||||||
'Visit': 'Visit',
|
'Visit': 'Visit',
|
||||||
'setDateOfVisit': 'Set date of visit',
|
'setDateOfVisit': 'Set date of visit',
|
||||||
|
75
flatisfy/web/js_src/i18n/fr/index.js
Normal file
75
flatisfy/web/js_src/i18n/fr/index.js
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
export default {
|
||||||
|
common: {
|
||||||
|
'flats': 'appartement | appartements',
|
||||||
|
'loading': 'Chargement…',
|
||||||
|
'Actions': 'Actions',
|
||||||
|
'More_about': 'Plus sur',
|
||||||
|
'Remove': 'Enlever',
|
||||||
|
'Restore': 'Remettre',
|
||||||
|
'Original_post': 'Annonce originale | Annonces originales',
|
||||||
|
'Original_post_for': 'Annonce originale pour',
|
||||||
|
'Follow': 'Suivre',
|
||||||
|
'Unfollow': 'Arrêter de suivre',
|
||||||
|
'Close': 'Fermer',
|
||||||
|
'sortUp': 'Trier par ordre croissant',
|
||||||
|
'sortDown': 'Trier par ordre décroissant',
|
||||||
|
'mins': 'min | mins',
|
||||||
|
'Unknown': 'Inconnu',
|
||||||
|
'expired': 'expiré'
|
||||||
|
},
|
||||||
|
home: {
|
||||||
|
'new_available_flats': 'Nouveaux appartements disponibles',
|
||||||
|
'Last_update': 'Dernière mise à jour :',
|
||||||
|
'show_expired_flats': 'Montrer les annonces expirées'
|
||||||
|
},
|
||||||
|
flatListing: {
|
||||||
|
'no_available_flats': 'Pas d\'appartement disponible.',
|
||||||
|
'no_matching_flats': 'Pas d\'appartement correspondant.'
|
||||||
|
},
|
||||||
|
menu: {
|
||||||
|
'available_flats': 'Appartements disponibles',
|
||||||
|
'followed_flats': 'Appartements suivis',
|
||||||
|
'by_status': 'Appartements par statut',
|
||||||
|
'search': 'Rechercher'
|
||||||
|
},
|
||||||
|
flatsDetails: {
|
||||||
|
'Notation': 'Note',
|
||||||
|
'Title': 'Titre',
|
||||||
|
'Area': 'Surface',
|
||||||
|
'Rooms': 'Pièces',
|
||||||
|
'Cost': 'Coût',
|
||||||
|
'SquareMeterCost': 'Coût / m²',
|
||||||
|
'utilities_included': '(charges comprises)',
|
||||||
|
'utilities_excluded': '(charges non comprises)',
|
||||||
|
'Description': 'Description',
|
||||||
|
'First_posted': 'Posté pour la première fois',
|
||||||
|
'Details': 'Détails',
|
||||||
|
'Metadata': 'Metadonnées',
|
||||||
|
'postal_code': 'Code postal',
|
||||||
|
'nearby_stations': 'Stations proches',
|
||||||
|
'Times_to': 'Temps jusqu\'à',
|
||||||
|
'Location': 'Localisation',
|
||||||
|
'Notes': 'Notes',
|
||||||
|
'Save': 'Sauvegarder',
|
||||||
|
'Contact': 'Contact',
|
||||||
|
'Visit': 'Visite',
|
||||||
|
'setDateOfVisit': 'Entrer une date de visite',
|
||||||
|
'no_phone_found': 'Pas de numéro de téléphone trouvé',
|
||||||
|
'rooms': 'pièce | pièces',
|
||||||
|
'bedrooms': 'chambre | chambres'
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
'new': 'nouveau',
|
||||||
|
'followed': 'suivi',
|
||||||
|
'ignored': 'ignoré',
|
||||||
|
'user_deleted': 'effacé',
|
||||||
|
'duplicate': 'en double'
|
||||||
|
},
|
||||||
|
slider: {
|
||||||
|
'Fullscreen_photo': 'Photo en plein écran'
|
||||||
|
},
|
||||||
|
search: {
|
||||||
|
'input_placeholder': 'Tapez n\'importe quoi à rechercher…',
|
||||||
|
'Search': 'Chercher !'
|
||||||
|
}
|
||||||
|
}
|
@ -1,8 +1,10 @@
|
|||||||
import Vue from 'vue'
|
import Vue from 'vue'
|
||||||
import VueI18n from 'vue-i18n'
|
import VueI18n from 'vue-i18n'
|
||||||
|
import moment from 'moment'
|
||||||
|
|
||||||
// Import translations
|
// Import translations
|
||||||
import en from './en'
|
import en from './en'
|
||||||
|
import fr from './fr'
|
||||||
|
|
||||||
Vue.use(VueI18n)
|
Vue.use(VueI18n)
|
||||||
|
|
||||||
@ -32,7 +34,8 @@ export function getBrowserLocales () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const messages = {
|
const messages = {
|
||||||
'en': en
|
'en': en,
|
||||||
|
'fr': fr
|
||||||
}
|
}
|
||||||
|
|
||||||
const locales = getBrowserLocales()
|
const locales = getBrowserLocales()
|
||||||
@ -46,6 +49,9 @@ for (var i = 0; i < locales.length; ++i) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set the locale for Moment.js
|
||||||
|
moment.locale(locale)
|
||||||
|
|
||||||
export default new VueI18n({
|
export default new VueI18n({
|
||||||
locale: locale,
|
locale: locale,
|
||||||
messages
|
messages
|
||||||
|
@ -3,9 +3,12 @@ import Vue from 'vue'
|
|||||||
import i18n from './i18n'
|
import i18n from './i18n'
|
||||||
import router from './router'
|
import router from './router'
|
||||||
import store from './store'
|
import store from './store'
|
||||||
|
import { costFilter } from './tools'
|
||||||
|
|
||||||
import App from './components/app.vue'
|
import App from './components/app.vue'
|
||||||
|
|
||||||
|
Vue.filter('cost', costFilter)
|
||||||
|
|
||||||
new Vue({
|
new Vue({
|
||||||
i18n,
|
i18n,
|
||||||
router,
|
router,
|
||||||
|
@ -50,5 +50,11 @@ export default {
|
|||||||
api.doSearch(query, flats => {
|
api.doSearch(query, flats => {
|
||||||
commit(types.REPLACE_FLATS, { flats })
|
commit(types.REPLACE_FLATS, { flats })
|
||||||
})
|
})
|
||||||
|
},
|
||||||
|
getMetadata ({ commit }) {
|
||||||
|
commit(types.IS_LOADING)
|
||||||
|
api.getMetadata(metadata => {
|
||||||
|
commit(types.RECEIVE_METADATA, { metadata })
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,52 +1,67 @@
|
|||||||
import { findFlatGPS } from '../tools'
|
import { findFlatGPS, costFilter } from '../tools'
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
allFlats: state => state.flats,
|
allFlats: (state) => state.flats,
|
||||||
|
|
||||||
flat: (state, getters) => id => state.flats.find(flat => flat.id === id),
|
flat: (state, getters) => (id) =>
|
||||||
|
state.flats.find((flat) => flat.id === id),
|
||||||
|
|
||||||
isLoading: state => state.loading > 0,
|
isLoading: (state) => state.loading > 0,
|
||||||
|
|
||||||
postalCodesFlatsBuckets: (state, getters) => filter => {
|
inseeCodesFlatsBuckets: (state, getters) => (filter) => {
|
||||||
const postalCodeBuckets = {}
|
const buckets = {}
|
||||||
|
|
||||||
state.flats.forEach(flat => {
|
state.flats.forEach((flat) => {
|
||||||
if (!filter || filter(flat)) {
|
if (!filter || filter(flat)) {
|
||||||
const postalCode = flat.flatisfy_postal_code.postal_code
|
const insee = flat.flatisfy_postal_code.insee_code
|
||||||
if (!postalCodeBuckets[postalCode]) {
|
if (!buckets[insee]) {
|
||||||
postalCodeBuckets[postalCode] = {
|
buckets[insee] = {
|
||||||
'name': flat.flatisfy_postal_code.name,
|
name: flat.flatisfy_postal_code.name,
|
||||||
'flats': []
|
flats: []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
postalCodeBuckets[postalCode].flats.push(flat)
|
buckets[insee].flats.push(flat)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
return postalCodeBuckets
|
return buckets
|
||||||
},
|
},
|
||||||
|
|
||||||
flatsMarkers: (state, getters) => (router, filter) => {
|
flatsMarkers: (state, getters) => (router, filter) => {
|
||||||
const markers = []
|
const markers = []
|
||||||
state.flats.forEach(flat => {
|
state.flats.forEach((flat) => {
|
||||||
if (filter && filter(flat)) {
|
if (filter && filter(flat)) {
|
||||||
const gps = findFlatGPS(flat)
|
const gps = findFlatGPS(flat)
|
||||||
|
|
||||||
if (gps) {
|
if (gps) {
|
||||||
const previousMarkerIndex = markers.findIndex(
|
const previousMarker = markers.find(
|
||||||
marker => marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
|
(marker) =>
|
||||||
|
marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
|
||||||
)
|
)
|
||||||
|
if (previousMarker) {
|
||||||
const href = router.resolve({ name: 'details', params: { id: flat.id }}).href
|
// randomize position a bit
|
||||||
if (previousMarkerIndex !== -1) {
|
// gps[0] += (Math.random() - 0.5) / 500
|
||||||
markers[previousMarkerIndex].content += '<br/><a href="' + href + '">' + flat.title + '</a>'
|
// gps[1] += (Math.random() - 0.5) / 500
|
||||||
} else {
|
|
||||||
markers.push({
|
|
||||||
'title': '',
|
|
||||||
'content': '<a href="' + href + '">' + flat.title + '</a>',
|
|
||||||
'gps': gps
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
const href = router.resolve({
|
||||||
|
name: 'details',
|
||||||
|
params: { id: flat.id }
|
||||||
|
}).href
|
||||||
|
const cost = flat.cost
|
||||||
|
? costFilter(flat.cost, flat.currency)
|
||||||
|
: ''
|
||||||
|
markers.push({
|
||||||
|
title: '',
|
||||||
|
content:
|
||||||
|
'<a href="' +
|
||||||
|
href +
|
||||||
|
'">' +
|
||||||
|
flat.title +
|
||||||
|
'</a>' +
|
||||||
|
cost,
|
||||||
|
gps: gps,
|
||||||
|
flatId: flat.id
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@ -54,11 +69,11 @@ export default {
|
|||||||
return markers
|
return markers
|
||||||
},
|
},
|
||||||
|
|
||||||
allTimeToPlaces: state => {
|
allTimeToPlaces: (state) => {
|
||||||
const places = {}
|
const places = {}
|
||||||
Object.keys(state.timeToPlaces).forEach(constraint => {
|
Object.keys(state.timeToPlaces).forEach((constraint) => {
|
||||||
const constraintTimeToPlaces = state.timeToPlaces[constraint]
|
const constraintTimeToPlaces = state.timeToPlaces[constraint]
|
||||||
Object.keys(constraintTimeToPlaces).forEach(name => {
|
Object.keys(constraintTimeToPlaces).forEach((name) => {
|
||||||
places[name] = constraintTimeToPlaces[name]
|
places[name] = constraintTimeToPlaces[name]
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -67,5 +82,7 @@ export default {
|
|||||||
|
|
||||||
timeToPlaces: (state, getters) => (constraintName) => {
|
timeToPlaces: (state, getters) => (constraintName) => {
|
||||||
return state.timeToPlaces[constraintName]
|
return state.timeToPlaces[constraintName]
|
||||||
}
|
},
|
||||||
|
|
||||||
|
metadata: (state) => state.metadata
|
||||||
}
|
}
|
||||||
|
@ -5,4 +5,5 @@ export const UPDATE_FLAT_NOTES = 'UPDATE_FLAT_NOTES'
|
|||||||
export const UPDATE_FLAT_NOTATION = 'UPDATE_FLAT_NOTATION'
|
export const UPDATE_FLAT_NOTATION = 'UPDATE_FLAT_NOTATION'
|
||||||
export const UPDATE_FLAT_VISIT_DATE = 'UPDATE_FLAT_VISIT_DATE'
|
export const UPDATE_FLAT_VISIT_DATE = 'UPDATE_FLAT_VISIT_DATE'
|
||||||
export const RECEIVE_TIME_TO_PLACES = 'RECEIVE_TIME_TO_PLACES'
|
export const RECEIVE_TIME_TO_PLACES = 'RECEIVE_TIME_TO_PLACES'
|
||||||
|
export const RECEIVE_METADATA = 'RECEIVE_METADATA'
|
||||||
export const IS_LOADING = 'IS_LOADING'
|
export const IS_LOADING = 'IS_LOADING'
|
||||||
|
@ -5,6 +5,7 @@ import * as types from './mutations-types'
|
|||||||
export const state = {
|
export const state = {
|
||||||
flats: [],
|
flats: [],
|
||||||
timeToPlaces: [],
|
timeToPlaces: [],
|
||||||
|
metadata: [],
|
||||||
loading: 0
|
loading: 0
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -58,6 +59,10 @@ export const mutations = {
|
|||||||
state.timeToPlaces = timeToPlaces
|
state.timeToPlaces = timeToPlaces
|
||||||
state.loading -= 1
|
state.loading -= 1
|
||||||
},
|
},
|
||||||
|
[types.RECEIVE_METADATA] (state, { metadata }) {
|
||||||
|
state.metadata = metadata
|
||||||
|
state.loading -= 1
|
||||||
|
},
|
||||||
[types.IS_LOADING] (state) {
|
[types.IS_LOADING] (state) {
|
||||||
state.loading += 1
|
state.loading += 1
|
||||||
}
|
}
|
||||||
|
@ -1,8 +1,10 @@
|
|||||||
export function findFlatGPS (flat) {
|
export function findFlatGPS (flat) {
|
||||||
let gps
|
let gps
|
||||||
|
|
||||||
// Try to push a marker based on stations
|
if (flat.flatisfy_position) {
|
||||||
if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) {
|
gps = [flat.flatisfy_position.lat, flat.flatisfy_position.lng]
|
||||||
|
} else if (flat.flatisfy_stations && flat.flatisfy_stations.length > 0) {
|
||||||
|
// Try to push a marker based on stations
|
||||||
gps = [0.0, 0.0]
|
gps = [0.0, 0.0]
|
||||||
flat.flatisfy_stations.forEach(station => {
|
flat.flatisfy_stations.forEach(station => {
|
||||||
gps = [gps[0] + station.gps[0], gps[1] + station.gps[1]]
|
gps = [gps[0] + station.gps[0], gps[1] + station.gps[1]]
|
||||||
@ -23,3 +25,18 @@ export function capitalize (string) {
|
|||||||
export function range (n) {
|
export function range (n) {
|
||||||
return [...Array(n).keys()]
|
return [...Array(n).keys()]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function costFilter (value, currency) {
|
||||||
|
if (!value) {
|
||||||
|
return 'N/A'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (currency === 'EUR') {
|
||||||
|
currency = ' €'
|
||||||
|
}
|
||||||
|
|
||||||
|
var valueStr = value.toString()
|
||||||
|
valueStr = ' '.repeat((3 + valueStr.length) % 3) + valueStr
|
||||||
|
|
||||||
|
return valueStr.match(/.{1,3}/g).join('.') + currency
|
||||||
|
}
|
||||||
|
@ -3,216 +3,18 @@
|
|||||||
<template v-if="isLoading">
|
<template v-if="isLoading">
|
||||||
<p>{{ $t("common.loading") }}</p>
|
<p>{{ $t("common.loading") }}</p>
|
||||||
</template>
|
</template>
|
||||||
<div class="grid" v-else-if="flat && timeToPlaces">
|
<Flat :flat="flat"></Flat>
|
||||||
<div class="left-panel">
|
|
||||||
<h2>
|
|
||||||
(<!--
|
|
||||||
--><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
|
|
||||||
-->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
|
|
||||||
--></router-link><!--
|
|
||||||
-->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
|
|
||||||
</h2>
|
|
||||||
<div class="grid">
|
|
||||||
<div class="left-panel">
|
|
||||||
<p>
|
|
||||||
{{ flat.cost }} {{ flat.currency }}
|
|
||||||
<template v-if="flat.utilities === 'included'">
|
|
||||||
{{ $t("flatsDetails.utilities_included") }}
|
|
||||||
</template>
|
|
||||||
<template v-else-if="flat.utilities === 'excluded'">
|
|
||||||
{{ $t("flatsDetails.utilities_excluded") }}
|
|
||||||
</template>
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<p class="right-panel right">
|
|
||||||
{{ flat.area ? flat.area : '?' }} m<sup>2</sup>,
|
|
||||||
{{ flat.rooms ? flat.rooms : '?' }} {{ $tc("flatsDetails.rooms", flat.rooms) }} /
|
|
||||||
{{ flat.bedrooms ? flat.bedrooms : '?' }} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<template v-if="flat.photos && flat.photos.length > 0">
|
|
||||||
<Slider :photos="flat.photos"></Slider>
|
|
||||||
</template>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h3>{{ $t("flatsDetails.Description") }}</h3>
|
|
||||||
<p>{{ flat.text }}</p>
|
|
||||||
<p class="right">{{ flat.location }}</p>
|
|
||||||
<p>First posted {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h3>{{ $t("flatsDetails.Details") }}</h3>
|
|
||||||
<table>
|
|
||||||
<tr v-for="(value, key) in flat.details">
|
|
||||||
<th>{{ key }}</th>
|
|
||||||
<td>{{ value }}</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h3>{{ $t("flatsDetails.Metadata") }}</h3>
|
|
||||||
<table>
|
|
||||||
<tr>
|
|
||||||
<th>
|
|
||||||
{{ $t("flatsDetails.postal_code") }}
|
|
||||||
</th>
|
|
||||||
<td>
|
|
||||||
<template v-if="flat.flatisfy_postal_code.postal_code">
|
|
||||||
{{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
|
|
||||||
</template>
|
|
||||||
<template v-else>
|
|
||||||
?
|
|
||||||
</template>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
|
|
||||||
<tr>
|
|
||||||
<th>
|
|
||||||
{{ $t("flatsDetails.nearby_stations") }}
|
|
||||||
</th>
|
|
||||||
<td>
|
|
||||||
<template v-if="displayedStations">
|
|
||||||
{{ displayedStations }}
|
|
||||||
</template>
|
|
||||||
<template v-else>
|
|
||||||
?
|
|
||||||
</template>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<th>
|
|
||||||
{{ $t("flatsDetails.Times_to") }}
|
|
||||||
</th>
|
|
||||||
<td>
|
|
||||||
<template v-if="Object.keys(flat.flatisfy_time_to).length">
|
|
||||||
<ul class="time_to_list">
|
|
||||||
<li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
|
|
||||||
{{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</template>
|
|
||||||
<template v-else>
|
|
||||||
?
|
|
||||||
</template>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<th>
|
|
||||||
{{ $t("flatsDetails.SquareMeterCost") }}
|
|
||||||
</th>
|
|
||||||
<td>
|
|
||||||
{{ flat.sqCost }} {{ flat.currency }}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h3>{{ $t("flatsDetails.Location") }}</h3>
|
|
||||||
|
|
||||||
<FlatsMap :flats="flatMarkers" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<h3>Notes</h3>
|
|
||||||
|
|
||||||
<form v-on:submit="updateFlatNotes">
|
|
||||||
<textarea ref="notesTextarea" rows="10">{{ flat.notes }}</textarea>
|
|
||||||
<p class="right"><input type="submit" value="Save"/></p>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="right-panel">
|
|
||||||
<h3>{{ $t("flatsDetails.Contact") }}</h3>
|
|
||||||
<div class="contact">
|
|
||||||
<p>
|
|
||||||
<template v-if="flat.phone">
|
|
||||||
<template v-for="phoneNumber in flat.phone.split(',')">
|
|
||||||
<a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a><br/>
|
|
||||||
</template>
|
|
||||||
</template>
|
|
||||||
<template v-else>
|
|
||||||
{{ $t("flatsDetails.no_phone_found") }}
|
|
||||||
</template>
|
|
||||||
</p>
|
|
||||||
<p>{{ $tc("common.Original_post", 42) }}
|
|
||||||
<ul>
|
|
||||||
<li v-for="(url, index) in flat.urls">
|
|
||||||
<a :href="url">
|
|
||||||
{{ $tc("common.Original_post", 1) }} {{ index + 1 }}
|
|
||||||
<i class="fa fa-external-link" aria-hidden="true"></i>
|
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<h3>{{ $t("flatsDetails.Visit") }}</h3>
|
|
||||||
<div class="visit">
|
|
||||||
<flat-pickr
|
|
||||||
:value="flatpickrValue"
|
|
||||||
:config="flatpickrConfig"
|
|
||||||
:placeholder="$t('flatsDetails.setDateOfVisit')"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<h3>{{ $t("common.Actions") }}</h3>
|
|
||||||
|
|
||||||
<nav>
|
|
||||||
<ul>
|
|
||||||
<template v-if="flat.status !== 'user_deleted'">
|
|
||||||
<li ref="notationButton">
|
|
||||||
<template v-for="n in range(notation)">
|
|
||||||
<button class="btnIcon" v-on:mouseover="handleNotationHover(n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(n)">
|
|
||||||
<i class="fa fa-star" aria-hidden="true"></i>
|
|
||||||
</button>
|
|
||||||
</template>
|
|
||||||
<template v-for="n in range(5 - notation)">
|
|
||||||
<button class="btnIcon" v-on:mouseover="handleNotationHover(notation + n)" v-on:mouseout="handleNotationOut()" v-on:click="updateFlatNotation(notation + n)">
|
|
||||||
<i class="fa fa-star-o" aria-hidden="true"></i>
|
|
||||||
</button>
|
|
||||||
</template>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
|
|
||||||
<i class="fa fa-trash" aria-hidden="true"></i>
|
|
||||||
{{ $t("common.Remove") }}
|
|
||||||
</button>
|
|
||||||
</li>
|
|
||||||
</template>
|
|
||||||
<template v-else>
|
|
||||||
<li>
|
|
||||||
<button v-on:click="updateFlatStatus('new')" class="fullButton">
|
|
||||||
<i class="fa fa-undo" aria-hidden="true"></i>
|
|
||||||
{{ $t("common.Restore") }}
|
|
||||||
</button>
|
|
||||||
</li>
|
|
||||||
</template>
|
|
||||||
</ul>
|
|
||||||
</nav>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
import flatPickr from 'vue-flatpickr-component'
|
|
||||||
import moment from 'moment'
|
|
||||||
import 'font-awesome-webpack'
|
|
||||||
import 'flatpickr/dist/flatpickr.css'
|
|
||||||
|
|
||||||
import FlatsMap from '../components/flatsmap.vue'
|
import Flat from '../components/flat.vue'
|
||||||
import Slider from '../components/slider.vue'
|
|
||||||
|
|
||||||
import { capitalize, range } from '../tools'
|
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
components: {
|
components: {
|
||||||
FlatsMap,
|
Flat
|
||||||
Slider,
|
|
||||||
flatPickr
|
|
||||||
},
|
},
|
||||||
|
|
||||||
created () {
|
created () {
|
||||||
document.title = this.title // Set title
|
document.title = this.title // Set title
|
||||||
|
|
||||||
@ -231,20 +33,6 @@ export default {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
data () {
|
|
||||||
return {
|
|
||||||
// TODO: Flatpickr locale
|
|
||||||
'overloadNotation': null,
|
|
||||||
'flatpickrConfig': {
|
|
||||||
static: true,
|
|
||||||
altFormat: 'h:i K, M j, Y',
|
|
||||||
altInput: true,
|
|
||||||
enableTime: true,
|
|
||||||
onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
computed: {
|
computed: {
|
||||||
isLoading () {
|
isLoading () {
|
||||||
return this.$store.getters.isLoading
|
return this.$store.getters.isLoading
|
||||||
@ -252,199 +40,16 @@ export default {
|
|||||||
title () {
|
title () {
|
||||||
return 'Flatisfy - ' + this.$route.params.id
|
return 'Flatisfy - ' + this.$route.params.id
|
||||||
},
|
},
|
||||||
flatMarkers () {
|
|
||||||
return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.$route.params.id)
|
|
||||||
},
|
|
||||||
flat () {
|
flat () {
|
||||||
return this.$store.getters.flat(this.$route.params.id)
|
return this.$store.getters.flat(this.$route.params.id)
|
||||||
},
|
|
||||||
'flatpickrValue' () {
|
|
||||||
if (this.flat && this.flat.visit_date) {
|
|
||||||
return this.flat.visit_date.local().format()
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
},
|
|
||||||
timeToPlaces () {
|
|
||||||
return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
|
|
||||||
},
|
|
||||||
notation () {
|
|
||||||
if (this.overloadNotation) {
|
|
||||||
return this.overloadNotation
|
|
||||||
}
|
|
||||||
return this.flat.notation
|
|
||||||
},
|
|
||||||
journeys () {
|
|
||||||
if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
|
|
||||||
const journeys = []
|
|
||||||
for (const place in this.flat.flatisfy_time_to) {
|
|
||||||
this.flat.flatisfy_time_to[place].sections.forEach(
|
|
||||||
section => journeys.push({
|
|
||||||
geojson: section.geojson,
|
|
||||||
options: {
|
|
||||||
color: section.color ? ('#' + section.color) : '#2196f3',
|
|
||||||
dashArray: section.color ? 'none' : '2, 10'
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return journeys
|
|
||||||
}
|
|
||||||
return []
|
|
||||||
},
|
|
||||||
displayedStations () {
|
|
||||||
if (this.flat.flatisfy_stations.length > 0) {
|
|
||||||
const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
|
|
||||||
return stationsNames.join(', ')
|
|
||||||
} else {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
methods: {
|
methods: {
|
||||||
fetchData () {
|
fetchData () {
|
||||||
this.$store.dispatch('getFlat', { flatId: this.$route.params.id })
|
this.$store.dispatch('getFlat', { flatId: this.$route.params.id })
|
||||||
this.$store.dispatch('getAllTimeToPlaces')
|
}
|
||||||
},
|
|
||||||
|
|
||||||
updateFlatNotation (notation) {
|
|
||||||
notation = notation + 1
|
|
||||||
|
|
||||||
if (notation === this.flat.notation) {
|
|
||||||
this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: 0 })
|
|
||||||
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'new' })
|
|
||||||
} else {
|
|
||||||
this.$store.dispatch('updateFlatNotation', { flatId: this.$route.params.id, newNotation: notation })
|
|
||||||
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: 'followed' })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
updateFlatStatus (status) {
|
|
||||||
this.$store.dispatch('updateFlatStatus', { flatId: this.$route.params.id, newStatus: status })
|
|
||||||
},
|
|
||||||
|
|
||||||
updateFlatNotes () {
|
|
||||||
const notes = this.$refs.notesTextarea.value
|
|
||||||
this.$store.dispatch(
|
|
||||||
'updateFlatNotes',
|
|
||||||
{ flatId: this.$route.params.id, newNotes: notes }
|
|
||||||
)
|
|
||||||
},
|
|
||||||
|
|
||||||
updateFlatVisitDate (date) {
|
|
||||||
if (date) {
|
|
||||||
date = moment(date).utc().format()
|
|
||||||
}
|
|
||||||
this.$store.dispatch(
|
|
||||||
'updateFlatVisitDate',
|
|
||||||
{ flatId: this.$route.params.id, newVisitDate: date }
|
|
||||||
)
|
|
||||||
},
|
|
||||||
|
|
||||||
humanizeTimeTo (time) {
|
|
||||||
const minutes = Math.floor(time.as('minutes'))
|
|
||||||
return minutes + ' ' + this.$tc('common.mins', minutes)
|
|
||||||
},
|
|
||||||
|
|
||||||
handleNotationHover (n) {
|
|
||||||
this.overloadNotation = n + 1
|
|
||||||
},
|
|
||||||
|
|
||||||
handleNotationOut () {
|
|
||||||
this.overloadNotation = null
|
|
||||||
},
|
|
||||||
|
|
||||||
normalizePhoneNumber (phoneNumber) {
|
|
||||||
phoneNumber = phoneNumber.replace(/ /g, '')
|
|
||||||
phoneNumber = phoneNumber.replace(/\./g, '')
|
|
||||||
return phoneNumber
|
|
||||||
},
|
|
||||||
|
|
||||||
capitalize: capitalize,
|
|
||||||
|
|
||||||
range: range
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
@media screen and (min-width: 768px) {
|
|
||||||
.grid {
|
|
||||||
display: grid;
|
|
||||||
grid-gap: 50px;
|
|
||||||
grid-template-columns: 75fr 25fr;
|
|
||||||
}
|
|
||||||
|
|
||||||
.left-panel {
|
|
||||||
grid-column: 1;
|
|
||||||
grid-row: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.right-panel {
|
|
||||||
grid-column: 2;
|
|
||||||
grid-row: 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.left-panel textarea {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.right {
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
nav ul {
|
|
||||||
list-style-type: none;
|
|
||||||
padding-left: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.contact {
|
|
||||||
padding-left: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.right-panel li {
|
|
||||||
margin-bottom: 1em;
|
|
||||||
margin-top: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
button {
|
|
||||||
cursor: pointer;
|
|
||||||
width: 75%;
|
|
||||||
padding: 0.3em;
|
|
||||||
font-size: 0.9em;
|
|
||||||
}
|
|
||||||
|
|
||||||
table {
|
|
||||||
table-layout: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
td {
|
|
||||||
word-wrap: break-word;
|
|
||||||
word-break: break-all;
|
|
||||||
white-space: normal;
|
|
||||||
}
|
|
||||||
|
|
||||||
.time_to_list {
|
|
||||||
margin: 0;
|
|
||||||
padding-left: 0;
|
|
||||||
list-style-position: outside;
|
|
||||||
list-style-type: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btnIcon {
|
|
||||||
border: none;
|
|
||||||
width: auto;
|
|
||||||
background-color: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media screen and (max-width: 767px) {
|
|
||||||
.right-panel nav {
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.fullButton {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
@ -1,32 +1,56 @@
|
|||||||
<template>
|
<template>
|
||||||
<div>
|
<div class="flex-row">
|
||||||
<FlatsMap :flats="flatsMarkers" :places="timeToPlaces"></FlatsMap>
|
<div class="flex" style="overflow: auto;">
|
||||||
|
<FlatsMap :flats="flatsMarkers" :places="timeToPlaces" v-on:select-flat="selectFlat($event)"></FlatsMap>
|
||||||
<h2>{{ $t("home.new_available_flats") }}</h2>
|
<h2>
|
||||||
|
{{ $t("home.new_available_flats") }}
|
||||||
<template v-if="Object.keys(postalCodesFlatsBuckets).length > 0">
|
<template v-if="lastUpdate">
|
||||||
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
|
<label class="show-last-update">
|
||||||
<h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3>
|
{{ $t("home.Last_update") }} {{ lastUpdate.fromNow() }}
|
||||||
<FlatsTable :flats="postal_code_data.flats"></FlatsTable>
|
</label>
|
||||||
</template>
|
</template>
|
||||||
</template>
|
<label class="show-expired-flats-label">
|
||||||
<template v-else-if="isLoading">
|
<input type="checkbox" class="show-expired-flats-checkbox" v-model="showExpiredFlats" />
|
||||||
<p>{{ $t("common.loading") }}</p>
|
{{ $t("home.show_expired_flats") }}
|
||||||
</template>
|
</label>
|
||||||
<template v-else>
|
</h2>
|
||||||
<p>{{ $t("flatListing.no_available_flats") }}</p>
|
|
||||||
</template>
|
<template v-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
|
||||||
|
<template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
|
||||||
|
<h3>
|
||||||
|
{{ insee_code_data.name || $t('common.Unknown') }}
|
||||||
|
<span v-if="insee_code !== 'undefined'">
|
||||||
|
({{ insee_code }})
|
||||||
|
</span>
|
||||||
|
- {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}
|
||||||
|
</h3>
|
||||||
|
<FlatsTable :flats="insee_code_data.flats" :key="insee_code"></FlatsTable>
|
||||||
|
</template>
|
||||||
|
</template>
|
||||||
|
<template v-else-if="isLoading">
|
||||||
|
<p>{{ $t("common.loading") }}</p>
|
||||||
|
</template>
|
||||||
|
<template v-else>
|
||||||
|
<p>{{ $t("flatListing.no_available_flats") }}</p>
|
||||||
|
</template>
|
||||||
|
</div>
|
||||||
|
<div v-if="selectedFlat" class="flex">
|
||||||
|
<Flat :flat="selectedFlat"></Flat>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
import FlatsMap from '../components/flatsmap.vue'
|
import FlatsMap from '../components/flatsmap.vue'
|
||||||
import FlatsTable from '../components/flatstable.vue'
|
import FlatsTable from '../components/flatstable.vue'
|
||||||
|
import Flat from '../components/flat.vue'
|
||||||
|
import moment from 'moment'
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
components: {
|
components: {
|
||||||
FlatsMap,
|
FlatsMap,
|
||||||
FlatsTable
|
FlatsTable,
|
||||||
|
Flat
|
||||||
},
|
},
|
||||||
|
|
||||||
created () {
|
created () {
|
||||||
@ -36,21 +60,83 @@ export default {
|
|||||||
this.$store.dispatch('getAllFlats')
|
this.$store.dispatch('getAllFlats')
|
||||||
// Fetch time to places when the component is created
|
// Fetch time to places when the component is created
|
||||||
this.$store.dispatch('getAllTimeToPlaces')
|
this.$store.dispatch('getAllTimeToPlaces')
|
||||||
|
// Fetch application metadata when the component is created
|
||||||
|
this.$store.dispatch('getMetadata')
|
||||||
|
},
|
||||||
|
|
||||||
|
data () {
|
||||||
|
return {
|
||||||
|
showExpiredFlats: false,
|
||||||
|
selectedFlat: undefined
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
methods: {
|
||||||
|
selectFlat: async function (flatId) {
|
||||||
|
if (flatId) {
|
||||||
|
await this.$store.dispatch('getFlat', { flatId })
|
||||||
|
this.selectedFlat = await this.$store.getters.flat(flatId)
|
||||||
|
} else {
|
||||||
|
this.selectedFlat = undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
computed: {
|
computed: {
|
||||||
postalCodesFlatsBuckets () {
|
inseeCodesFlatsBuckets () {
|
||||||
return this.$store.getters.postalCodesFlatsBuckets(flat => flat.status === 'new')
|
return this.$store.getters.inseeCodesFlatsBuckets(flat =>
|
||||||
|
flat.status === 'new' &&
|
||||||
|
(this.showExpiredFlats || !flat.is_expired)
|
||||||
|
)
|
||||||
},
|
},
|
||||||
flatsMarkers () {
|
flatsMarkers () {
|
||||||
return this.$store.getters.flatsMarkers(this.$router, flat => flat.status === 'new')
|
return this.$store.getters.flatsMarkers(this.$router, flat =>
|
||||||
|
flat.status === 'new' &&
|
||||||
|
(this.showExpiredFlats || !flat.is_expired)
|
||||||
|
)
|
||||||
},
|
},
|
||||||
timeToPlaces () {
|
timeToPlaces () {
|
||||||
return this.$store.getters.allTimeToPlaces
|
return this.$store.getters.allTimeToPlaces
|
||||||
},
|
},
|
||||||
|
lastUpdate () {
|
||||||
|
var metadata = this.$store.getters.metadata
|
||||||
|
var lastUpdateDate = moment.unix(metadata['last_update'])
|
||||||
|
if (!lastUpdateDate.isValid()) {
|
||||||
|
lastUpdateDate = 0
|
||||||
|
}
|
||||||
|
return lastUpdateDate
|
||||||
|
},
|
||||||
isLoading () {
|
isLoading () {
|
||||||
return this.$store.getters.isLoading
|
return this.$store.getters.isLoading
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
<style scoped>
|
||||||
|
h2 {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
.flex-row {
|
||||||
|
display:flex;
|
||||||
|
}
|
||||||
|
.flex {
|
||||||
|
flex: 1;
|
||||||
|
}
|
||||||
|
table {
|
||||||
|
margin-left: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.show-expired-flats-label {
|
||||||
|
font-weight: initial;
|
||||||
|
font-size: initial;
|
||||||
|
}
|
||||||
|
|
||||||
|
.show-last-update {
|
||||||
|
font-weight: initial;
|
||||||
|
font-size: initial;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
@ -12,10 +12,10 @@
|
|||||||
<template v-if="isLoading">
|
<template v-if="isLoading">
|
||||||
<p>{{ $t("common.loading") }}</p>
|
<p>{{ $t("common.loading") }}</p>
|
||||||
</template>
|
</template>
|
||||||
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length > 0">
|
<template v-else-if="Object.keys(inseeCodesFlatsBuckets).length > 0">
|
||||||
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
|
<template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
|
||||||
<h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3>
|
<h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
|
||||||
<FlatsTable :flats="postal_code_data.flats"></FlatsTable>
|
<FlatsTable :flats="insee_code_data.flats"></FlatsTable>
|
||||||
</template>
|
</template>
|
||||||
</template>
|
</template>
|
||||||
<template v-else>
|
<template v-else>
|
||||||
@ -51,12 +51,12 @@ export default {
|
|||||||
},
|
},
|
||||||
|
|
||||||
computed: {
|
computed: {
|
||||||
postalCodesFlatsBuckets () {
|
inseeCodesFlatsBuckets () {
|
||||||
if (!this.$route.query.query || this.loading) {
|
if (!this.$route.query.query || this.loading) {
|
||||||
return {}
|
return {}
|
||||||
}
|
}
|
||||||
|
|
||||||
return this.$store.getters.postalCodesFlatsBuckets(
|
return this.$store.getters.inseeCodesFlatsBuckets(
|
||||||
flat => flat.status !== 'duplicate' && flat.status !== 'ignored' && flat.status !== 'user_deleted'
|
flat => flat.status !== 'duplicate' && flat.status !== 'ignored' && flat.status !== 'user_deleted'
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
@ -17,11 +17,11 @@
|
|||||||
<template v-if="isLoading">
|
<template v-if="isLoading">
|
||||||
<p>{{ $t("common.loading") }}</p>
|
<p>{{ $t("common.loading") }}</p>
|
||||||
</template>
|
</template>
|
||||||
<template v-else-if="Object.keys(postalCodesFlatsBuckets).length">
|
<template v-else-if="Object.keys(inseeCodesFlatsBuckets).length">
|
||||||
<template v-for="(postal_code_data, postal_code) in postalCodesFlatsBuckets">
|
<template v-for="(insee_code_data, insee_code) in inseeCodesFlatsBuckets">
|
||||||
<h3>{{ postal_code_data.name }} ({{ postal_code }}) - {{ postal_code_data.flats.length }} {{ $tc("common.flats", postal_code_data.flats.length) }}</h3>
|
<h3>{{ insee_code_data.name }} ({{ insee_code }}) - {{ insee_code_data.flats.length }} {{ $tc("common.flats", insee_code_data.flats.length) }}</h3>
|
||||||
<FlatsTable
|
<FlatsTable
|
||||||
:flats="postal_code_data.flats"
|
:flats="insee_code_data.flats"
|
||||||
:showNotationColumn="$route.params.status === 'followed'"
|
:showNotationColumn="$route.params.status === 'followed'"
|
||||||
:showNotes="$route.params.status === 'followed'"
|
:showNotes="$route.params.status === 'followed'"
|
||||||
:initialSortBy="$route.params.status === 'followed' ? 'notation' : undefined"
|
:initialSortBy="$route.params.status === 'followed' ? 'notation' : undefined"
|
||||||
@ -81,8 +81,8 @@ export default {
|
|||||||
},
|
},
|
||||||
|
|
||||||
computed: {
|
computed: {
|
||||||
postalCodesFlatsBuckets () {
|
inseeCodesFlatsBuckets () {
|
||||||
return this.$store.getters.postalCodesFlatsBuckets(flat => flat.status === this.$route.params.status)
|
return this.$store.getters.inseeCodesFlatsBuckets(flat => flat.status === this.$route.params.status)
|
||||||
},
|
},
|
||||||
title () {
|
title () {
|
||||||
return 'Flatisfy - ' + capitalize(this.$t('status.' + this.$route.params.status))
|
return 'Flatisfy - ' + capitalize(this.$t('status.' + this.$route.params.status))
|
||||||
|
@ -2,21 +2,23 @@
|
|||||||
"""
|
"""
|
||||||
This module contains the definition of the web app API routes.
|
This module contains the definition of the web app API routes.
|
||||||
"""
|
"""
|
||||||
from __future__ import (
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
import re
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
import bottle
|
import bottle
|
||||||
import vobject
|
import vobject
|
||||||
|
|
||||||
import flatisfy.data
|
import flatisfy.data
|
||||||
from flatisfy.models import flat as flat_model
|
from flatisfy.models import flat as flat_model
|
||||||
|
from flatisfy.models import postal_code
|
||||||
from flatisfy.models.postal_code import PostalCode
|
from flatisfy.models.postal_code import PostalCode
|
||||||
|
from flatisfy import cmds
|
||||||
|
|
||||||
FILTER_RE = re.compile(r"filter\[([A-z0-9_]+)\]")
|
FILTER_RE = re.compile(r"filter\[([A-z0-9_]+)\]")
|
||||||
|
|
||||||
@ -59,26 +61,24 @@ def _JSONApiSpec(query, model, default_sorting=None):
|
|||||||
# Handle pagination according to JSON API spec
|
# Handle pagination according to JSON API spec
|
||||||
page_number, page_size = 0, None
|
page_number, page_size = 0, None
|
||||||
try:
|
try:
|
||||||
if 'page[size]' in query:
|
if "page[size]" in query:
|
||||||
page_size = int(query['page[size]'])
|
page_size = int(query["page[size]"])
|
||||||
assert page_size > 0
|
assert page_size > 0
|
||||||
if 'page[number]' in query:
|
if "page[number]" in query:
|
||||||
page_number = int(query['page[number]'])
|
page_number = int(query["page[number]"])
|
||||||
assert page_number >= 0
|
assert page_number >= 0
|
||||||
except (AssertionError, ValueError):
|
except (AssertionError, ValueError):
|
||||||
raise ValueError("Invalid pagination provided.")
|
raise ValueError("Invalid pagination provided.")
|
||||||
|
|
||||||
# Handle sorting according to JSON API spec
|
# Handle sorting according to JSON API spec
|
||||||
sorting = []
|
sorting = []
|
||||||
if 'sort' in query:
|
if "sort" in query:
|
||||||
for index in query['sort'].split(','):
|
for index in query["sort"].split(","):
|
||||||
try:
|
try:
|
||||||
sort_field = getattr(model, index.lstrip('-'))
|
sort_field = getattr(model, index.lstrip("-"))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise ValueError(
|
raise ValueError("Invalid sorting key provided: {}.".format(index))
|
||||||
"Invalid sorting key provided: {}.".format(index)
|
if index.startswith("-"):
|
||||||
)
|
|
||||||
if index.startswith('-'):
|
|
||||||
sort_field = sort_field.desc()
|
sort_field = sort_field.desc()
|
||||||
sorting.append(sort_field)
|
sorting.append(sort_field)
|
||||||
# Default sorting options
|
# Default sorting options
|
||||||
@ -86,11 +86,7 @@ def _JSONApiSpec(query, model, default_sorting=None):
|
|||||||
try:
|
try:
|
||||||
sorting.append(getattr(model, default_sorting))
|
sorting.append(getattr(model, default_sorting))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise ValueError(
|
raise ValueError("Invalid default sorting key provided: {}.".format(default_sorting))
|
||||||
"Invalid default sorting key provided: {}.".format(
|
|
||||||
default_sorting
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
return filters, page_number, page_size, sorting
|
return filters, page_number, page_size, sorting
|
||||||
|
|
||||||
@ -109,22 +105,22 @@ def _serialize_flat(flat, config):
|
|||||||
|
|
||||||
postal_codes = {}
|
postal_codes = {}
|
||||||
for constraint_name, constraint in config["constraints"].items():
|
for constraint_name, constraint in config["constraints"].items():
|
||||||
postal_codes[constraint_name] = flatisfy.data.load_data(
|
postal_codes[constraint_name] = flatisfy.data.load_data(PostalCode, constraint, config)
|
||||||
PostalCode, constraint, config
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert flat["flatisfy_postal_code"]
|
assert flat["flatisfy_position"]
|
||||||
|
|
||||||
|
lat = flat["flatisfy_position"]["lat"]
|
||||||
|
lng = flat["flatisfy_position"]["lng"]
|
||||||
postal_code_data = next(
|
postal_code_data = next(
|
||||||
x
|
x for x in postal_codes.get(flat["flatisfy_constraint"], []) if x.lat == lat and x.lng == lng
|
||||||
for x in postal_codes.get(flat["flatisfy_constraint"], [])
|
|
||||||
if x.postal_code == flat["flatisfy_postal_code"]
|
|
||||||
)
|
)
|
||||||
|
logging.warn(f"{postal_code_data.name}, {lat}, {lng}")
|
||||||
flat["flatisfy_postal_code"] = {
|
flat["flatisfy_postal_code"] = {
|
||||||
"postal_code": flat["flatisfy_postal_code"],
|
"postal_code": postal_code_data.postal_code,
|
||||||
|
"insee_code": postal_code_data.insee_code,
|
||||||
"name": postal_code_data.name,
|
"name": postal_code_data.name,
|
||||||
"gps": (postal_code_data.lat, postal_code_data.lng)
|
"gps": (postal_code_data.lat, postal_code_data.lng),
|
||||||
}
|
}
|
||||||
except (AssertionError, StopIteration):
|
except (AssertionError, StopIteration):
|
||||||
flat["flatisfy_postal_code"] = {}
|
flat["flatisfy_postal_code"] = {}
|
||||||
@ -146,7 +142,8 @@ def index_v1():
|
|||||||
"flat": "/api/v1/flat/:id",
|
"flat": "/api/v1/flat/:id",
|
||||||
"search": "/api/v1/search",
|
"search": "/api/v1/search",
|
||||||
"ics": "/api/v1/ics/visits.ics",
|
"ics": "/api/v1/ics/visits.ics",
|
||||||
"time_to_places": "/api/v1/time_to_places"
|
"time_to_places": "/api/v1/time_to_places",
|
||||||
|
"metadata": "/api/v1/metadata",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -177,36 +174,32 @@ def flats_v1(config, db):
|
|||||||
|
|
||||||
:return: The available flats objects in a JSON ``data`` dict.
|
:return: The available flats objects in a JSON ``data`` dict.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return ''
|
return ""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
filters, page_number, page_size, sorting = _JSONApiSpec(
|
filters, page_number, page_size, sorting = _JSONApiSpec(
|
||||||
bottle.request.query,
|
bottle.request.query, flat_model.Flat, default_sorting="cost"
|
||||||
flat_model.Flat,
|
|
||||||
default_sorting='cost'
|
|
||||||
)
|
)
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
return JSONError(400, str(exc))
|
return JSONError(400, str(exc))
|
||||||
|
|
||||||
# Build flat list
|
# Build flat list
|
||||||
db_query = (
|
db_query = db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
|
||||||
db.query(flat_model.Flat).filter_by(**filters).order_by(*sorting)
|
|
||||||
)
|
|
||||||
flats = [
|
flats = [
|
||||||
_serialize_flat(flat, config)
|
_serialize_flat(flat, config)
|
||||||
for flat in itertools.islice(
|
for flat in itertools.islice(
|
||||||
db_query,
|
db_query,
|
||||||
page_number * page_size if page_size else None,
|
page_number * page_size if page_size else None,
|
||||||
page_number * page_size + page_size if page_size else None
|
page_number * page_size + page_size if page_size else None,
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
return {
|
return {
|
||||||
"data": flats,
|
"data": flats,
|
||||||
"page": page_number,
|
"page": page_number,
|
||||||
"items_per_page": page_size if page_size else len(flats)
|
"items_per_page": page_size if page_size else len(flats),
|
||||||
}
|
}
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
@ -222,7 +215,7 @@ def flat_v1(flat_id, config, db):
|
|||||||
|
|
||||||
:return: The flat object in a JSON ``data`` dict.
|
:return: The flat object in a JSON ``data`` dict.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@ -232,9 +225,7 @@ def flat_v1(flat_id, config, db):
|
|||||||
if not flat:
|
if not flat:
|
||||||
return JSONError(404, "No flat with id {}.".format(flat_id))
|
return JSONError(404, "No flat with id {}.".format(flat_id))
|
||||||
|
|
||||||
return {
|
return {"data": _serialize_flat(flat, config)}
|
||||||
"data": _serialize_flat(flat, config)
|
|
||||||
}
|
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
|
|
||||||
@ -258,7 +249,7 @@ def update_flat_v1(flat_id, config, db):
|
|||||||
|
|
||||||
:return: The new flat object in a JSON ``data`` dict.
|
:return: The new flat object in a JSON ``data`` dict.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@ -272,14 +263,9 @@ def update_flat_v1(flat_id, config, db):
|
|||||||
for key, value in json_body.items():
|
for key, value in json_body.items():
|
||||||
setattr(flat, key, value)
|
setattr(flat, key, value)
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
return JSONError(
|
return JSONError(400, "Invalid payload provided: {}.".format(str(exc)))
|
||||||
400,
|
|
||||||
"Invalid payload provided: {}.".format(str(exc))
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
return {"data": _serialize_flat(flat, config)}
|
||||||
"data": _serialize_flat(flat, config)
|
|
||||||
}
|
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
|
|
||||||
@ -295,20 +281,15 @@ def time_to_places_v1(config):
|
|||||||
:return: The JSON dump of the places to compute time to (dict of places
|
:return: The JSON dump of the places to compute time to (dict of places
|
||||||
names mapped to GPS coordinates).
|
names mapped to GPS coordinates).
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
places = {}
|
places = {}
|
||||||
for constraint_name, constraint in config["constraints"].items():
|
for constraint_name, constraint in config["constraints"].items():
|
||||||
places[constraint_name] = {
|
places[constraint_name] = {k: v["gps"] for k, v in constraint["time_to"].items()}
|
||||||
k: v["gps"]
|
return {"data": places}
|
||||||
for k, v in constraint["time_to"].items()
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
"data": places
|
|
||||||
}
|
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
|
|
||||||
@ -343,7 +324,7 @@ def search_v1(db, config):
|
|||||||
|
|
||||||
:return: The matching flat objects in a JSON ``data`` dict.
|
:return: The matching flat objects in a JSON ``data`` dict.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@ -355,30 +336,25 @@ def search_v1(db, config):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
filters, page_number, page_size, sorting = _JSONApiSpec(
|
filters, page_number, page_size, sorting = _JSONApiSpec(
|
||||||
bottle.request.query,
|
bottle.request.query, flat_model.Flat, default_sorting="cost"
|
||||||
flat_model.Flat,
|
|
||||||
default_sorting='cost'
|
|
||||||
)
|
)
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
return JSONError(400, str(exc))
|
return JSONError(400, str(exc))
|
||||||
|
|
||||||
flats_db_query = (flat_model.Flat
|
flats_db_query = flat_model.Flat.search_query(db, query).filter_by(**filters).order_by(*sorting)
|
||||||
.search_query(db, query)
|
|
||||||
.filter_by(**filters)
|
|
||||||
.order_by(*sorting))
|
|
||||||
flats = [
|
flats = [
|
||||||
_serialize_flat(flat, config)
|
_serialize_flat(flat, config)
|
||||||
for flat in itertools.islice(
|
for flat in itertools.islice(
|
||||||
flats_db_query,
|
flats_db_query,
|
||||||
page_number * page_size if page_size else None,
|
page_number * page_size if page_size else None,
|
||||||
page_number * page_size + page_size if page_size else None
|
page_number * page_size + page_size if page_size else None,
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"data": flats,
|
"data": flats,
|
||||||
"page": page_number,
|
"page": page_number,
|
||||||
"items_per_page": page_size if page_size else len(flats)
|
"items_per_page": page_size if page_size else len(flats),
|
||||||
}
|
}
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
@ -394,35 +370,33 @@ def ics_feed_v1(config, db):
|
|||||||
|
|
||||||
:return: The ICS feed for the visits.
|
:return: The ICS feed for the visits.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
cal = vobject.iCalendar()
|
cal = vobject.iCalendar()
|
||||||
try:
|
try:
|
||||||
flats_with_visits = db.query(flat_model.Flat).filter(
|
flats_with_visits = db.query(flat_model.Flat).filter(flat_model.Flat.visit_date.isnot(None))
|
||||||
flat_model.Flat.visit_date.isnot(None)
|
|
||||||
)
|
|
||||||
|
|
||||||
for flat in flats_with_visits:
|
for flat in flats_with_visits:
|
||||||
vevent = cal.add('vevent')
|
vevent = cal.add("vevent")
|
||||||
vevent.add('dtstart').value = flat.visit_date
|
vevent.add("dtstart").value = flat.visit_date
|
||||||
vevent.add('dtend').value = (
|
vevent.add("dtend").value = flat.visit_date + datetime.timedelta(hours=1)
|
||||||
flat.visit_date + datetime.timedelta(hours=1)
|
vevent.add("summary").value = "Visit - {}".format(flat.title)
|
||||||
)
|
|
||||||
vevent.add('summary').value = 'Visit - {}'.format(flat.title)
|
|
||||||
|
|
||||||
description = (
|
description = "{} (area: {}, cost: {} {})\n{}#/flat/{}\n".format(
|
||||||
'{} (area: {}, cost: {} {})\n{}#/flat/{}\n'.format(
|
flat.title,
|
||||||
flat.title, flat.area, flat.cost, flat.currency,
|
flat.area,
|
||||||
config['website_url'], flat.id
|
flat.cost,
|
||||||
)
|
flat.currency,
|
||||||
|
config["website_url"],
|
||||||
|
flat.id,
|
||||||
)
|
)
|
||||||
description += '\n{}\n'.format(flat.text)
|
description += "\n{}\n".format(flat.text)
|
||||||
if flat.notes:
|
if flat.notes:
|
||||||
description += '\n{}\n'.format(flat.notes)
|
description += "\n{}\n".format(flat.notes)
|
||||||
|
|
||||||
vevent.add('description').value = description
|
vevent.add("description").value = description
|
||||||
except Exception: # pylint: disable= broad-except
|
except Exception: # pylint: disable= broad-except
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -437,9 +411,11 @@ def opendata_index_v1():
|
|||||||
|
|
||||||
GET /api/v1/opendata
|
GET /api/v1/opendata
|
||||||
"""
|
"""
|
||||||
return {
|
if bottle.request.method == "OPTIONS":
|
||||||
"postal_codes": "/api/v1/opendata/postal_codes"
|
# CORS
|
||||||
}
|
return {}
|
||||||
|
|
||||||
|
return {"postal_codes": "/api/v1/opendata/postal_codes"}
|
||||||
|
|
||||||
|
|
||||||
def opendata_postal_codes_v1(db):
|
def opendata_postal_codes_v1(db):
|
||||||
@ -470,32 +446,79 @@ def opendata_postal_codes_v1(db):
|
|||||||
|
|
||||||
:return: The postal codes data from opendata.
|
:return: The postal codes data from opendata.
|
||||||
"""
|
"""
|
||||||
if bottle.request.method == 'OPTIONS':
|
if bottle.request.method == "OPTIONS":
|
||||||
# CORS
|
# CORS
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
filters, page_number, page_size, sorting = _JSONApiSpec(
|
filters, page_number, page_size, sorting = _JSONApiSpec(
|
||||||
bottle.request.query,
|
bottle.request.query, PostalCode, default_sorting="postal_code"
|
||||||
PostalCode,
|
|
||||||
default_sorting='postal_code'
|
|
||||||
)
|
)
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
return JSONError(400, str(exc))
|
return JSONError(400, str(exc))
|
||||||
|
|
||||||
db_query = db.query(PostalCode).filter_by(**filters).order_by(*sorting)
|
db_query = db.query(PostalCode).filter_by(**filters).order_by(*sorting)
|
||||||
postal_codes = [
|
postal_codes = [
|
||||||
x.json_api_repr() for x in itertools.islice(
|
x.json_api_repr()
|
||||||
|
for x in itertools.islice(
|
||||||
db_query,
|
db_query,
|
||||||
page_number * page_size if page_size else None,
|
page_number * page_size if page_size else None,
|
||||||
page_number * page_size + page_size if page_size else None
|
page_number * page_size + page_size if page_size else None,
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
return {
|
return {
|
||||||
"data": postal_codes,
|
"data": postal_codes,
|
||||||
"page": page_number,
|
"page": page_number,
|
||||||
"items_per_page": page_size if page_size else len(postal_codes)
|
"items_per_page": page_size if page_size else len(postal_codes),
|
||||||
}
|
}
|
||||||
except Exception as exc: # pylint: disable= broad-except
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
return JSONError(500, str(exc))
|
return JSONError(500, str(exc))
|
||||||
|
|
||||||
|
|
||||||
|
def metadata_v1(config):
|
||||||
|
"""
|
||||||
|
API v1 metadata of the application.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
GET /api/v1/metadata
|
||||||
|
|
||||||
|
:return: The application metadata.
|
||||||
|
"""
|
||||||
|
if bottle.request.method == "OPTIONS":
|
||||||
|
# CORS
|
||||||
|
return {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
last_update = None
|
||||||
|
try:
|
||||||
|
ts_file = os.path.join(config["data_directory"], "timestamp")
|
||||||
|
last_update = os.path.getmtime(ts_file)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return {"data": {"last_update": last_update}}
|
||||||
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
|
return JSONError(500, str(exc))
|
||||||
|
|
||||||
|
|
||||||
|
def import_v1(config):
|
||||||
|
"""
|
||||||
|
API v1 import new flats.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
GET /api/v1/import
|
||||||
|
|
||||||
|
:return: The new flats.
|
||||||
|
"""
|
||||||
|
if bottle.request.method == "OPTIONS":
|
||||||
|
# CORS
|
||||||
|
return {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
flats_id = cmds.import_and_filter(config, False, True)
|
||||||
|
return {"flats": flats_id}
|
||||||
|
except Exception as exc: # pylint: disable= broad-except
|
||||||
|
return JSONError(500, str(exc))
|
||||||
|
2
import.sh
Executable file
2
import.sh
Executable file
@ -0,0 +1,2 @@
|
|||||||
|
#!/bin/sh -ev
|
||||||
|
python -m flatisfy import --config config.json --new-only -v "$@"
|
1
migrations/README
Normal file
1
migrations/README
Normal file
@ -0,0 +1 @@
|
|||||||
|
Generic single-database configuration.
|
68
migrations/env.py
Normal file
68
migrations/env.py
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
from __future__ import with_statement
|
||||||
|
from alembic import context
|
||||||
|
from sqlalchemy import engine_from_config, pool
|
||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
# this is the Alembic Config object, which provides
|
||||||
|
# access to the values within the .ini file in use.
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
# Interpret the config file for Python logging.
|
||||||
|
# This line sets up loggers basically.
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
# add your model's MetaData object here
|
||||||
|
# for 'autogenerate' support
|
||||||
|
# from myapp import mymodel
|
||||||
|
# target_metadata = mymodel.Base.metadata
|
||||||
|
target_metadata = None
|
||||||
|
|
||||||
|
# other values from the config, defined by the needs of env.py,
|
||||||
|
# can be acquired:
|
||||||
|
# my_important_option = config.get_main_option("my_important_option")
|
||||||
|
# ... etc.
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline():
|
||||||
|
"""Run migrations in 'offline' mode.
|
||||||
|
|
||||||
|
This configures the context with just a URL
|
||||||
|
and not an Engine, though an Engine is acceptable
|
||||||
|
here as well. By skipping the Engine creation
|
||||||
|
we don't even need a DBAPI to be available.
|
||||||
|
|
||||||
|
Calls to context.execute() here emit the given string to the
|
||||||
|
script output.
|
||||||
|
|
||||||
|
"""
|
||||||
|
url = config.get_main_option("sqlalchemy.url")
|
||||||
|
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
|
||||||
|
|
||||||
|
with context.begin_transaction():
|
||||||
|
context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online():
|
||||||
|
"""Run migrations in 'online' mode.
|
||||||
|
|
||||||
|
In this scenario we need to create an Engine
|
||||||
|
and associate a connection with the context.
|
||||||
|
|
||||||
|
"""
|
||||||
|
connectable = engine_from_config(
|
||||||
|
config.get_section(config.config_ini_section),
|
||||||
|
prefix="sqlalchemy.",
|
||||||
|
poolclass=pool.NullPool,
|
||||||
|
)
|
||||||
|
|
||||||
|
with connectable.connect() as connection:
|
||||||
|
context.configure(connection=connection, target_metadata=target_metadata)
|
||||||
|
|
||||||
|
with context.begin_transaction():
|
||||||
|
context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
if context.is_offline_mode():
|
||||||
|
run_migrations_offline()
|
||||||
|
else:
|
||||||
|
run_migrations_online()
|
24
migrations/script.py.mako
Normal file
24
migrations/script.py.mako
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
${downgrades if downgrades else "pass"}
|
24
migrations/versions/8155b83242eb_add_is_expired.py
Normal file
24
migrations/versions/8155b83242eb_add_is_expired.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
"""Add is_expired
|
||||||
|
|
||||||
|
Revision ID: 8155b83242eb
|
||||||
|
Revises:
|
||||||
|
Create Date: 2018-10-16 22:51:25.442678
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "8155b83242eb"
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column("flats", sa.Column("is_expired", sa.Boolean(), default=False))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column("flats", "is_expired")
|
24
migrations/versions/9e58c66f1ac1_add_flat_insee_column.py
Normal file
24
migrations/versions/9e58c66f1ac1_add_flat_insee_column.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
"""Add flat INSEE column
|
||||||
|
|
||||||
|
Revision ID: 9e58c66f1ac1
|
||||||
|
Revises: d21933db9ad8
|
||||||
|
Create Date: 2021-02-08 16:31:18.961186
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "9e58c66f1ac1"
|
||||||
|
down_revision = "d21933db9ad8"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column("postal_codes", sa.Column("insee_code", sa.String()))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column("postal_codes", "insee_code")
|
69
migrations/versions/d21933db9ad8_add_flat_position_column.py
Normal file
69
migrations/versions/d21933db9ad8_add_flat_position_column.py
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
"""Add flat position column
|
||||||
|
|
||||||
|
Revision ID: d21933db9ad8
|
||||||
|
Revises: 8155b83242eb
|
||||||
|
Create Date: 2021-02-08 16:26:37.190842
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlalchemy.types as types
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class StringyJSON(types.TypeDecorator):
|
||||||
|
"""
|
||||||
|
Stores and retrieves JSON as TEXT for SQLite.
|
||||||
|
|
||||||
|
From
|
||||||
|
https://avacariu.me/articles/2016/compiling-json-as-text-for-sqlite-with-sqlalchemy.
|
||||||
|
|
||||||
|
.. note ::
|
||||||
|
|
||||||
|
The associated field is immutable. That is, changes to the data
|
||||||
|
(typically, changing the value of a dict field) will not trigger an
|
||||||
|
update on the SQL side upon ``commit`` as the reference to the object
|
||||||
|
will not have been updated. One should force the update by forcing an
|
||||||
|
update of the reference (by performing a ``copy`` operation on the dict
|
||||||
|
for instance).
|
||||||
|
"""
|
||||||
|
|
||||||
|
impl = types.TEXT
|
||||||
|
|
||||||
|
def process_bind_param(self, value, dialect):
|
||||||
|
"""
|
||||||
|
Process the bound param, serialize the object to JSON before saving
|
||||||
|
into database.
|
||||||
|
"""
|
||||||
|
if value is not None:
|
||||||
|
value = json.dumps(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def process_result_value(self, value, dialect):
|
||||||
|
"""
|
||||||
|
Process the value fetched from the database, deserialize the JSON
|
||||||
|
string before returning the object.
|
||||||
|
"""
|
||||||
|
if value is not None:
|
||||||
|
value = json.loads(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
# TypeEngine.with_variant says "use StringyJSON instead when
|
||||||
|
# connecting to 'sqlite'"
|
||||||
|
# pylint: disable=locally-disabled,invalid-name
|
||||||
|
MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "d21933db9ad8"
|
||||||
|
down_revision = "8155b83242eb"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column("flats", sa.Column("flatisfy_position", MagicJSON, default=False))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
    """Revert the migration: drop the ``flatisfy_position`` column from ``flats``."""
    op.drop_column("flats", "flatisfy_position")
|
24
modules/explorimmo/__init__.py
Normal file
24
modules/explorimmo/__init__.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from .module import ExplorimmoModule
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['ExplorimmoModule']
|
92
modules/explorimmo/browser.py
Normal file
92
modules/explorimmo/browser.py
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from woob.browser import PagesBrowser, URL
|
||||||
|
from woob.capabilities.housing import (TypeNotSupported, POSTS_TYPES,
|
||||||
|
HOUSE_TYPES)
|
||||||
|
from woob.tools.compat import urlencode
|
||||||
|
from .pages import CitiesPage, SearchPage, HousingPage, HousingPage2, PhonePage
|
||||||
|
|
||||||
|
|
||||||
|
class ExplorimmoBrowser(PagesBrowser):
    """Browser for immobilier.lefigaro.fr (explorimmo)."""

    BASEURL = 'https://immobilier.lefigaro.fr'

    # Raw strings for the URL regexps: '\?' and '\d' are invalid escape
    # sequences in plain string literals (DeprecationWarning since Python 3.6,
    # SyntaxWarning since 3.12). The patterns themselves are unchanged.
    cities = URL(r'/rest/locations\?q=(?P<city>.*)', CitiesPage)
    search = URL(r'/annonces/resultat/annonces.html\?(?P<query>.*)', SearchPage)
    housing_html = URL(r'/annonces/annonce-(?P<_id>.*).html', HousingPage)
    phone = URL(r'/rest/classifieds/(?P<_id>.*)/phone', PhonePage)
    housing = URL(r'/rest/classifieds/(?P<_id>.*)',
                  r'/rest/classifieds/\?(?P<js_datas>.*)', HousingPage2)

    # woob query type -> website "transaction" parameter.
    TYPES = {POSTS_TYPES.RENT: 'location',
             POSTS_TYPES.SALE: 'vente',
             POSTS_TYPES.FURNISHED_RENT: 'location',
             POSTS_TYPES.VIAGER: 'vente'}

    # woob house type -> website "type" label.
    RET = {HOUSE_TYPES.HOUSE: 'Maison',
           HOUSE_TYPES.APART: 'Appartement',
           HOUSE_TYPES.LAND: 'Terrain',
           HOUSE_TYPES.PARKING: 'Parking',
           HOUSE_TYPES.OTHER: 'Divers'}

    def get_cities(self, pattern):
        """Return the cities matching ``pattern`` (autocomplete endpoint)."""
        return self.cities.open(city=pattern).get_cities()

    def search_housings(self, type, cities, nb_rooms, area_min, area_max,
                        cost_min, cost_max, house_types, advert_types):
        """Search housings matching the given criteria.

        :raises TypeNotSupported: when ``type`` has no website equivalent.
        """
        if type not in self.TYPES:
            raise TypeNotSupported()

        # Build the list of website house-type labels to query.
        ret = []
        if type == POSTS_TYPES.VIAGER:
            ret = ['Viager']
        else:
            for house_type in house_types:
                if house_type in self.RET:
                    ret.append(self.RET.get(house_type))

        # 'iso 8859-1' is normalized by Python's codec lookup to latin-1.
        data = {'location': ','.join(cities).encode('iso 8859-1'),
                'furnished': type == POSTS_TYPES.FURNISHED_RENT,
                'areaMin': area_min or '',
                'areaMax': area_max or '',
                'priceMin': cost_min or '',
                'priceMax': cost_max or '',
                'transaction': self.TYPES.get(type, 'location'),
                'recherche': '',
                'mode': '',
                'proximity': '0',
                'roomMin': nb_rooms or '',
                'page': '1'}

        # The site expects one '&type=' pair per house-type label.
        query = u'%s%s%s' % (urlencode(data), '&type=', '&type='.join(ret))

        return self.search.go(query=query).iter_housings(
            query_type=type,
            advert_types=advert_types
        )

    def get_housing(self, _id, housing=None):
        """Fetch one housing by id, optionally filling an existing object."""
        return self.housing.go(_id=_id).get_housing(obj=housing)

    def get_phone(self, _id):
        """Fetch the phone number associated with a housing id."""
        return self.phone.go(_id=_id).get_phone()

    def get_total_page(self, js_datas):
        """Return the total number of result pages for a search."""
        return self.housing.open(js_datas=js_datas).get_total_page()
|
81
modules/explorimmo/module.py
Normal file
81
modules/explorimmo/module.py
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from woob.tools.backend import Module
|
||||||
|
from woob.capabilities.housing import CapHousing, Housing, HousingPhoto
|
||||||
|
from woob import __version__ as WOOB_VERSION
|
||||||
|
|
||||||
|
from .browser import ExplorimmoBrowser
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['ExplorimmoModule']
|
||||||
|
|
||||||
|
|
||||||
|
class ExplorimmoModule(Module, CapHousing):
    """Woob backend exposing housing search on the explorimmo website."""

    NAME = 'explorimmo'
    DESCRIPTION = u'explorimmo website'
    MAINTAINER = u'Bezleputh'
    EMAIL = 'carton_ben@yahoo.fr'
    LICENSE = 'AGPLv3+'
    VERSION = WOOB_VERSION

    BROWSER = ExplorimmoBrowser

    def get_housing(self, housing):
        """Return the housing matching an id or pre-fill an existing object."""
        if isinstance(housing, Housing):
            housing_id = housing.id
        else:
            housing_id, housing = housing, None
        return self.browser.get_housing(housing_id, housing)

    def search_city(self, pattern):
        """Return cities whose name matches ``pattern``."""
        return self.browser.get_cities(pattern)

    def search_housings(self, query):
        """Return housings matching ``query``; empty if no city is ours."""
        cities = ['%s' % city.id for city in query.cities
                  if city.backend == self.name]
        if not cities:
            return []

        return self.browser.search_housings(query.type, cities, query.nb_rooms,
                                            query.area_min, query.area_max,
                                            query.cost_min, query.cost_max,
                                            query.house_types,
                                            query.advert_types)

    def fill_housing(self, housing, fields):
        """Fill the requested ``fields`` of ``housing`` in place."""
        if 'phone' in fields:
            housing.phone = self.browser.get_phone(housing.id)
            fields.remove('phone')

        # Any remaining field is obtained by re-fetching the full housing.
        if fields:
            self.browser.get_housing(housing.id, housing)

        return housing

    def fill_photo(self, photo, fields):
        """Download a photo's binary data when requested and missing."""
        if 'data' in fields and photo.url and not photo.data:
            photo.data = self.browser.open(photo.url).content
        return photo

    OBJECTS = {Housing: fill_housing,
               HousingPhoto: fill_photo,
               }
|
455
modules/explorimmo/pages.py
Normal file
455
modules/explorimmo/pages.py
Normal file
@ -0,0 +1,455 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import re
|
||||||
|
from decimal import Decimal
|
||||||
|
from datetime import datetime
|
||||||
|
from woob.browser.filters.json import Dict
|
||||||
|
from woob.browser.elements import ItemElement, ListElement, DictElement, method
|
||||||
|
from woob.browser.pages import JsonPage, HTMLPage, pagination
|
||||||
|
from woob.browser.filters.standard import (CleanText, CleanDecimal, Currency,
|
||||||
|
Regexp, Env, BrowserURL, Filter,
|
||||||
|
Format)
|
||||||
|
from woob.browser.filters.html import Attr, CleanHTML, XPath
|
||||||
|
from woob.capabilities.base import NotAvailable, NotLoaded, Currency as BaseCurrency
|
||||||
|
from woob.capabilities.housing import (Housing, HousingPhoto, City,
|
||||||
|
UTILITIES, ENERGY_CLASS, POSTS_TYPES,
|
||||||
|
ADVERT_TYPES, HOUSE_TYPES)
|
||||||
|
from woob.tools.capabilities.housing.housing import PricePerMeterFilter
|
||||||
|
from woob.tools.compat import unquote
|
||||||
|
|
||||||
|
|
||||||
|
class CitiesPage(JsonPage):
    """JSON autocomplete endpoint listing cities matching a pattern."""

    ENCODING = 'UTF-8'

    def build_doc(self, content):
        """Parse the JSON body, substituting an empty result structure when
        the server returns an empty payload."""
        content = super(CitiesPage, self).build_doc(content)
        if content:
            return content
        else:
            # Empty body: mimic the regular document shape so get_cities
            # yields no items instead of crashing on a missing key.
            return [{"locations": []}]

    @method
    class get_cities(DictElement):
        # The document is a one-element list wrapping a 'locations' array.
        item_xpath = '0/locations'

        class item(ItemElement):
            klass = City

            # The endpoint only exposes a label; it serves as both id and name.
            obj_id = Dict('label')
            obj_name = Dict('label')
|
||||||
|
|
||||||
|
|
||||||
|
class SearchPage(HTMLPage):
    """HTML page of search results (one div per classified)."""

    @pagination
    @method
    class iter_housings(ListElement):
        item_xpath = '//div[starts-with(@id, "bloc-vue-")]'

        def next_page(self):
            """Compute the next page URL from the embedded js-data attributes,
            or return None (implicitly) on the last page."""
            # data-rest-search-request holds a query string with the paging
            # parameters; split it into 'key=value' fragments.
            js_datas = CleanText(
                '//div[@id="js-data"]/@data-rest-search-request'
            )(self).split('?')[-1].split('&')

            try:
                resultsPerPage = next(
                    x for x in js_datas if 'resultsPerPage' in x
                ).split('=')[-1]
                currentPageNumber = next(
                    x for x in js_datas if 'currentPageNumber' in x
                ).split('=')[-1]
                resultCount = CleanText(
                    '(//div[@id="js-data"]/@data-result-count)[1]'
                )(self)
                totalPageNumber = math.ceil(
                    int(resultCount) / int(resultsPerPage)
                )

                next_page = int(currentPageNumber) + 1
                if next_page <= totalPageNumber:
                    return self.page.url.replace(
                        'page=%s' % currentPageNumber,
                        'page=%d' % next_page
                    )
            # StopIteration: the expected paging fragment was not found.
            except StopIteration:
                pass

        class item(ItemElement):
            klass = Housing
            price_selector = './/span[@class="price-label"]|./div/div[@class="item-price-pdf"]'

            def is_agency(self):
                """True unless the agency label marks a private-party advert."""
                agency = CleanText('.//span[has-class("item-agency-name")]')(self.el)
                return 'annonce de particulier' not in agency.lower()

            def condition(self):
                """Keep the item when it matches the requested advert types
                and carries a classified id."""
                if len(self.env['advert_types']) == 1:
                    is_agency = self.is_agency()
                    if self.env['advert_types'][0] == ADVERT_TYPES.PERSONAL:
                        return not is_agency
                    elif self.env['advert_types'][0] == ADVERT_TYPES.PROFESSIONAL:
                        return is_agency
                # Skip decorative divs with no classified id.
                return Attr('.', 'data-classified-id', default=False)(self)

            obj_id = Attr('.', 'data-classified-id')
            obj_type = Env('query_type')
            obj_title = CleanText('./div/h2[@class="item-type"]')

            def obj_advert_type(self):
                if self.is_agency():
                    return ADVERT_TYPES.PROFESSIONAL
                else:
                    return ADVERT_TYPES.PERSONAL

            def obj_house_type(self):
                # The title's first word carries the house type in French.
                type = self.obj_title(self).split()[0].lower()
                if type == "appartement" or type == "studio" or type == "chambre":
                    return HOUSE_TYPES.APART
                elif type == "maison" or type == "villa":
                    return HOUSE_TYPES.HOUSE
                elif type == "parking":
                    return HOUSE_TYPES.PARKING
                elif type == "terrain":
                    return HOUSE_TYPES.LAND
                else:
                    return HOUSE_TYPES.OTHER

            def obj_location(self):
                script = CleanText('./script')(self)
                try:
                    # Should be standard JSON+LD data
                    script = json.loads(script)
                except ValueError:
                    try:
                        # But explorimmo can't write JSON correctly and there
                        # is a trailing "}"
                        script = json.loads(script.strip().rstrip('}'))
                    except ValueError:
                        script = None
                if not script:
                    return NotLoaded

                try:
                    return '%s (%s)' % (
                        script['address']['addressLocality'],
                        script['address']['postalCode']
                    )
                except (KeyError):
                    return NotLoaded

            def obj_cost(self):
                # Price ranges read "de X à Y": take the lower bound first,
                # fall back to a plain price when no range matched.
                cost = CleanDecimal(Regexp(CleanText(self.price_selector, default=''),
                                           r'de (.*) à .*',
                                           default=0))(self)
                if cost == 0:
                    return CleanDecimal(self.price_selector, default=NotAvailable)(self)
                else:
                    return cost

            obj_currency = Currency(price_selector)

            def obj_utilities(self):
                # "CC" (charges comprises) marks utilities-included prices.
                utilities = CleanText(
                    './div/div/span[@class="price-label"]|'
                    './div/div[@class="item-price-pdf"]|'
                    './div/div/span[@class="item-price"]'
                )(self)
                if "CC" in utilities:
                    return UTILITIES.INCLUDED
                else:
                    return UTILITIES.UNKNOWN

            obj_text = CleanText('./div/p[@itemprop="description"]')
            # Area is parsed out of the title ("... NN m2 ...").
            obj_area = CleanDecimal(
                Regexp(
                    obj_title,
                    r'(.*?)([\d,\.]*) m2(.*?)',
                    '\\2',
                    default=None
                ),
                replace_dots=True,
                default=NotLoaded
            )

            obj_url = Format(
                "https://immobilier.lefigaro.fr/annonces/annonce-%s.html",
                CleanText('./@data-classified-id')
            )

            obj_price_per_meter = PricePerMeterFilter()

            def obj_phone(self):
                phone = CleanText('./div/div/ul/li[has-class("js-clickphone")]',
                                  replace=[('Téléphoner : ', '')],
                                  default=NotLoaded)(self)

                # Truncated numbers ("01 23 ...") are not usable.
                if '...' in phone:
                    return NotLoaded

                return phone

            def obj_details(self):
                charges = CleanText('.//span[@class="price-fees"]',
                                    default=None)(self)
                if charges:
                    # Text looks like "label : value"; keep the value part.
                    return {
                        "fees": charges.split(":")[1].strip()
                    }
                else:
                    return NotLoaded

            def obj_photos(self):
                url = CleanText('./div[has-class("default-img")]/img/@data-src')(self)
                if url:
                    url = unquote(url)
                    # Some thumbnails wrap the real photo URL inside a
                    # resizing-proxy URL; extract the embedded one.
                    if "http://" in url[3:]:
                        rindex = url.rfind("?")
                        if rindex == -1:
                            rindex = None
                        url = url[url.find("http://", 3):rindex]
                    return [HousingPhoto(url)]
                else:
                    return NotLoaded
|
||||||
|
|
||||||
|
|
||||||
|
class TypeDecimal(Filter):
    # Filter converting a raw JSON value (number or string) to a Decimal.
    def filter(self, el):
        return Decimal(el)
|
||||||
|
|
||||||
|
|
||||||
|
class FromTimestamp(Filter):
    # Filter converting a millisecond epoch timestamp to a datetime.
    def filter(self, el):
        # API timestamps are in milliseconds; fromtimestamp expects seconds.
        return datetime.fromtimestamp(el / 1000.0)
|
||||||
|
|
||||||
|
|
||||||
|
class PhonePage(JsonPage):
    """JSON endpoint exposing a classified's phone number."""

    def get_phone(self):
        # None when the 'phoneNumber' key is absent from the payload.
        return self.doc.get('phoneNumber')
|
||||||
|
|
||||||
|
|
||||||
|
class HousingPage2(JsonPage):
    """JSON REST endpoint describing a single classified."""

    @method
    class get_housing(ItemElement):
        klass = Housing

        def is_agency(self):
            """True when the advert is posted by an agency (not a private party)."""
            return Dict('agency/isParticulier')(self) == 'false'

        obj_id = Env('_id')

        def obj_type(self):
            transaction = Dict('characteristics/transaction')(self)
            if transaction == 'location':
                if Dict('characteristics/isFurnished')(self):
                    return POSTS_TYPES.FURNISHED_RENT
                else:
                    return POSTS_TYPES.RENT
            elif transaction == 'vente':
                type = Dict('characteristics/estateType')(self).lower()
                if 'viager' in type:
                    return POSTS_TYPES.VIAGER
                else:
                    return POSTS_TYPES.SALE
            else:
                return NotAvailable

        def obj_advert_type(self):
            # BUGFIX: was `if self.is_agency:` — testing the bound method
            # itself, which is always truthy, so every advert was reported
            # as PROFESSIONAL. Call it, as SearchPage.item does.
            if self.is_agency():
                return ADVERT_TYPES.PROFESSIONAL
            else:
                return ADVERT_TYPES.PERSONAL

        def obj_house_type(self):
            type = Dict('characteristics/estateType')(self).lower()
            if 'appartement' in type:
                return HOUSE_TYPES.APART
            elif 'maison' in type:
                return HOUSE_TYPES.HOUSE
            elif 'parking' in type:
                return HOUSE_TYPES.PARKING
            elif 'terrain' in type:
                return HOUSE_TYPES.LAND
            else:
                return HOUSE_TYPES.OTHER

        obj_title = Dict('characteristics/titleWithTransaction')
        obj_location = Format('%s %s %s', Dict('location/address'),
                              Dict('location/cityLabel'),
                              Dict('location/postalCode'))

        def obj_cost(self):
            # A zero price means a range advert: fall back to the lower bound.
            cost = TypeDecimal(Dict('characteristics/price'))(self)
            if cost == 0:
                cost = TypeDecimal(Dict('characteristics/priceMin'))(self)
            return cost

        obj_currency = BaseCurrency.get_currency('€')

        def obj_utilities(self):
            are_fees_included = Dict('characteristics/areFeesIncluded',
                                     default=None)(self)
            if are_fees_included:
                return UTILITIES.INCLUDED
            else:
                return UTILITIES.EXCLUDED

        obj_text = CleanHTML(Dict('characteristics/description'))
        obj_url = BrowserURL('housing_html', _id=Env('_id'))

        def obj_area(self):
            # Same range fallback as for the price.
            area = TypeDecimal(Dict('characteristics/area'))(self)
            if area == 0:
                area = TypeDecimal(Dict('characteristics/areaMin'))(self)
            return area

        obj_date = FromTimestamp(Dict('characteristics/date'))
        obj_bedrooms = TypeDecimal(Dict('characteristics/bedroomCount'))

        def obj_rooms(self):
            # TODO: Why is roomCount a list?
            rooms = Dict('characteristics/roomCount', default=[])(self)
            if rooms:
                return TypeDecimal(rooms[0])(self)
            return NotAvailable

        obj_price_per_meter = PricePerMeterFilter()

        def obj_photos(self):
            photos = []
            for img in Dict('characteristics/images')(self):
                # Unwrap the resizing-proxy URL when present (raw string:
                # '\.' is an invalid escape in a plain literal).
                m = re.search(r'http://thbr\.figarocms\.net.*(http://.*)', img.get('xl'))
                if m:
                    photos.append(HousingPhoto(m.group(1)))
                else:
                    photos.append(HousingPhoto(img.get('xl')))
            return photos

        def obj_DPE(self):
            # Energy-consumption class letter, mapped onto ENERGY_CLASS.
            DPE = Dict(
                'characteristics/energyConsumptionCategory',
                default=""
            )(self)
            return getattr(ENERGY_CLASS, DPE, NotAvailable)

        def obj_GES(self):
            # Greenhouse-gas emission class letter, mapped onto ENERGY_CLASS.
            GES = Dict(
                'characteristics/greenhouseGasEmissionCategory',
                default=""
            )(self)
            return getattr(ENERGY_CLASS, GES, NotAvailable)

        def obj_details(self):
            """Collect the miscellaneous characteristics into a dict."""
            details = {}
            details['fees'] = Dict(
                'characteristics/fees', default=NotAvailable
            )(self)
            details['agencyFees'] = Dict(
                'characteristics/agencyFees', default=NotAvailable
            )(self)
            details['guarantee'] = Dict(
                'characteristics/guarantee', default=NotAvailable
            )(self)
            details['bathrooms'] = Dict(
                'characteristics/bathroomCount', default=NotAvailable
            )(self)
            details['creationDate'] = FromTimestamp(
                Dict(
                    'characteristics/creationDate', default=NotAvailable
                ),
                default=NotAvailable
            )(self)
            details['availabilityDate'] = Dict(
                'characteristics/estateAvailabilityDate', default=NotAvailable
            )(self)
            details['exposure'] = Dict(
                'characteristics/exposure', default=NotAvailable
            )(self)
            details['heatingType'] = Dict(
                'characteristics/heatingType', default=NotAvailable
            )(self)
            details['floor'] = Dict(
                'characteristics/floor', default=NotAvailable
            )(self)
            details['bedrooms'] = Dict(
                'characteristics/bedroomCount', default=NotAvailable
            )(self)
            details['isFurnished'] = Dict(
                'characteristics/isFurnished', default=NotAvailable
            )(self)
            rooms = Dict('characteristics/roomCount', default=[])(self)
            if len(rooms):
                details['rooms'] = rooms[0]
            details['available'] = Dict(
                'characteristics/isAvailable', default=NotAvailable
            )(self)
            agency = Dict('agency', default=NotAvailable)(self)
            details['agency'] = ', '.join([
                x for x in [
                    agency.get('corporateName', ''),
                    agency.get('corporateAddress', ''),
                    agency.get('corporatePostalCode', ''),
                    agency.get('corporateCity', '')
                ] if x
            ])
            return details

    def get_total_page(self):
        """Return the total number of result pages, 0 when unknown."""
        return self.doc.get('pagination').get('total') if 'pagination' in self.doc else 0
|
||||||
|
|
||||||
|
|
||||||
|
class HousingPage(HTMLPage):
    """HTML page of a single classified (fallback to the JSON endpoint)."""

    @method
    class get_housing(ItemElement):
        klass = Housing

        obj_id = Env('_id')
        obj_title = CleanText('//h1[@itemprop="name"]')
        obj_location = CleanText('//span[@class="informations-localisation"]')
        obj_cost = CleanDecimal('//span[@itemprop="price"]')
        obj_currency = Currency('//span[@itemprop="price"]')
        obj_text = CleanHTML('//div[@itemprop="description"]')
        obj_url = BrowserURL('housing', _id=Env('_id'))
        # Area is parsed out of the title ("... NN m2 ...").
        obj_area = CleanDecimal(Regexp(CleanText('//h1[@itemprop="name"]'),
                                       r'(.*?)(\d*) m2(.*?)', '\\2'), default=NotAvailable)
        obj_price_per_meter = PricePerMeterFilter()

        def obj_photos(self):
            photos = []
            for img in XPath('//a[@class="thumbnail-link"]/img[@itemprop="image"]')(self):
                # Thumbnails wrap the real photo URL inside a resizing-proxy
                # URL; extract the embedded one.
                url = Regexp(CleanText('./@src'), r'http://thbr\.figarocms\.net.*(http://.*)')(img)
                photos.append(HousingPhoto(url))
            return photos

        def obj_details(self):
            """Collect the name/value feature rows, plus the DPE line."""
            details = dict()
            for item in XPath('//div[@class="features clearfix"]/ul/li')(self):
                key = CleanText('./span[@class="name"]')(item)
                value = CleanText('./span[@class="value"]')(item)
                if value and key:
                    details[key] = value

            key = CleanText('//div[@class="title-dpe clearfix"]')(self)
            value = CleanText('//div[@class="energy-consumption"]')(self)
            if value and key:
                details[key] = value
            return details
|
101
modules/explorimmo/test.py
Normal file
101
modules/explorimmo/test.py
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from woob.capabilities.housing import Query, ADVERT_TYPES, POSTS_TYPES
|
||||||
|
from woob.tools.capabilities.housing.housing_test import HousingTest
|
||||||
|
from woob.tools.test import BackendTest
|
||||||
|
|
||||||
|
|
||||||
|
class ExplorimmoTest(BackendTest, HousingTest):
    """Live tests for the explorimmo backend, driven by HousingTest."""

    MODULE = 'explorimmo'

    # Fields that must be filled on every housing of a search result list.
    FIELDS_ALL_HOUSINGS_LIST = [
        "id", "type", "advert_type", "house_type", "title", "location",
        "utilities", "text", "area", "url"
    ]
    # Fields that must be filled on at least one housing of the list.
    FIELDS_ANY_HOUSINGS_LIST = [
        "photos", "cost", "currency"
    ]
    # Fields that must be filled when fetching a single housing.
    FIELDS_ALL_SINGLE_HOUSING = [
        "id", "url", "type", "advert_type", "house_type", "title", "area",
        "cost", "currency", "utilities", "date", "location", "text", "rooms",
        "details"
    ]
    # Fields that must be filled on at least one fetched housing.
    FIELDS_ANY_SINGLE_HOUSING = [
        "bedrooms",
        "photos",
        "DPE",
        "GES",
        "phone"
    ]

    def test_explorimmo_rent(self):
        """Search rentals in Paris and check results against the query."""
        query = Query()
        query.area_min = 20
        query.cost_max = 1500
        query.type = POSTS_TYPES.RENT
        query.cities = []
        for city in self.backend.search_city('paris'):
            city.backend = self.backend.name
            query.cities.append(city)
        self.check_against_query(query)

    def test_explorimmo_sale(self):
        """Search sales in Paris and check results against the query."""
        query = Query()
        query.area_min = 20
        query.type = POSTS_TYPES.SALE
        query.cities = []
        for city in self.backend.search_city('paris'):
            city.backend = self.backend.name
            query.cities.append(city)
        self.check_against_query(query)

    def test_explorimmo_furnished_rent(self):
        """Search furnished rentals in Paris and check results."""
        query = Query()
        query.area_min = 20
        query.cost_max = 1500
        query.type = POSTS_TYPES.FURNISHED_RENT
        query.cities = []
        for city in self.backend.search_city('paris'):
            city.backend = self.backend.name
            query.cities.append(city)
        self.check_against_query(query)

    def test_explorimmo_viager(self):
        """Search viager sales (pattern '85') and check results."""
        query = Query()
        query.type = POSTS_TYPES.VIAGER
        query.cities = []
        for city in self.backend.search_city('85'):
            city.backend = self.backend.name
            query.cities.append(city)
        self.check_against_query(query)

    def test_explorimmo_personal(self):
        """A PERSONAL-only search is expected to yield zero results —
        presumably the site carries no matching private-party adverts;
        verify against the website if this starts failing."""
        query = Query()
        query.area_min = 20
        query.cost_max = 900
        query.type = POSTS_TYPES.RENT
        query.advert_types = [ADVERT_TYPES.PERSONAL]
        query.cities = []
        for city in self.backend.search_city('paris'):
            city.backend = self.backend.name
            query.cities.append(city)

        results = list(self.backend.search_housings(query))
        self.assertEqual(len(results), 0)
|
26
modules/foncia/__init__.py
Normal file
26
modules/foncia/__init__.py
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2017 Phyks (Lucas Verney)
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
|
||||||
|
from .module import FonciaModule
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['FonciaModule']
|
61
modules/foncia/browser.py
Normal file
61
modules/foncia/browser.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2017 Phyks (Lucas Verney)
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
|
||||||
|
from woob.browser import PagesBrowser, URL
|
||||||
|
|
||||||
|
from .constants import QUERY_TYPES
|
||||||
|
from .pages import CitiesPage, HousingPage, SearchPage, SearchResultsPage
|
||||||
|
|
||||||
|
|
||||||
|
class FonciaBrowser(PagesBrowser):
    """Browser for the fr.foncia.com housing website."""

    BASEURL = 'https://fr.foncia.com'

    # Declaration order matters: the most specific patterns come first so
    # a detail page is not matched by the broader search patterns.
    cities = URL(r'/recherche/autocomplete\?term=(?P<term>.+)', CitiesPage)
    housing = URL(r'/(?P<type>[^/]+)/.*\d+.htm', HousingPage)
    search_results = URL(r'/(?P<type>[^/]+)/.*', SearchResultsPage)
    search = URL(r'/(?P<type>.+)', SearchPage)

    def get_cities(self, pattern):
        """Get cities matching a given pattern."""
        return self.cities.open(term=pattern).iter_cities()

    def search_housings(self, query, cities):
        """Search for housings matching given query."""
        if query.type not in QUERY_TYPES:
            # Post type not supported by Foncia: nothing to search for.
            return []
        website_type = QUERY_TYPES[query.type]

        self.search.go(type=website_type).do_search(query, cities)
        return self.page.iter_housings(query_type=query.type)

    def get_housing(self, housing):
        """Get specific housing."""
        # Backend ids are "<website type>:<property reference>".
        website_type, reference = housing.split(':')
        self.search.go(type=website_type).find_housing(website_type, reference)
        return self.page.get_housing()
|
24
modules/foncia/constants.py
Normal file
24
modules/foncia/constants.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
from woob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES
|
||||||
|
|
||||||
|
# Map woob post types to the URL path segment used by Foncia.
# Furnished rentals live under the same 'location' section as regular
# rentals; they are told apart later from the housing URL itself.
QUERY_TYPES = {
    POSTS_TYPES.RENT: 'location',
    POSTS_TYPES.SALE: 'achat',
    POSTS_TYPES.FURNISHED_RENT: 'location',
}

# Map woob house types to the Foncia "type_bien" form values.
QUERY_HOUSE_TYPES = {
    HOUSE_TYPES.APART: ['appartement', 'appartement-meuble'],
    HOUSE_TYPES.HOUSE: ['maison'],
    HOUSE_TYPES.PARKING: ['parking'],
    HOUSE_TYPES.LAND: ['terrain'],
    HOUSE_TYPES.OTHER: [
        'chambre',
        'programme-neuf',
        'local-commercial',
        'immeuble',
    ],
}

# For each post type, the subset of "type_bien" values Foncia accepts.
AVAILABLE_TYPES = {
    POSTS_TYPES.RENT: [
        'appartement',
        'maison',
        'parking',
        'chambre',
        'local-commercial',
    ],
    POSTS_TYPES.SALE: [
        'appartement',
        'maison',
        'parking',
        'local-commercial',
        'terrain',
        'immeuble',
        'programme-neuf',
    ],
    POSTS_TYPES.FURNISHED_RENT: ['appartement-meuble'],
}
|
BIN
modules/foncia/favicon.png
Normal file
BIN
modules/foncia/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 2.5 KiB |
75
modules/foncia/module.py
Normal file
75
modules/foncia/module.py
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2017 Phyks (Lucas Verney)
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
|
||||||
|
from woob.tools.backend import Module
|
||||||
|
from woob.capabilities.housing import CapHousing, Housing, ADVERT_TYPES, HousingPhoto
|
||||||
|
from woob import __version__ as WOOB_VERSION
|
||||||
|
|
||||||
|
from .browser import FonciaBrowser
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['FonciaModule']
|
||||||
|
|
||||||
|
|
||||||
|
class FonciaModule(Module, CapHousing):
    """Woob module exposing Foncia as a housing backend."""

    NAME = 'foncia'
    DESCRIPTION = u'Foncia housing website.'
    MAINTAINER = u'Phyks (Lucas Verney)'
    EMAIL = 'phyks@phyks.me'
    LICENSE = 'AGPLv3+'
    VERSION = WOOB_VERSION

    BROWSER = FonciaBrowser

    def get_housing(self, housing):
        """Fetch a single housing post from its backend identifier."""
        return self.browser.get_housing(housing)

    def search_city(self, pattern):
        """Look up cities whose name matches the given pattern."""
        return self.browser.get_cities(pattern)

    def search_housings(self, query):
        """Run a housing search for the given query."""
        wants_personal_only = (
            len(query.advert_types) == 1
            and query.advert_types[0] == ADVERT_TYPES.PERSONAL
        )
        if wants_personal_only:
            # Foncia is pro only
            return []

        cities = ','.join(
            '%s' % city.name
            for city in query.cities
            if city.backend == self.name
        )
        if not cities:
            # No city belongs to this backend: nothing to search.
            return []

        return self.browser.search_housings(query, cities)

    def fill_housing(self, housing, fields):
        """Complete a partially-loaded housing by re-fetching its page."""
        if fields:
            self.browser.get_housing(housing)
        return housing

    def fill_photo(self, photo, fields):
        """Download the photo content when requested and not yet loaded."""
        if 'data' in fields and photo.url and not photo.data:
            photo.data = self.browser.open(photo.url).content
        return photo

    OBJECTS = {Housing: fill_housing, HousingPhoto: fill_photo}
|
359
modules/foncia/pages.py
Normal file
359
modules/foncia/pages.py
Normal file
@ -0,0 +1,359 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2017 Phyks (Lucas Verney)
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from woob.browser.pages import JsonPage, HTMLPage, pagination
|
||||||
|
from woob.browser.filters.standard import (
|
||||||
|
CleanDecimal, CleanText, Currency, Date, Env, Format, Regexp, RegexpError
|
||||||
|
)
|
||||||
|
from woob.browser.filters.html import AbsoluteLink, Attr, Link, XPathNotFound
|
||||||
|
from woob.browser.elements import ItemElement, ListElement, method
|
||||||
|
from woob.capabilities.base import NotAvailable, NotLoaded
|
||||||
|
from woob.capabilities.housing import (
|
||||||
|
City, Housing, HousingPhoto,
|
||||||
|
UTILITIES, ENERGY_CLASS, POSTS_TYPES, ADVERT_TYPES
|
||||||
|
)
|
||||||
|
from woob.tools.capabilities.housing.housing import PricePerMeterFilter
|
||||||
|
|
||||||
|
from .constants import AVAILABLE_TYPES, QUERY_TYPES, QUERY_HOUSE_TYPES
|
||||||
|
|
||||||
|
|
||||||
|
class CitiesPage(JsonPage):
    """JSON autocomplete endpoint listing city names for a search term."""

    def iter_cities(self):
        """Yield a City object for each name in the JSON payload."""
        # The endpoint may answer with either a JSON list or a JSON
        # object; in the latter case only the values are city names.
        entries = self.doc.values() if isinstance(self.doc, dict) else self.doc

        for name in entries:
            city = City()
            # Foncia has no separate city identifier: the name is the id.
            city.id = name
            city.name = name
            yield city
|
||||||
|
|
||||||
|
|
||||||
|
class HousingPage(HTMLPage):
    """Detail page of a single Foncia housing post."""

    @method
    class get_housing(ItemElement):
        """Build a Housing object from the detail page markup."""

        klass = Housing

        # Backend id is "<website type>:<property reference>", the format
        # split back apart by FonciaBrowser.get_housing().
        obj_id = Format(
            '%s:%s',
            Env('type'),
            Attr('//div[boolean(@data-property-reference)]', 'data-property-reference')
        )
        # Foncia only publishes professional adverts.
        obj_advert_type = ADVERT_TYPES.PROFESSIONAL

        def obj_type(self):
            # 'type' is the URL path segment captured by the browser
            # ('location' or 'achat').
            type = Env('type')(self)
            if type == 'location':
                # Furnished rentals share the 'location' section; the URL
                # path is the only way to tell them apart.
                if 'appartement-meuble' in self.page.url:
                    return POSTS_TYPES.FURNISHED_RENT
                else:
                    return POSTS_TYPES.RENT
            elif type == 'achat':
                return POSTS_TYPES.SALE
            else:
                return NotAvailable

        def obj_url(self):
            # The canonical URL is simply the page we are on.
            return self.page.url

        def obj_house_type(self):
            # Deduce the house type from the URL path segment, using the
            # same mapping as the search form.
            url = self.obj_url()
            for house_type, types in QUERY_HOUSE_TYPES.items():
                for type in types:
                    if ('/%s/' % type) in url:
                        return house_type
            return NotAvailable

        obj_title = CleanText('//h1[has-class("OfferTop-title")]')
        # Area is the first "MiniData" item, e.g. "42 m²"; keep only the
        # leading number.
        obj_area = CleanDecimal(
            Regexp(
                CleanText(
                    '//div[has-class("MiniData")]//p[has-class("MiniData-item")][1]'
                ),
                r'(\d*\.*\d*) .*',
                default=NotAvailable
            ),
            default=NotAvailable
        )
        obj_cost = CleanDecimal(
            '//span[has-class("OfferTop-price")]',
            default=NotAvailable
        )
        obj_price_per_meter = PricePerMeterFilter()
        obj_currency = Currency(
            '//span[has-class("OfferTop-price")]'
        )
        obj_location = Format(
            '%s - %s',
            CleanText('//p[@data-behat="adresseBien"]'),
            CleanText('//p[has-class("OfferTop-loc")]')
        )
        obj_text = CleanText('//div[has-class("OfferDetails-content")]/p[1]')
        # The phone number is extracted from the "tel:" link href.
        obj_phone = Regexp(
            Link(
                '//a[has-class("OfferContact-btn--tel")]'
            ),
            r'tel:(.*)'
        )

        def obj_photos(self):
            # Collect slider images, requesting a larger rendition than
            # the 640x480 thumbnails served by default.
            photos = []
            for photo in self.xpath('//div[has-class("OfferSlider")]//img'):
                photo_url = Attr('.', 'src')(photo)
                photo_url = photo_url.replace('640/480', '800/600')
                photos.append(HousingPhoto(photo_url))
            return photos

        # NOTE(review): evaluated once at class-definition (import) time,
        # not per housing — confirm this is intended.
        obj_date = datetime.date.today()

        def obj_utilities(self):
            # Whether charges are included is only stated in the price
            # label text.
            price = CleanText(
                '//p[has-class("OfferTop-price")]'
            )(self)
            if "charges comprises" in price.lower():
                return UTILITIES.INCLUDED
            else:
                return UTILITIES.EXCLUDED

        obj_rooms = CleanDecimal(
            '//div[has-class("MiniData")]//p[has-class("MiniData-item")][2]',
            default=NotAvailable
        )
        obj_bedrooms = CleanDecimal(
            '//div[has-class("MiniData")]//p[has-class("MiniData-item")][3]',
            default=NotAvailable
        )

        def obj_DPE(self):
            # The energy consumption value is only available encoded in
            # the DPE chart image URL.
            try:
                electric_consumption = CleanDecimal(Regexp(
                    Attr('//div[has-class("OfferDetails-content")]//img', 'src'),
                    r'https://dpe.foncia.net\/(\d+)\/.*'
                ))(self)
            except (RegexpError, XPathNotFound):
                electric_consumption = None

            # Map the kWh figure onto the French DPE letter classes.
            DPE = ""
            if electric_consumption is not None:
                if electric_consumption <= 50:
                    DPE = "A"
                elif 50 < electric_consumption <= 90:
                    DPE = "B"
                elif 90 < electric_consumption <= 150:
                    DPE = "C"
                elif 150 < electric_consumption <= 230:
                    DPE = "D"
                elif 230 < electric_consumption <= 330:
                    DPE = "E"
                elif 330 < electric_consumption <= 450:
                    DPE = "F"
                else:
                    DPE = "G"
                return getattr(ENERGY_CLASS, DPE, NotAvailable)
            return NotAvailable

        def obj_details(self):
            """Collect the free-form key/value details of the offer."""
            details = {}

            # Availability date, defaulting to today when not displayed.
            dispo = Date(
                Regexp(
                    CleanText('//p[has-class("OfferTop-dispo")]'),
                    r'.* (\d\d\/\d\d\/\d\d\d\d)',
                    default=datetime.date.today().isoformat()
                )
            )(self)
            if dispo is not None:
                details["dispo"] = dispo

            priceMentions = CleanText(
                '//p[has-class("OfferTop-mentions")]',
                default=None
            )(self)
            if priceMentions is not None:
                details["priceMentions"] = priceMentions

            agency = CleanText(
                '//p[has-class("OfferContact-address")]',
                default=None
            )(self)
            if agency is not None:
                details["agency"] = agency

            # Each details column is a category with either titled
            # key/value rows or plain bullet items (stored as flags).
            for item in self.xpath('//div[has-class("OfferDetails-columnize")]/div'):
                category = CleanText(
                    './h3[has-class("OfferDetails-title--2")]',
                    default=None
                )(item)
                if not category:
                    continue

                details[category] = {}

                for detail_item in item.xpath('.//ul[has-class("List--data")]/li'):
                    detail_title = CleanText('.//span[has-class("List-data")]')(detail_item)
                    detail_value = CleanText('.//*[has-class("List-value")]')(detail_item)
                    details[category][detail_title] = detail_value

                for detail_item in item.xpath('.//ul[has-class("List--bullet")]/li'):
                    detail_title = CleanText('.')(detail_item)
                    details[category][detail_title] = True

            # Same image-URL trick as obj_DPE to expose the raw figure.
            try:
                electric_consumption = CleanDecimal(Regexp(
                    Attr('//div[has-class("OfferDetails-content")]//img', 'src'),
                    r'https://dpe.foncia.net\/(\d+)\/.*'
                ))(self)
                details["electric_consumption"] = (
                    '{} kWhEP/m².an'.format(electric_consumption)
                )
            except (RegexpError, XPathNotFound):
                pass

            return details
|
||||||
|
|
||||||
|
|
||||||
|
class SearchPage(HTMLPage):
    """Page holding the Foncia search form."""

    def do_search(self, query, cities):
        """Fill and submit the search form for the given query and cities."""
        form = self.get_form('//form[@name="searchForm"]')

        form['searchForm[type]'] = QUERY_TYPES.get(query.type, None)
        form['searchForm[localisation]'] = cities

        # Collect every Foncia "type_bien" value mapped from the query,
        # then keep only those the current post type actually supports.
        requested = [
            value
            for house_type in query.house_types
            for value in QUERY_HOUSE_TYPES.get(house_type, [])
        ]
        supported = AVAILABLE_TYPES.get(query.type, [])
        form['searchForm[type_bien][]'] = [x for x in requested if x in supported]

        # Optional numeric bounds are only sent when set on the query.
        for field, value in (
            ('searchForm[surface_min]', query.area_min),
            ('searchForm[surface_max]', query.area_max),
            ('searchForm[prix_min]', query.cost_min),
            ('searchForm[prix_max]', query.cost_max),
        ):
            if value:
                form[field] = value

        if query.nb_rooms:
            # Foncia expects every accepted room count, not a minimum.
            form['searchForm[pieces]'] = list(range(1, query.nb_rooms + 1))

        form.submit()

    def find_housing(self, query_type, housing):
        """Submit the search form to look up one housing by reference."""
        form = self.get_form('//form[@name="searchForm"]')
        form['searchForm[type]'] = query_type
        form['searchForm[reference]'] = housing
        form.submit()
|
||||||
|
|
||||||
|
|
||||||
|
class SearchResultsPage(HTMLPage):
    """Page listing the housing teasers returned by a search."""

    @pagination
    @method
    class iter_housings(ListElement):
        """Iterate over every housing teaser, following pagination."""

        item_xpath = '//article[has-class("TeaserOffer")]'

        next_page = Link('//div[has-class("Pagination--more")]/a[contains(text(), "Suivant")]')

        class item(ItemElement):
            klass = Housing

            # Backend id is "<website type>:<reference>", the format
            # expected by FonciaBrowser.get_housing().
            obj_id = Format(
                '%s:%s',
                Env('type'),
                Attr('.//span[boolean(@data-reference)]', 'data-reference')
            )
            # Fix: obj_url used to be declared twice with the identical
            # expression; a single declaration is kept.
            obj_url = AbsoluteLink('.//h3[has-class("TeaserOffer-title")]/a')
            obj_type = Env('query_type')
            # Foncia only publishes professional adverts.
            obj_advert_type = ADVERT_TYPES.PROFESSIONAL

            def obj_house_type(self):
                # Deduce the house type from the housing URL path segment.
                url = self.obj_url(self)
                for house_type, types in QUERY_HOUSE_TYPES.items():
                    for type in types:
                        if ('/%s/' % type) in url:
                            return house_type
                return NotLoaded

            obj_title = CleanText('.//h3[has-class("TeaserOffer-title")]')
            # Area text is e.g. "42 m²"; keep only the leading number.
            obj_area = CleanDecimal(
                Regexp(
                    CleanText(
                        './/div[has-class("MiniData")]//p[@data-behat="surfaceDesBiens"]'
                    ),
                    r'(\d*\.*\d*) .*',
                    default=NotAvailable
                ),
                default=NotAvailable
            )
            obj_cost = CleanDecimal(
                './/strong[has-class("TeaserOffer-price-num")]',
                default=NotAvailable
            )
            obj_price_per_meter = PricePerMeterFilter()
            obj_currency = Currency(
                './/strong[has-class("TeaserOffer-price-num")]'
            )
            obj_location = CleanText('.//p[has-class("TeaserOffer-loc")]')
            obj_text = CleanText('.//p[has-class("TeaserOffer-description")]')

            def obj_photos(self):
                url = CleanText(Attr('.//a[has-class("TeaserOffer-ill")]/img', 'src'))(self)
                # If the used photo is a default no photo, the src is on the same domain.
                if url[0] == '/':
                    return []
                else:
                    return [HousingPhoto(url)]

            # NOTE(review): evaluated once at import time, not per item.
            obj_date = datetime.date.today()

            def obj_utilities(self):
                # Whether charges are included is only stated in the
                # price label text.
                price = CleanText(
                    './/strong[has-class("TeaserOffer-price-num")]'
                )(self)
                if "charges comprises" in price.lower():
                    return UTILITIES.INCLUDED
                else:
                    return UTILITIES.EXCLUDED

            obj_rooms = CleanDecimal(
                './/div[has-class("MiniData")]//p[@data-behat="nbPiecesDesBiens"]',
                default=NotLoaded
            )
            obj_bedrooms = CleanDecimal(
                './/div[has-class("MiniData")]//p[@data-behat="nbChambresDesBiens"]',
                default=NotLoaded
            )

            def obj_details(self):
                """Availability date and price mentions from the teaser."""
                return {
                    "dispo": Date(
                        Attr('.//span[boolean(@data-dispo)]', 'data-dispo',
                             default=datetime.date.today().isoformat())
                    )(self),
                    "priceMentions": CleanText('.//span[has-class("TeaserOffer-price-mentions")]')(self)
                }
|
95
modules/foncia/test.py
Normal file
95
modules/foncia/test.py
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2017 Phyks (Lucas Verney)
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from woob.capabilities.housing import (
|
||||||
|
Query, POSTS_TYPES, ADVERT_TYPES
|
||||||
|
)
|
||||||
|
from woob.tools.capabilities.housing.housing_test import HousingTest
|
||||||
|
from woob.tools.test import BackendTest
|
||||||
|
|
||||||
|
|
||||||
|
class FonciaTest(BackendTest, HousingTest):
    """End-to-end tests for the foncia backend."""

    MODULE = 'foncia'

    # Fields that must be filled on every housing of a search result.
    FIELDS_ALL_HOUSINGS_LIST = [
        "id", "type", "advert_type", "house_type", "url", "title", "area",
        "cost", "currency", "date", "location", "text", "details"
    ]
    # Fields that must be filled on at least one housing of the list.
    FIELDS_ANY_HOUSINGS_LIST = [
        "photos",
        "rooms"
    ]
    # Fields that must be filled on a single fetched housing.
    FIELDS_ALL_SINGLE_HOUSING = [
        "id", "url", "type", "advert_type", "house_type", "title", "area",
        "cost", "currency", "utilities", "date", "location", "text", "phone",
        "DPE", "details"
    ]
    FIELDS_ANY_SINGLE_HOUSING = [
        "bedrooms",
        "photos",
        "rooms"
    ]

    def _fill_cities(self, query, pattern='paris'):
        # Helper: populate query.cities with every backend city matching
        # the pattern (this loop used to be duplicated in each test).
        query.cities = []
        for city in self.backend.search_city(pattern):
            city.backend = self.backend.name
            query.cities.append(city)

    def test_foncia_rent(self):
        query = Query()
        query.area_min = 20
        query.cost_max = 1500
        query.type = POSTS_TYPES.RENT
        self._fill_cities(query)
        self.check_against_query(query)

    def test_foncia_sale(self):
        query = Query()
        query.area_min = 20
        query.type = POSTS_TYPES.SALE
        self._fill_cities(query)
        self.check_against_query(query)

    def test_foncia_furnished_rent(self):
        query = Query()
        query.area_min = 20
        query.cost_max = 1500
        query.type = POSTS_TYPES.FURNISHED_RENT
        self._fill_cities(query)
        self.check_against_query(query)

    def test_foncia_personal(self):
        # Foncia is pro only: a personal-only query must yield nothing.
        query = Query()
        query.area_min = 20
        query.cost_max = 900
        query.type = POSTS_TYPES.RENT
        query.advert_types = [ADVERT_TYPES.PERSONAL]
        self._fill_cities(query)

        results = list(self.backend.search_housings(query))
        self.assertEqual(len(results), 0)
|
24
modules/leboncoin/__init__.py
Normal file
24
modules/leboncoin/__init__.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from .module import LeboncoinModule
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['LeboncoinModule']
|
145
modules/leboncoin/browser.py
Normal file
145
modules/leboncoin/browser.py
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from woob.tools.json import json
|
||||||
|
|
||||||
|
from woob.browser import PagesBrowser, URL
|
||||||
|
from woob.capabilities.housing import (TypeNotSupported, POSTS_TYPES,
|
||||||
|
HOUSE_TYPES, ADVERT_TYPES)
|
||||||
|
from .pages import CityListPage, HousingListPage, HousingPage, PhonePage, HomePage
|
||||||
|
|
||||||
|
|
||||||
|
class LeboncoinBrowser(PagesBrowser):
    """Browser for leboncoin.fr and its finder/search JSON API."""

    BASEURL = 'https://www.leboncoin.fr/'
    # Fix: these patterns are regexes; the former plain strings contained
    # the invalid escape sequence '\?' (a SyntaxWarning in modern Python,
    # slated to become an error). Raw strings keep the bytes identical.
    city = URL(r'ajax/location_list.html\?city=(?P<city>.*)&zipcode=(?P<zip>.*)', CityListPage)
    housing = URL(r'ventes_immobilieres/(?P<_id>.*).htm', HousingPage)

    home = URL(r'annonces/offres', HomePage)
    api = URL(r'https://api.leboncoin.fr/finder/search', HousingListPage)
    phone = URL(r'https://api.leboncoin.fr/api/utils/phonenumber.json', PhonePage)

    # API category ids per woob post type (rent and furnished rent share
    # the same category; furnishing is a separate filter below).
    TYPES = {POSTS_TYPES.RENT: '10',
             POSTS_TYPES.FURNISHED_RENT: '10',
             POSTS_TYPES.SALE: '9',
             POSTS_TYPES.SHARING: '11', }

    # API real_estate_type ids per woob house type.
    RET = {HOUSE_TYPES.HOUSE: '1',
           HOUSE_TYPES.APART: '2',
           HOUSE_TYPES.LAND: '3',
           HOUSE_TYPES.PARKING: '4',
           HOUSE_TYPES.OTHER: '5'}

    # NOTE: the former no-op __init__ override (which only called super)
    # has been removed; the inherited constructor behaves identically.

    def get_cities(self, pattern):
        """Look up cities either by zip code (digits) or by name."""
        city = ''
        zip_code = ''
        if pattern.isdigit():
            zip_code = pattern
        else:
            city = pattern.replace(" ", "_")

        return self.city.go(city=city, zip=zip_code).get_cities()

    def search_housings(self, query, module_name):
        """Build the finder/search JSON payload and run the search.

        Returns TypeNotSupported() when the query post type has no
        matching API category.
        """
        if query.type not in self.TYPES.keys():
            return TypeNotSupported()

        data = {}
        data['filters'] = {}
        data['filters']['category'] = {}
        data['filters']['category']['id'] = self.TYPES.get(query.type)
        data['filters']['enums'] = {}
        data['filters']['enums']['ad_type'] = ['offer']

        # Translate requested house types into API ids, skipping unknowns.
        data['filters']['enums']['real_estate_type'] = []
        for t in query.house_types:
            t = self.RET.get(t)
            if t:
                data['filters']['enums']['real_estate_type'].append(t)

        # Furnishing is its own enum: '1' furnished, '2' unfurnished.
        if query.type == POSTS_TYPES.FURNISHED_RENT:
            data['filters']['enums']['furnished'] = ['1']
        elif query.type == POSTS_TYPES.RENT:
            data['filters']['enums']['furnished'] = ['2']

        data['filters']['keywords'] = {}
        data['filters']['ranges'] = {}

        # Optional price and surface ranges, only sent when requested.
        if query.cost_max or query.cost_min:
            data['filters']['ranges']['price'] = {}

            if query.cost_max:
                data['filters']['ranges']['price']['max'] = query.cost_max

            if query.cost_min:
                data['filters']['ranges']['price']['min'] = query.cost_min

        if query.area_max or query.area_min:
            data['filters']['ranges']['square'] = {}
            if query.area_max:
                data['filters']['ranges']['square']['max'] = query.area_max

            if query.area_min:
                data['filters']['ranges']['square']['min'] = query.area_min

        if query.nb_rooms:
            data['filters']['ranges']['rooms'] = {}
            data['filters']['ranges']['rooms']['min'] = query.nb_rooms

        # City ids are "<city> <zipcode>" strings; split them back apart.
        data['filters']['location'] = {}
        data['filters']['location']['city_zipcodes'] = []

        for c in query.cities:
            if c.backend == module_name:
                _c = c.id.split(' ')
                __c = {}
                __c['city'] = _c[0]
                __c['zipcode'] = _c[1]
                __c['label'] = c.name

                data['filters']['location']['city_zipcodes'].append(__c)

        if len(query.advert_types) == 1:
            if query.advert_types[0] == ADVERT_TYPES.PERSONAL:
                data['owner_type'] = 'private'
            elif query.advert_types[0] == ADVERT_TYPES.PROFESSIONAL:
                data['owner_type'] = 'pro'
        else:
            data['owner_type'] = 'all'

        data['limit'] = 100
        data['limit_alu'] = 3
        data['offset'] = 0

        # The API requires a key scraped from the public home page.
        self.session.headers.update({"api_key": self.home.go().get_api_key()})
        return self.api.go(data=json.dumps(data)).get_housing_list(query_type=query.type, data=data)

    def get_housing(self, _id, obj=None):
        """Fetch one housing detail page, optionally filling ``obj``."""
        return self.housing.go(_id=_id).get_housing(obj=obj)

    def get_phone(self, _id):
        """Retrieve the advertiser phone number via the utils endpoint."""
        # The phone endpoint needs an API key scraped from the ad page.
        api_key = self.housing.stay_or_go(_id=_id).get_api_key()
        data = {'list_id': _id,
                'app_id': 'leboncoin_web_utils',
                'key': api_key,
                'text': 1, }
        return self.phone.go(data=data).get_phone()
|
BIN
modules/leboncoin/favicon.png
Normal file
BIN
modules/leboncoin/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 766 B |
67
modules/leboncoin/module.py
Normal file
67
modules/leboncoin/module.py
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from woob.tools.backend import Module
|
||||||
|
from woob.capabilities.housing import (CapHousing, Housing, HousingPhoto)
|
||||||
|
from .browser import LeboncoinBrowser
|
||||||
|
from woob import __version__ as WOOB_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['LeboncoinModule']
|
||||||
|
|
||||||
|
|
||||||
|
class LeboncoinModule(Module, CapHousing):
    """Woob backend for leboncoin housing listings."""

    NAME = 'leboncoin'
    DESCRIPTION = u'search house on leboncoin website'
    MAINTAINER = u'Bezleputh'
    EMAIL = 'carton_ben@yahoo.fr'
    LICENSE = 'AGPLv3+'
    VERSION = WOOB_VERSION

    BROWSER = LeboncoinBrowser

    def create_default_browser(self):
        """Instantiate the browser without any credential."""
        return self.create_browser()

    def get_housing(self, _id):
        """Fetch a single housing post by its identifier."""
        return self.browser.get_housing(_id)

    def fill_housing(self, housing, fields):
        """Complete missing fields on a housing object."""
        if 'phone' in fields:
            # The phone number comes from a dedicated API endpoint.
            housing.phone = self.browser.get_phone(housing.id)
            fields.remove('phone')

        # Any remaining field requires re-fetching the detail page.
        if fields:
            self.browser.get_housing(housing.id, housing)

        return housing

    def fill_photo(self, photo, fields):
        """Download the photo content when asked for and absent."""
        if 'data' in fields and photo.url and not photo.data:
            photo.data = self.browser.open(photo.url).content
        return photo

    def search_city(self, pattern):
        """Look up cities by name or zip code."""
        return self.browser.get_cities(pattern)

    def search_housings(self, query):
        """Run a housing search for this backend."""
        return self.browser.search_housings(query, self.name)

    OBJECTS = {Housing: fill_housing, HousingPhoto: fill_photo}
|
301
modules/leboncoin/pages.py
Normal file
301
modules/leboncoin/pages.py
Normal file
@ -0,0 +1,301 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from woob.browser.pages import HTMLPage, JsonPage, pagination
|
||||||
|
from woob.browser.elements import ItemElement, ListElement, method, DictElement
|
||||||
|
from woob.capabilities.base import Currency as BaseCurrency
|
||||||
|
from woob.browser.filters.standard import (CleanText, CleanDecimal, _Filter,
|
||||||
|
Env, DateTime, Format)
|
||||||
|
from woob.browser.filters.json import Dict
|
||||||
|
from woob.capabilities.housing import (City, Housing, HousingPhoto,
|
||||||
|
UTILITIES, ENERGY_CLASS, POSTS_TYPES,
|
||||||
|
ADVERT_TYPES, HOUSE_TYPES)
|
||||||
|
from woob.capabilities.base import NotAvailable
|
||||||
|
from woob.tools.capabilities.housing.housing import PricePerMeterFilter
|
||||||
|
|
||||||
|
from decimal import Decimal
|
||||||
|
from lxml import etree
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class PopDetail(_Filter):
|
||||||
|
def __init__(self, name, default=NotAvailable):
|
||||||
|
super(PopDetail, self).__init__(default)
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __call__(self, item):
|
||||||
|
return item.env['details'].pop(self.name, self.default)
|
||||||
|
|
||||||
|
|
||||||
|
class CityListPage(HTMLPage):
|
||||||
|
|
||||||
|
def build_doc(self, content):
|
||||||
|
content = super(CityListPage, self).build_doc(content)
|
||||||
|
if content.getroot() is not None:
|
||||||
|
return content
|
||||||
|
return etree.Element("html")
|
||||||
|
|
||||||
|
@method
|
||||||
|
class get_cities(ListElement):
|
||||||
|
item_xpath = '//li'
|
||||||
|
|
||||||
|
class item(ItemElement):
|
||||||
|
klass = City
|
||||||
|
|
||||||
|
obj_id = Format('%s %s',
|
||||||
|
CleanText('./span[has-class("city")]'),
|
||||||
|
CleanText('./span[@class="zipcode"]'))
|
||||||
|
|
||||||
|
obj_name = Format('%s %s',
|
||||||
|
CleanText('./span[has-class("city")]'),
|
||||||
|
CleanText('./span[@class="zipcode"]'))
|
||||||
|
|
||||||
|
|
||||||
|
class HomePage(HTMLPage):
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
HTMLPage.__init__(self, *args, **kwargs)
|
||||||
|
|
||||||
|
add_content = CleanText('(//body/script)[4]', replace=[('window.FLUX_STATE = ', '')])(self.doc) or '{}'
|
||||||
|
api_content = CleanText('(//body/script[@id="__NEXT_DATA__"])')(self.doc)
|
||||||
|
|
||||||
|
self.htmldoc = self.doc
|
||||||
|
self.api_content = json.loads(api_content)
|
||||||
|
self.doc = json.loads(add_content)
|
||||||
|
|
||||||
|
def get_api_key(self):
|
||||||
|
return Dict('runtimeConfig/API/KEY')(self.api_content)
|
||||||
|
|
||||||
|
|
||||||
|
class HousingListPage(JsonPage):
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
JsonPage.__init__(self, *args, **kwargs)
|
||||||
|
if 'ads' not in self.doc:
|
||||||
|
self.doc['ads'] = []
|
||||||
|
|
||||||
|
@pagination
|
||||||
|
@method
|
||||||
|
class get_housing_list(DictElement):
|
||||||
|
item_xpath = 'ads'
|
||||||
|
|
||||||
|
def next_page(self):
|
||||||
|
data = Env('data')(self)
|
||||||
|
if data['offset'] > self.page.doc['total_all']:
|
||||||
|
return
|
||||||
|
|
||||||
|
data['offset'] = data['offset'] + data['limit']
|
||||||
|
return requests.Request("POST", self.page.url, data=json.dumps(data))
|
||||||
|
|
||||||
|
class item(ItemElement):
|
||||||
|
klass = Housing
|
||||||
|
|
||||||
|
def parse(self, el):
|
||||||
|
self.env['details'] = {obj['key']: obj['value_label'] for obj in self.el['attributes']}
|
||||||
|
|
||||||
|
obj_id = Dict('list_id')
|
||||||
|
obj_url = Dict('url')
|
||||||
|
obj_type = Env('query_type')
|
||||||
|
|
||||||
|
obj_area = CleanDecimal(PopDetail('square',
|
||||||
|
default=0),
|
||||||
|
default=NotAvailable)
|
||||||
|
obj_rooms = CleanDecimal(PopDetail('rooms',
|
||||||
|
default=0),
|
||||||
|
default=NotAvailable)
|
||||||
|
|
||||||
|
def obj_GES(self):
|
||||||
|
ges = CleanText(PopDetail('ges', default='|'))(self)
|
||||||
|
return getattr(ENERGY_CLASS, ges[0], NotAvailable)
|
||||||
|
|
||||||
|
def obj_DPE(self):
|
||||||
|
dpe = CleanText(PopDetail('energy_rate', default='|'))(self)
|
||||||
|
return getattr(ENERGY_CLASS, dpe[0], NotAvailable)
|
||||||
|
|
||||||
|
def obj_house_type(self):
|
||||||
|
value = CleanText(PopDetail('real_estate_type'), default=' ')(self).lower()
|
||||||
|
if value == 'parking':
|
||||||
|
return HOUSE_TYPES.PARKING
|
||||||
|
elif value == 'appartement':
|
||||||
|
return HOUSE_TYPES.APART
|
||||||
|
elif value == 'maison':
|
||||||
|
return HOUSE_TYPES.HOUSE
|
||||||
|
elif value == 'terrain':
|
||||||
|
return HOUSE_TYPES.LAND
|
||||||
|
else:
|
||||||
|
return HOUSE_TYPES.OTHER
|
||||||
|
|
||||||
|
def obj_utilities(self):
|
||||||
|
value = CleanText(PopDetail('charges_included',
|
||||||
|
default='Non'),
|
||||||
|
default=NotAvailable)(self)
|
||||||
|
if value == "Oui":
|
||||||
|
return UTILITIES.INCLUDED
|
||||||
|
else:
|
||||||
|
return UTILITIES.EXCLUDED
|
||||||
|
|
||||||
|
def obj_advert_type(self):
|
||||||
|
line_pro = Dict('owner/type')(self)
|
||||||
|
if line_pro == u'pro':
|
||||||
|
return ADVERT_TYPES.PROFESSIONAL
|
||||||
|
else:
|
||||||
|
return ADVERT_TYPES.PERSONAL
|
||||||
|
|
||||||
|
obj_title = Dict('subject')
|
||||||
|
obj_cost = CleanDecimal(Dict('price/0', default=NotAvailable), default=Decimal(0))
|
||||||
|
obj_currency = BaseCurrency.get_currency(u'€')
|
||||||
|
obj_text = Dict('body')
|
||||||
|
obj_location = Dict('location/city_label')
|
||||||
|
obj_date = DateTime(Dict('first_publication_date'))
|
||||||
|
|
||||||
|
def obj_photos(self):
|
||||||
|
photos = []
|
||||||
|
for img in Dict('images/urls_large', default=[])(self):
|
||||||
|
photos.append(HousingPhoto(img))
|
||||||
|
return photos
|
||||||
|
|
||||||
|
def obj_type(self):
|
||||||
|
try:
|
||||||
|
breadcrumb = int(Dict('category_id')(self))
|
||||||
|
except ValueError:
|
||||||
|
breadcrumb = None
|
||||||
|
|
||||||
|
if breadcrumb == 11:
|
||||||
|
return POSTS_TYPES.SHARING
|
||||||
|
elif breadcrumb == 10:
|
||||||
|
|
||||||
|
isFurnished = CleanText(PopDetail('furnished', default=' '))(self)
|
||||||
|
|
||||||
|
if isFurnished.lower() == u'meublé':
|
||||||
|
return POSTS_TYPES.FURNISHED_RENT
|
||||||
|
else:
|
||||||
|
return POSTS_TYPES.RENT
|
||||||
|
else:
|
||||||
|
return POSTS_TYPES.SALE
|
||||||
|
|
||||||
|
obj_price_per_meter = PricePerMeterFilter()
|
||||||
|
obj_details = Env('details')
|
||||||
|
|
||||||
|
|
||||||
|
class HousingPage(HomePage):
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
HomePage.__init__(self, *args, **kwargs)
|
||||||
|
self.doc = self.api_content["props"]["pageProps"]["ad"]
|
||||||
|
|
||||||
|
def get_api_key(self):
|
||||||
|
return Dict('runtimeConfig/API/KEY_JSON')(self.api_content)
|
||||||
|
|
||||||
|
@method
|
||||||
|
class get_housing(ItemElement):
|
||||||
|
klass = Housing
|
||||||
|
|
||||||
|
def parse(self, el):
|
||||||
|
self.env['details'] = {obj['key']: obj['value_label'] for obj in el['attributes']}
|
||||||
|
|
||||||
|
obj_id = Env('_id')
|
||||||
|
|
||||||
|
obj_area = CleanDecimal(PopDetail('square',
|
||||||
|
default=0),
|
||||||
|
default=NotAvailable)
|
||||||
|
obj_rooms = CleanDecimal(PopDetail('rooms',
|
||||||
|
default=0),
|
||||||
|
default=NotAvailable)
|
||||||
|
|
||||||
|
def obj_GES(self):
|
||||||
|
ges = CleanText(PopDetail('ges', default='|'))(self)
|
||||||
|
return getattr(ENERGY_CLASS, ges[0], NotAvailable)
|
||||||
|
|
||||||
|
def obj_DPE(self):
|
||||||
|
dpe = CleanText(PopDetail('energy_rate', default='|'))(self)
|
||||||
|
return getattr(ENERGY_CLASS, dpe[0], NotAvailable)
|
||||||
|
|
||||||
|
def obj_house_type(self):
|
||||||
|
value = CleanText(PopDetail('real_estate_type'), default=' ')(self).lower()
|
||||||
|
if value == 'parking':
|
||||||
|
return HOUSE_TYPES.PARKING
|
||||||
|
elif value == 'appartement':
|
||||||
|
return HOUSE_TYPES.APART
|
||||||
|
elif value == 'maison':
|
||||||
|
return HOUSE_TYPES.HOUSE
|
||||||
|
elif value == 'terrain':
|
||||||
|
return HOUSE_TYPES.LAND
|
||||||
|
else:
|
||||||
|
return HOUSE_TYPES.OTHER
|
||||||
|
|
||||||
|
def obj_utilities(self):
|
||||||
|
value = CleanText(PopDetail('charges_included',
|
||||||
|
default='Non'),
|
||||||
|
default=NotAvailable)(self)
|
||||||
|
if value == "Oui":
|
||||||
|
return UTILITIES.INCLUDED
|
||||||
|
else:
|
||||||
|
return UTILITIES.EXCLUDED
|
||||||
|
|
||||||
|
obj_title = Dict('subject')
|
||||||
|
obj_cost = CleanDecimal(Dict('price/0', default=NotAvailable), default=Decimal(0))
|
||||||
|
obj_currency = BaseCurrency.get_currency(u'€')
|
||||||
|
obj_text = Dict('body')
|
||||||
|
obj_location = Dict('location/city_label')
|
||||||
|
|
||||||
|
def obj_advert_type(self):
|
||||||
|
line_pro = Dict('owner/type')(self)
|
||||||
|
if line_pro == u'pro':
|
||||||
|
return ADVERT_TYPES.PROFESSIONAL
|
||||||
|
else:
|
||||||
|
return ADVERT_TYPES.PERSONAL
|
||||||
|
|
||||||
|
obj_date = DateTime(Dict('first_publication_date'))
|
||||||
|
|
||||||
|
def obj_photos(self):
|
||||||
|
photos = []
|
||||||
|
for img in Dict('images/urls_large', default=[])(self):
|
||||||
|
photos.append(HousingPhoto(img))
|
||||||
|
return photos
|
||||||
|
|
||||||
|
def obj_type(self):
|
||||||
|
try:
|
||||||
|
breadcrumb = int(Dict('category_id')(self))
|
||||||
|
except ValueError:
|
||||||
|
breadcrumb = None
|
||||||
|
|
||||||
|
if breadcrumb == 11:
|
||||||
|
return POSTS_TYPES.SHARING
|
||||||
|
elif breadcrumb == 10:
|
||||||
|
|
||||||
|
isFurnished = CleanText(PopDetail('furnished', default=' '))(self)
|
||||||
|
|
||||||
|
if isFurnished.lower() == u'meublé':
|
||||||
|
return POSTS_TYPES.FURNISHED_RENT
|
||||||
|
else:
|
||||||
|
return POSTS_TYPES.RENT
|
||||||
|
else:
|
||||||
|
return POSTS_TYPES.SALE
|
||||||
|
|
||||||
|
obj_price_per_meter = PricePerMeterFilter()
|
||||||
|
obj_url = Dict('url')
|
||||||
|
obj_details = Env('details')
|
||||||
|
|
||||||
|
|
||||||
|
class PhonePage(JsonPage):
|
||||||
|
def get_phone(self):
|
||||||
|
if Dict('utils/status')(self.doc) == u'OK':
|
||||||
|
return Dict('utils/phonenumber')(self.doc)
|
||||||
|
return NotAvailable
|
105
modules/leboncoin/test.py
Normal file
105
modules/leboncoin/test.py
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from woob.tools.test import BackendTest
|
||||||
|
from woob.tools.value import Value
|
||||||
|
from woob.capabilities.housing import Query, POSTS_TYPES, ADVERT_TYPES
|
||||||
|
from woob.tools.capabilities.housing.housing_test import HousingTest
|
||||||
|
|
||||||
|
|
||||||
|
class LeboncoinTest(BackendTest, HousingTest):
|
||||||
|
MODULE = 'leboncoin'
|
||||||
|
|
||||||
|
FIELDS_ALL_HOUSINGS_LIST = [
|
||||||
|
"id", "type", "advert_type", "url", "title",
|
||||||
|
"currency", "utilities", "date", "location", "text"
|
||||||
|
]
|
||||||
|
FIELDS_ANY_HOUSINGS_LIST = [
|
||||||
|
"area",
|
||||||
|
"cost",
|
||||||
|
"price_per_meter",
|
||||||
|
"photos"
|
||||||
|
]
|
||||||
|
FIELDS_ALL_SINGLE_HOUSING = [
|
||||||
|
"id", "url", "type", "advert_type", "house_type", "title",
|
||||||
|
"cost", "currency", "utilities", "date", "location", "text",
|
||||||
|
"rooms", "details"
|
||||||
|
]
|
||||||
|
FIELDS_ANY_SINGLE_HOUSING = [
|
||||||
|
"area",
|
||||||
|
"GES",
|
||||||
|
"DPE",
|
||||||
|
"photos",
|
||||||
|
# Don't test phone as leboncoin API is strongly rate-limited
|
||||||
|
]
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
if not self.is_backend_configured():
|
||||||
|
self.backend.config['advert_type'] = Value(value='a')
|
||||||
|
self.backend.config['region'] = Value(value='ile_de_france')
|
||||||
|
|
||||||
|
def test_leboncoin_rent(self):
|
||||||
|
query = Query()
|
||||||
|
query.area_min = 20
|
||||||
|
query.cost_max = 1500
|
||||||
|
query.type = POSTS_TYPES.RENT
|
||||||
|
query.cities = []
|
||||||
|
for city in self.backend.search_city('paris'):
|
||||||
|
city.backend = self.backend.name
|
||||||
|
query.cities.append(city)
|
||||||
|
if len(query.cities) == 3:
|
||||||
|
break
|
||||||
|
self.check_against_query(query)
|
||||||
|
|
||||||
|
def test_leboncoin_sale(self):
|
||||||
|
query = Query()
|
||||||
|
query.area_min = 20
|
||||||
|
query.type = POSTS_TYPES.SALE
|
||||||
|
query.cities = []
|
||||||
|
for city in self.backend.search_city('paris'):
|
||||||
|
city.backend = self.backend.name
|
||||||
|
query.cities.append(city)
|
||||||
|
if len(query.cities) == 3:
|
||||||
|
break
|
||||||
|
self.check_against_query(query)
|
||||||
|
|
||||||
|
def test_leboncoin_furnished_rent(self):
|
||||||
|
query = Query()
|
||||||
|
query.area_min = 20
|
||||||
|
query.cost_max = 1500
|
||||||
|
query.type = POSTS_TYPES.FURNISHED_RENT
|
||||||
|
query.cities = []
|
||||||
|
for city in self.backend.search_city('paris'):
|
||||||
|
city.backend = self.backend.name
|
||||||
|
query.cities.append(city)
|
||||||
|
if len(query.cities) == 3:
|
||||||
|
break
|
||||||
|
self.check_against_query(query)
|
||||||
|
|
||||||
|
def test_leboncoin_professional(self):
|
||||||
|
query = Query()
|
||||||
|
query.area_min = 20
|
||||||
|
query.cost_max = 900
|
||||||
|
query.type = POSTS_TYPES.RENT
|
||||||
|
query.advert_types = [ADVERT_TYPES.PROFESSIONAL]
|
||||||
|
query.cities = []
|
||||||
|
for city in self.backend.search_city('paris'):
|
||||||
|
city.backend = self.backend.name
|
||||||
|
query.cities.append(city)
|
||||||
|
self.check_against_query(query)
|
24
modules/logicimmo/__init__.py
Normal file
24
modules/logicimmo/__init__.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from .module import LogicimmoModule
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['LogicimmoModule']
|
108
modules/logicimmo/browser.py
Normal file
108
modules/logicimmo/browser.py
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from woob.browser import PagesBrowser, URL
|
||||||
|
from woob.browser.profiles import Firefox
|
||||||
|
from woob.capabilities.housing import (TypeNotSupported, POSTS_TYPES,
|
||||||
|
HOUSE_TYPES)
|
||||||
|
from .pages import CitiesPage, SearchPage, HousingPage, PhonePage
|
||||||
|
|
||||||
|
|
||||||
|
class LogicimmoBrowser(PagesBrowser):
|
||||||
|
BASEURL = 'https://www.logic-immo.com/'
|
||||||
|
PROFILE = Firefox()
|
||||||
|
city = URL('asset/t9/getLocalityT9.php\?site=fr&lang=fr&json=%22(?P<pattern>.*)%22',
|
||||||
|
CitiesPage)
|
||||||
|
search = URL('(?P<type>location-immobilier|vente-immobilier|recherche-colocation)-(?P<cities>.*)/options/(?P<options>.*)', SearchPage)
|
||||||
|
housing = URL('detail-(?P<_id>.*).htm', HousingPage)
|
||||||
|
phone = URL('(?P<urlcontact>.*)', PhonePage)
|
||||||
|
|
||||||
|
TYPES = {POSTS_TYPES.RENT: 'location-immobilier',
|
||||||
|
POSTS_TYPES.SALE: 'vente-immobilier',
|
||||||
|
POSTS_TYPES.SHARING: 'recherche-colocation',
|
||||||
|
POSTS_TYPES.FURNISHED_RENT: 'location-immobilier',
|
||||||
|
POSTS_TYPES.VIAGER: 'vente-immobilier'}
|
||||||
|
|
||||||
|
RET = {HOUSE_TYPES.HOUSE: '2',
|
||||||
|
HOUSE_TYPES.APART: '1',
|
||||||
|
HOUSE_TYPES.LAND: '3',
|
||||||
|
HOUSE_TYPES.PARKING: '10',
|
||||||
|
HOUSE_TYPES.OTHER: '14'}
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(LogicimmoBrowser, self).__init__(*args, **kwargs)
|
||||||
|
self.session.headers['X-Requested-With'] = 'XMLHttpRequest'
|
||||||
|
|
||||||
|
def get_cities(self, pattern):
|
||||||
|
if pattern:
|
||||||
|
return self.city.go(pattern=pattern).get_cities()
|
||||||
|
|
||||||
|
def search_housings(self, type, cities, nb_rooms, area_min, area_max, cost_min, cost_max, house_types):
|
||||||
|
if type not in self.TYPES:
|
||||||
|
raise TypeNotSupported()
|
||||||
|
|
||||||
|
options = []
|
||||||
|
|
||||||
|
ret = []
|
||||||
|
if type == POSTS_TYPES.VIAGER:
|
||||||
|
ret = ['15']
|
||||||
|
else:
|
||||||
|
for house_type in house_types:
|
||||||
|
if house_type in self.RET:
|
||||||
|
ret.append(self.RET.get(house_type))
|
||||||
|
|
||||||
|
if len(ret):
|
||||||
|
options.append('groupprptypesids=%s' % ','.join(ret))
|
||||||
|
|
||||||
|
if type == POSTS_TYPES.FURNISHED_RENT:
|
||||||
|
options.append('searchoptions=4')
|
||||||
|
|
||||||
|
options.append('pricemin=%s' % (cost_min if cost_min else '0'))
|
||||||
|
|
||||||
|
if cost_max:
|
||||||
|
options.append('pricemax=%s' % cost_max)
|
||||||
|
|
||||||
|
options.append('areamin=%s' % (area_min if area_min else '0'))
|
||||||
|
|
||||||
|
if area_max:
|
||||||
|
options.append('areamax=%s' % area_max)
|
||||||
|
|
||||||
|
if nb_rooms:
|
||||||
|
if type == POSTS_TYPES.SHARING:
|
||||||
|
options.append('nbbedrooms=%s' % ','.join([str(i) for i in range(nb_rooms, 7)]))
|
||||||
|
else:
|
||||||
|
options.append('nbrooms=%s' % ','.join([str(i) for i in range(nb_rooms, 7)]))
|
||||||
|
|
||||||
|
self.search.go(type=self.TYPES.get(type, 'location-immobilier'),
|
||||||
|
cities=cities,
|
||||||
|
options='/'.join(options))
|
||||||
|
|
||||||
|
if type == POSTS_TYPES.SHARING:
|
||||||
|
return self.page.iter_sharing()
|
||||||
|
|
||||||
|
return self.page.iter_housings(query_type=type)
|
||||||
|
|
||||||
|
def get_housing(self, _id, housing=None):
|
||||||
|
return self.housing.go(_id=_id).get_housing(obj=housing)
|
||||||
|
|
||||||
|
def get_phone(self, _id):
|
||||||
|
if _id.startswith('location') or _id.startswith('vente'):
|
||||||
|
urlcontact, params = self.housing.stay_or_go(_id=_id).get_phone_url_datas()
|
||||||
|
return self.phone.go(urlcontact=urlcontact, params=params).get_phone()
|
BIN
modules/logicimmo/favicon.png
Normal file
BIN
modules/logicimmo/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.5 KiB |
100
modules/logicimmo/module.py
Normal file
100
modules/logicimmo/module.py
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright(C) 2014 Bezleputh
|
||||||
|
#
|
||||||
|
# This file is part of a woob module.
|
||||||
|
#
|
||||||
|
# This woob module is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This woob module is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
|
||||||
|
from woob.tools.backend import Module
|
||||||
|
from woob.capabilities.housing import (CapHousing, Housing, HousingPhoto,
|
||||||
|
ADVERT_TYPES)
|
||||||
|
from woob.capabilities.base import UserError
|
||||||
|
from woob import __version__ as WOOB_VERSION
|
||||||
|
from .browser import LogicimmoBrowser
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['LogicimmoModule']
|
||||||
|
|
||||||
|
|
||||||
|
class LogicImmoCitiesError(UserError):
|
||||||
|
"""
|
||||||
|
Raised when more than 3 cities are selected
|
||||||
|
"""
|
||||||
|
def __init__(self, msg='You cannot select more than three cities'):
|
||||||
|
UserError.__init__(self, msg)
|
||||||
|
|
||||||
|
|
||||||
|
class LogicimmoModule(Module, CapHousing):
|
||||||
|
NAME = 'logicimmo'
|
||||||
|
DESCRIPTION = u'logicimmo website'
|
||||||
|
MAINTAINER = u'Bezleputh'
|
||||||
|
EMAIL = 'carton_ben@yahoo.fr'
|
||||||
|
LICENSE = 'AGPLv3+'
|
||||||
|
VERSION = WOOB_VERSION
|
||||||
|
|
||||||
|
BROWSER = LogicimmoBrowser
|
||||||
|
|
||||||
|
def get_housing(self, housing):
|
||||||
|
if isinstance(housing, Housing):
|
||||||
|
id = housing.id
|
||||||
|
else:
|
||||||
|
id = housing
|
||||||
|
housing = None
|
||||||
|
housing = self.browser.get_housing(id, housing)
|
||||||
|
return housing
|
||||||
|
|
||||||
|
def search_city(self, pattern):
|
||||||
|
return self.browser.get_cities(pattern)
|
||||||
|
|
||||||
|
def search_housings(self, query):
|
||||||
|
if(len(query.advert_types) == 1 and
|
||||||
|
query.advert_types[0] == ADVERT_TYPES.PERSONAL):
|
||||||
|
# Logic-immo is pro only
|
||||||
|
return list()
|
||||||
|
|
||||||
|
cities_names = ['%s' % c.name.replace(' ', '-') for c in query.cities if c.backend == self.name]
|
||||||
|
cities_ids = ['%s' % c.id for c in query.cities if c.backend == self.name]
|
||||||
|
|
||||||
|
if len(cities_names) == 0:
|
||||||
|
return list()
|
||||||
|
|
||||||
|
if len(cities_names) > 3:
|
||||||
|
raise LogicImmoCitiesError()
|
||||||
|
|
||||||
|
cities = ','.join(cities_names + cities_ids)
|
||||||
|
return self.browser.search_housings(query.type, cities.lower(), query.nb_rooms,
|
||||||
|
query.area_min, query.area_max,
|
||||||
|
query.cost_min, query.cost_max,
|
||||||
|
query.house_types)
|
||||||
|
|
||||||
|
def fill_housing(self, housing, fields):
|
||||||
|
if 'phone' in fields:
|
||||||
|
housing.phone = self.browser.get_phone(housing.id)
|
||||||
|
fields.remove('phone')
|
||||||
|
|
||||||
|
if len(fields) > 0:
|
||||||
|
self.browser.get_housing(housing.id, housing)
|
||||||
|
|
||||||
|
return housing
|
||||||
|
|
||||||
|
def fill_photo(self, photo, fields):
|
||||||
|
if 'data' in fields and photo.url and not photo.data:
|
||||||
|
photo.data = self.browser.open(photo.url).content
|
||||||
|
return photo
|
||||||
|
|
||||||
|
OBJECTS = {Housing: fill_housing,
|
||||||
|
HousingPhoto: fill_photo,
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user