Compare commits

1 commit: master...responsive

Author | SHA1 | Date
---|---|---
 | 7309b4da18 |
@@ -343,16 +343,16 @@ max-args=5
 ignored-argument-names=_.*
 
 # Maximum number of locals for function / method body
-max-locals=25
+max-locals=15
 
 # Maximum number of return / yield for function / method body
 max-returns=6
 
 # Maximum number of branch for function / method body
-max-branches=20
+max-branches=12
 
 # Maximum number of statements in function / method body
-max-statements=100
+max-statements=50
 
 # Maximum number of parents for a class (see R0901).
 max-parents=7
@@ -1 +0,0 @@
-data
@@ -1,10 +0,0 @@
-root = true
-
-[*]
-indent_style = space
-indent_size = 4
-end_of_line = lf
-insert_final_newline = true
-
-[*.py]
-max_line_length=120
@@ -4,9 +4,6 @@
     "env": {
         "browser": true
     },
-    "parserOptions": {
-        "ecmaVersion": 8
-    },
     rules: {
         'indent': ["error", 4, { 'SwitchCase': 1 }],
     }
.gitignore (vendored): 4 changed lines
@@ -7,7 +7,3 @@ config/
 node_modules
 flatisfy/web/static/assets
 data/
-doc/_build
-data_rework/
-.env
-.htpasswd
@@ -1,19 +0,0 @@
-before_script:
-  - "pip install -r requirements.txt"
-  - "pip install pylint"
-  - "curl -sL https://deb.nodesource.com/setup_10.x | bash -"
-  - "apt-get install -y nodejs jq"
-  - "npm install"
-
-lint:
-  image: "python:3"
-  stage: "test"
-  script:
-    - "hooks/pre-commit"
-
-test:
-  image: "python:3"
-  stage: "test"
-  script:
-    - python -m flatisfy init-config | jq '.constraints.default.house_types = ["APART"] | .constraints.default.type = "RENT" | .constraints.default.postal_codes = ["75014"]' > /tmp/config.json
-    - python -m flatisfy test --config /tmp/config.json
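For reference, the deleted CI `test` job above reduces to the following local run; a minimal sketch assuming Python 3 and `jq` are installed:

```sh
# Build a throwaway config with one constraint, then run the test suite,
# exactly as the removed CI job did.
pip install -r requirements.txt
python -m flatisfy init-config \
    | jq '.constraints.default.house_types = ["APART"] | .constraints.default.type = "RENT" | .constraints.default.postal_codes = ["75014"]' \
    > /tmp/config.json
python -m flatisfy test --config /tmp/config.json
```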
.vscode/extensions.json (vendored): 6 changed lines
@@ -1,6 +0,0 @@
-{
-    "recommendations": [
-        "mtxr.sqltools",
-        "mtxr.sqltools-driver-sqlite"
-    ]
-}
.vscode/settings.json (vendored): 15 changed lines
@@ -1,15 +0,0 @@
-{
-    "cSpell.words": [
-        "woob",
-        "flatisfy"
-    ],
-    "sqltools.useNodeRuntime": true,
-    "sqltools.connections": [
-        {
-            "previewLimit": 50,
-            "driver": "SQLite",
-            "name": "flatisfy",
-            "database": "${workspaceFolder:flatisfy}/data/flatisfy.db"
-        }
-    ]
-}
@@ -1,16 +1,9 @@
 ## TL;DR
 
-We have a [code of conduct](CodeOfConduct.md), please make sure to review it
-prior to contributing.
-
 * Branch off `master`.
 * One feature per commit.
 * In case of changes request, amend your commit.
 
-You can either open issues / merge requests on [my
-Gitlab](https://git.phyks.me/Phyks/flatisfy/) (preferred) or on the [Github
-mirror](https://github.com/phyks/flatisfy).
-
 
 ## Useful infos
 
@@ -50,47 +43,5 @@ locale.
 WIP commits into a single one, to maintain the invariant of "one feature, one
 commit".
 
-Thanks!
-
-
-## Adding support for a new Woob backend
-
-To enable a new Woob `CapHousing` backend in Flatisfy, you should add it to
-the list of available backends in
-[flatisfy/fetch.py#L69-70](https://git.phyks.me/Phyks/flatisfy/blob/master/flatisfy/fetch.py#L69-70)
-and update the list of `BACKEND_PRECEDENCES` for deduplication in
-[flatisfy/filters/duplicates.py#L24-31](https://git.phyks.me/Phyks/flatisfy/blob/master/flatisfy/filters/duplicates.py#L24-31).
-Thats' all!
-
-
-## Adding new data files
-
-If you want to add new data files, especially for public transportation stops
-(to cover more cities), please follow these steps:
-
-1. Download and put the **original** file in `flatisfy/data_files`. Please,
-   use the original data file to ease tracking licenses and be able to still
-   have a working pipeline, by letting the user download it and place it in
-   the right place, in case of license conflict.
-2. Mention the added data file and its license in `README.md`, in the
-   dedicated section.
-3. Write a preprocessing function in `flatisfy/data_files/__init__.py`. You
-   can have a look at the existing functions for a model.
-
-
-## Adding new migrations
-
-If you want to change the database schema, you should create a matching
-migration. Here is the way to do it correctly:
-
-1. First, edit the `flatisfy/models` files to create / remove the required
-   fields. If you create a new database from scratch, these are the files
-   which will be used.
-2. Then, run `alembic revision -m "Some description"` in the root of the git
-   repo to create a new migration.
-3. Finally, edit the newly created migration file under the `migrations/`
-   folder to add the required code to alter the database (both upgrade and
-   downgrade).
-
 Thanks!
 
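As a sketch of the migration workflow described in the removed section above, assuming Alembic is installed and you run it from the repository root (the revision message and generated filename are hypothetical):

```sh
# 1. Edit the flatisfy/models files first, then record a new revision.
alembic revision -m "add my new column"
# 2. Implement upgrade() and downgrade() in the generated file.
$EDITOR migrations/versions/*_add_my_new_column.py
# 3. Apply the migration to your local database.
alembic upgrade head
```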
CodeOfConduct.md: 115 changed lines
@@ -1,115 +0,0 @@
-# Code of conduct
-
-This code of conduct outlines our expectations for participants within the
-Flatisfy community, as well as steps to report unacceptable behavior. We are
-committed to provide a welcoming and inspiring community for all and expect
-our code of conduct to be honored. Anyone who violates this code of conduct
-may be banned from the community.
-
-Our open source community strives to:
-
-- Be friendly and patient.
-- Be welcoming: We strive to be a community that welcomes and supports people
-  of all backgrounds and identities. This includes, but is not limited to
-  members of any race, ethnicity, culture, national origin, colour,
-  immigration status, social and economic class, educational level, sex,
-  sexual orientation, gender identity and expression, age, size, family
-  status, political belief, religion, and mental and physical ability.
-- Be considerate: Your work will be used by other people, and you in turn will
-  depend on the work of others. Any decision you take will affect users and
-  colleagues, and you should take those consequences into account when making
-  decisions. Remember that we’re a world-wide community, so you might not be
-  communicating in someone else’s primary language.
-- Be respectful: Not all of us will agree all the time, but disagreement is no
-  excuse for poor behavior and poor manners. We might all experience some
-  frustration now and then, but we cannot allow that frustration to turn into
-  a personal attack. It’s important to remember that a community where people
-  feel uncomfortable or threatened is not a productive one.
-- Be careful in the words that we choose. Be kind to others. Do not insult or
-  put down other participants. Harassment and other exclusionary behavior
-  aren’t acceptable.
-- Try to understand why we disagree: Disagreements, both social and technical,
-  happen all the time. It is important that we resolve disagreements and
-  differing views constructively. Remember that we’re different. The strength
-  of our community comes from its diversity, people from a wide range of
-  backgrounds. Different people have different perspectives on issues. Being
-  unable to understand why someone holds a viewpoint doesn’t mean that they’re
-  wrong. Don’t forget that it is human to err and blaming each other doesn’t
-  get us anywhere. Instead, focus on helping to resolve issues and learning
-  from mistakes.
-
-
-## Definitions
-
-Harassment includes, but is not limited to:
-
-- Offensive comments related to gender, gender identity and expression, sexual
-  orientation, disability, mental illness, neuro(a)typicality, physical
-  appearance, body size, race, age, regional discrimination, political or
-  religious affiliation
-- Unwelcome comments regarding a person’s lifestyle choices and practices,
-  including those related to food, health, parenting, drugs, and employment
-- Deliberate misgendering. This includes deadnaming or persistently using a
-  pronoun that does not correctly reflect a person’s gender identity. You must
-  address people by the name they give you when not addressing them by their
-  username or handle
-- Threats of violence, both physical and psychological
-- Incitement of violence towards any individual, including encouraging a
-  person to commit suicide or to engage in self-harm
-- Deliberate intimidation
-- Stalking or following
-- Harassing photography or recording, including logging online activity for
-  harassment purposes
-- Sustained disruption of discussion
-- Unwelcome sexual attention, including gratuitous or off-topic sexual images
-  or behaviour
-- Pattern of inappropriate social contact, such as requesting/assuming
-  inappropriate levels of intimacy with others
-- Continued one-on-one communication after requests to cease
-- Deliberate “outing” of any aspect of a person’s identity without their
-  consent except as necessary to protect others from intentional abuse
-- Publication of non-harassing private communication
-
-Our open source community prioritizes marginalized people’s safety over
-privileged people’s comfort. We will not act on complaints regarding:
-
-- ‘Reverse’ -isms, including ‘reverse racism,’ ‘reverse sexism,’ and ‘cisphobia’
-- Reasonable communication of boundaries, such as “leave me alone,” “go away,” or “I’m not discussing this with you”
-- Refusal to explain or debate social justice concepts
-- Communicating in a ‘tone’ you don’t find congenial
-- Criticizing racist, sexist, cissexist, or otherwise oppressive behavior or assumptions
-
-
-## Diversity Statement
-
-We encourage everyone to participate and are committed to building a community
-for all. Although we will fail at times, we seek to treat everyone both as
-fairly and equally as possible. Whenever a participant has made a mistake, we
-expect them to take responsibility for it. If someone has been harmed or
-offended, it is our responsibility to listen carefully and respectfully, and
-do our best to right the wrong.
-
-Although this list cannot be exhaustive, we explicitly honor diversity in age,
-gender, gender identity or expression, culture, ethnicity, language, national
-origin, political beliefs, profession, race, religion, sexual orientation,
-socioeconomic status, and technical ability. We will not tolerate
-discrimination based on any of the protected characteristics above, including
-participants with disabilities.
-
-## Reporting Issues
-
-If you experience or witness unacceptable behavior—or have any other
-concerns—please report it by contacting us via
-phyks+[name of this project]@phyks dot me. All reports will be handled with
-discretion. In your report please include:
-
-- Your contact information.
-- Names (real, nicknames, or pseudonyms) of any individuals involved. If there
-  are additional witnesses, please include them as well. Your account of what
-  occurred, and if you believe the incident is ongoing. If there is a publicly
-  available record (e.g. a mailing list archive or a public IRC logger),
-  please include a link.
-- Any additional information that may be helpful.
-
-We will respect confidentiality requests for the purpose of protecting victims
-of abuse.
README.md: 38 changed lines
@@ -1,25 +1,21 @@
 Flatisfy
 ========
 
-**PROJECT HAS MOVED: Please use
-[Framagit](https://framagit.org/phyks/Flatisfy) to create issues on the
-project. You can still create merge requests from either
-[Github](https://github.com/phyks/flatisfy) or
-[Framagit](https://framagit.org/phyks/Flatisfy).**
-
 Flatisfy is your new companion to ease your search of a new housing :)
 
-<script src="https://liberapay.com/Phyks/widgets/button.js"></script>
-<noscript><a href="https://liberapay.com/Phyks/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg"></a></noscript>
-
 **Note**: This software is under heavy development at the moment, and the
 database schema could change at any time. Do not consider it as being
 production ready. However, I am currently using it for my own housing search
 and it is working fine :)
 
+**Note**: For the moment, it requires [this MR on
+Weboob](https://git.weboob.org/weboob/devel/merge_requests/31) which has not
+yet been merged.
+
 <img src="doc/img/home.png" width="45%"/> <img src="doc/img/home2.png" width="45%"/>
 
-It uses [woob](https://gitlab.com/woob/woob/) to get all the housing posts on most of
+It uses [Weboob](http://weboob.org/) to get all the housing posts on most of
 the websites offering housings posts, and then offers a bunch of pipelines to
 filter and deduplicate the fetched housings.
 
@@ -41,11 +37,7 @@ This code is not restricted to handling flats only!
 
 ## Getting started
 
-See the [getting started guide](doc/0.getting_started.md). If you want to give
-it a try quickly, you can have a look at the [Docker image](doc/2.docker.md).
-
-Documentation for the whole app is available
-[online](https://doc.phyks.me/flatisfy/).
+See the [getting started guide](doc/0.getting_started.md).
 
 
 ## Documentation
@@ -85,7 +77,7 @@ which covers Paris. If you want to run the script using some other location,
 you might have to change these files by matching datasets.
 
 * [LaPoste Hexasmal](https://datanova.legroupe.laposte.fr/explore/dataset/laposte_hexasmal/?disjunctive.code_commune_insee&disjunctive.nom_de_la_commune&disjunctive.code_postal&disjunctive.libell_d_acheminement&disjunctive.ligne_5) for the list of cities and postal codes in France.
-* [Navitia public transport datasets](https://navitia.opendatasoft.com/explore/?sort=modified&refine.geographicarea=France) for the list of subway/tram/bus stations with their positions in France. These are the `stops_fr-*.txt` files, extracted from the `NTFS` datasets for each region.
+* [RATP stations](https://data.ratp.fr/explore/dataset/positions-geographiques-des-stations-du-reseau-ratp/table/?disjunctive.stop_name&disjunctive.code_postal&disjunctive.departement) for the list of subway stations with their positions in Paris and nearby areas.
 
 Both datasets are licensed under the Open Data Commons Open Database License
 (ODbL): https://opendatacommons.org/licenses/odbl/.
@@ -102,23 +94,9 @@ explicitly mentionned otherwise.
 See the `CONTRIBUTING.md` file for more infos.
 
 
-## API
-
-Your Flatisfy instance is accessible through an API. API documentation is
-available
-[here](https://doc.phyks.me/flatisfy/flatisfy.web.routes.html#module-flatisfy.web.routes.api).
-
-
-## Getting help
-
-Feel free to open issues. An IRC channel is available at [irc://irc.freenode.net/flatisfy](irc://irc.freenode.net/flatisfy) as well.
-
-
 ## Thanks
 
-* [Woob](https://gitlab.com/woob/woob/). Note that this is actually the only and best
-  software out there to scrape housing posts online. Using it in Flatisfy does
-  not mean the authors of Flatisfy endorse Woob authors' views.
+* [Weboob](http://weboob.org/)
 * The OpenData providers listed above!
 * Navitia for their really cool public transportation API.
 * A lots of Python modules, required for this script (see `requirements.txt`).
alembic.ini: 74 changed lines
@@ -1,74 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-script_location = migrations
-
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# timezone to use when rendering the date
-# within the migration file as well as the filename.
-# string value is passed to dateutil.tz.gettz()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-#truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; this defaults
-# to migrations/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path
-# version_locations = %(here)s/bar %(here)s/bat migrations/versions
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = sqlite:///data/flatisfy.db
-
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
@@ -1,65 +1,30 @@
 Getting started
 ===============
 
-## Dependency on Woob
-
-**Important**: Flatisfy relies on [Woob](https://gitlab.com/woob/woob/) to fetch
-housing posts from housing websites.
-
-If you `pip install -r requirements.txt` it will install the latest
-development version of [Woob](https://gitlab.com/woob/woob/) and the
-[Woob modules](https://gitlab.com/woob/modules/), which should be the
-best version available out there. You should update these packages regularly,
-as they evolve quickly.
-
-Woob is made of two parts: a core and modules (which is the actual code
-fetching data from websites). Modules tend to break often and are then updated
-often, you should keep them up to date. This can be done by installing and
-upgrading the packages listed in the `requirements.txt` and using the default
-configuration.
-
-This is a safe default configuration. However, a better option is usually to
-clone [Woob git repo](https://gitlab.com/woob/woob/) somewhere, on
-your disk, to point `modules_path` configuration option to
-`path_to_woob_git/modules` (see the configuration section below) and to run
-a `git pull; python setup.py install` in the Woob git repo often.
-
-A copy of the Woob modules is available in the `modules` directory at the
-root of this repository, you can use `"modules_path": "/path/to/flatisfy/modules"` to use them.
-This copy may or may not be more up to date than the current state of official
-Woob modules. Some changes are made there, which are not backported
-upstream. Woob official modules are not synced in the `modules` folder on a
-regular basis, so try both and see which ones match your needs! :)
+**Important**: Flatisfy relies on [Weboob](http://weboob.org/) to fetch
+housing posts from housing websites. Then, you should install the [`devel`
+branch](https://git.weboob.org/weboob/devel/) and update it regularly,
+especially if Flatisfy suddenly stops fetching housing posts.
+
+**Note**: For the moment, it requires [this MR on
+Weboob](https://git.weboob.org/weboob/devel/merge_requests/31) which has not
+yet been merged.
 
 ## TL;DR
 
-An alternative method is available using Docker. See [2.docker.md](2.docker.md).
-
 1. Clone the repository.
 2. Install required Python modules: `pip install -r requirements.txt`.
 3. Init a configuration file: `python -m flatisfy init-config > config.json`.
    Edit it according to your needs (see below).
 4. Build the required data files:
    `python -m flatisfy build-data --config config.json`.
-5. You can now run `python -m flatisfy import --config config.json` to fetch
-   available flats, filter them and import everything in a SQLite database,
-   usable with the web visualization.
+5. Use it to `fetch` (and output a filtered JSON list of flats) or `import`
+   (into an SQLite database, for the web visualization) a list of flats
+   matching your criteria.
 6. Install JS libraries and build the webapp:
    `npm install && npm run build:dev` (use `build:prod` in production).
 7. Use `python -m flatisfy serve --config config.json` to serve the web app.
 
-_Note_: `Flatisfy` requires an up-to-date Node version. You can find
-instructions on the [NodeJS website](https://nodejs.org/en/) to install latest
-LTS version.
-
-_Note_: Alternatively, you can `python -m flatisfy fetch --config config.json`
-to fetch available flats, filter them and output them as a filtered JSON list
-(the web visualization will not be able to display them). This is mainly
-useful if you plan in integrating Flatisfy in your own pipeline.
-
-
 
 ## Available commands
 
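Chained together, the TL;DR steps above give roughly this first-run session; a sketch assuming a POSIX shell with Python 3 and npm available (the clone URL follows the repository link in `CONTRIBUTING.md`):

```sh
git clone https://git.phyks.me/Phyks/flatisfy.git && cd flatisfy
pip install -r requirements.txt
python -m flatisfy init-config > config.json          # then edit constraints by hand
python -m flatisfy build-data --config config.json
python -m flatisfy import --config config.json        # or `fetch` for a JSON list on stdout
npm install && npm run build:dev                      # build:prod in production
python -m flatisfy serve --config config.json         # web UI, port 8080 by default
```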
@@ -76,30 +41,6 @@ The available commands are:
 * `serve` to serve the built-in webapp with the development server. Do not use
   in production.
 
-_Note:_ Fetching flats can be quite long and take up to a few minutes. This
-should be better optimized. To get a verbose output and have an hint about the
-progress, use the `-v` argument. It can remain stuck at "Loading flats for
-constraint XXX...", which simply means it is fetching flats (using Woob
-under the hood) and this step can be super long if there are lots of flats to
-fetch. If this happens to you, you can set `max_entries` in your config to
-limit the number of flats to fetch.
-
-
-### Common arguments
-
-You can pass some command-line arguments to Flatisfy commands, common to all the available commands. These are
-
-* `--help`/`-h` to get some help message about the current command.
-* `--data-dir DIR` to overload the `data_directory` value from config.
-* `--config CONFIG` to use the config file located at `CONFIG`.
-* `--passes [0, 1, 2, 3]` to overload the `passes` value from config.
-* `--max-entries N` to overload the `max_entries` value from config.
-* `-v` to enable verbose output.
-* `-vv` to enable debug output.
-* `--constraints` to specify a list of constraints to use (e.g. to restrict
-  import to a subset of available constraints from the config). This list
-  should be passed as a comma-separated list.
-
 
 ## Configuration
 
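For illustration, the removed common arguments combine as follows; a sketch using the constraint name `default` from the sample CI config:

```sh
# Verbose import, restricted to a single constraint and capped at 100 posts.
python -m flatisfy import --config config.json --constraints default --max-entries 100 -v
```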
@@ -117,15 +58,9 @@ List of configuration options:
   means that it will store the database in the default location, in
   `data_directory`.
 * `navitia_api_key` is an API token for [Navitia](https://www.navitia.io/)
-  which is required to compute travel times for `PUBLIC_TRANSPORT` mode.
-* `mapbox_api_key` is an API token for [Mapbox](http://mapbox.com/)
-  which is required to compute travel times for `WALK`, `BIKE` and `CAR`
-  modes.
-* `modules_path` is the path to the Woob modules. It can be `null` if you
-  want Woob to use the locally installed [Woob
-  modules](https://gitlab.com/woob/modules/), which you should install
-  yourself. This is the default value. If it is a string, it should be an
-  absolute path to the folder containing Woob modules.
+  which is required to compute travel times.
+* `modules_path` is the path to the Weboob modules. It can be `None` if you
+  want Weboob to use the locally pip-installed modules (default value).
 * `port` is the port on which the development webserver should be
   listening (default to `8080`).
 * `host` is the host on which the development webserver should be listening
@@ -133,21 +68,8 @@ List of configuration options:
 * `webserver` is a server to use instead of the default Bottle built-in
   webserver, see [Bottle deployment
   doc](http://bottlepy.org/docs/dev/deployment.html).
-* `backends` is a list of Woob backends to enable. It defaults to any
-  available and supported Woob backend.
-* `store_personal_data` is a boolean indicated whether or not Flatisfy should
-  fetch personal data from housing posts and store them in database. Such
-  personal data include contact phone number for instance. By default,
-  Flatisfy does not store such personal data.
-* `max_distance_housing_station` is the maximum distance (in meters) between
-  an housing and a public transport station found for this housing (default is
-  `1500`). This is useful to avoid false-positive.
-* `duplicate_threshold` is the minimum score in the deep duplicate detection
-  step to consider two flats as being duplicates (defaults to `15`).
-* `serve_images_locally` lets you download all the images from the housings
-  websites when importing the posts. Then, all your Flatisfy works standalone,
-  serving the local copy of the images instead of fetching the images from the
-  remote websites every time you look through the fetched housing posts.
+* `backends` is a list of Weboob backends to enable. It defaults to any
+  available and supported Weboob backend.
 
 _Note:_ In production, you can either use the `serve` command with a reliable
 webserver instead of the default Bottle webserver (specifying a `webserver`
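A minimal `config.json` exercising the options above might look like the following sketch; the paths and backend names are illustrative only (the constraints section is sketched further below, after the constraints documentation):

```sh
# Write an illustrative config file; adjust every value to your setup.
cat > config.json <<'EOF'
{
    "data_directory": "/home/user/flatisfy-data",
    "modules_path": null,
    "port": 8080,
    "host": "127.0.0.1",
    "backends": ["seloger", "leboncoin"]
}
EOF
```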
@@ -160,16 +82,15 @@ You should specify some constraints to filter the resulting housings list,
 under the `constraints` key. The available constraints are:
 
 * `type` is the type of housing you want, either `RENT` (to rent), `SALE` (to
-  buy), `SHARING` (for a shared housing), `FURNISHED_RENT` (for a furnished
-  rent), `VIAGER` (for a viager, lifetime sale).
-* `house_types` is a list of house types you are looking for. Values can be
+  buy) or `SHARING` (for a shared housing).
+* `housing_types` is a list of house types you are looking for. Values can be
   `APART` (flat), `HOUSE`, `PARKING`, `LAND`, `OTHER` (everything else) or
   `UNKNOWN` (anything which was not matched with one of the previous
   categories).
 * `area` (in m²), `bedrooms`, `cost` (in currency unit), `rooms`: this is a
   tuple of `(min, max)` values, defining an interval in which the value should
   lie. A `null` value means that any value is within this bound.
-* `postal_codes` (as strings) is a list of postal codes. You should include any postal code
+* `postal_codes` is a list of postal codes. You should include any postal code
   you want, and especially the postal codes close to the precise location you
   want.
 * `time_to` is a dictionary of places to compute travel time to them.
@@ -179,7 +100,6 @@ under the `constraints` key. The available constraints are:
     "time_to": {
         "foobar": {
             "gps": [LAT, LNG],
-            "mode": A transport mode,
             "time": [min, max]
         }
     }
@@ -188,30 +108,7 @@ under the `constraints` key. The available constraints are:
 means that the housings must be between the `min` and `max` bounds (possibly
 `null`) from the place identified by the GPS coordinates `LAT` and `LNG`
 (latitude and longitude), and we call this place `foobar` in human-readable
-form. `mode` should be either `PUBLIC_TRANSPORT`, `WALK`, `BIKE` or `CAR`.
-Beware that `time` constraints are in **seconds**. You should take
-some margin as the travel time computation is done with found nearby public
-transport stations, which is only a rough estimate of the flat position. For
-`PUBLIC_TRANSPORT` the travel time is computed assuming a route the next
-Monday at 8am.
-* `minimum_nb_photos` lets you filter out posts with less than this number of
-  photos.
-* `description_should_contain` lets you specify a list of terms that should
-  be present in the posts descriptions. Typically, if you expect "parking" to
-  be in all the posts Flatisfy fetches for you, you can set
-  `description_should_contain: ["parking"]`. You can also use list of terms
-  which acts as an "or" operation. For example, if you are looking for a flat
-  with a parking and with either a balcony or a terrace, you can use
-  `description_should_contain: ["parking", ["balcony", "terrace"]]`
-* `description_should_not_contain` lets you specify a list of terms that should
-  never occur in the posts descriptions. Typically, if you wish to avoid
-  "coloc" in the posts Flatisfy fetches for you, you can set
-  `description_should_not_contain: ["coloc"]`.
-
-You can think of constraints as "a set of criterias to filter out flats". You
-can specify as many constraints as you want, in the configuration file,
-provided that you name each of them uniquely.
+form. Beware that `time` constraints are in **seconds**.
 
 ## Building the web assets
 
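Putting the constraint keys together, one named constraint could look like this sketch; the constraint name `paris-14`, the GPS coordinates and all bounds are illustrative:

```sh
# Write an illustrative constraints block (times are in seconds).
cat > constraints-example.json <<'EOF'
{
    "constraints": {
        "paris-14": {
            "type": "RENT",
            "house_types": ["APART"],
            "postal_codes": ["75014", "75013"],
            "area": [30, null],
            "cost": [null, 1500],
            "time_to": {
                "work": {
                    "gps": [48.8337, 2.3266],
                    "time": [null, 1800]
                }
            }
        }
    }
}
EOF
```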
@@ -219,30 +116,4 @@ provided that you name each of them uniquely.
 If you want to build the web assets, you can use `npm run build:dev`
 (respectively `npm run watch:dev` to build continuously and monitor changes in
 source files). You can use `npm run build:prod` (`npm run watch:prod`) to do
-the same in production mode (main difference is minification of generated
-source code).
-
-**Note**: If `npm run build:prod` fails, you can fall back to `npm run
-build:dev` safely.
-
-
-## Upgrading
-
-To update the app, you can simply `git pull` the latest version. The database
-schema might change from time to time. Here is how to update it automatically:
-
-* First, edit the `alembic.ini` file and ensure the `sqlalchemy.url` entry
-  points to the database URI you are actually using for Flatisfy.
-* Then, run `alembic upgrade head` to run the required migrations.
-
-## Misc
-
-### Other tools more or less connected with Flatisfy
-
-+ [ZipAround](https://github.com/guix77/ziparound) generates a list of ZIP codes centered on a city name, within a radius of N kilometers and within a certain travel time by car (France only). You can invoke it with:
-
-```sh
-npm ziparound
-# or alternatively
-npm ziparound --code 75001 --distance 3
-```
+the same in production mode (with minification etc).
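The removed upgrade procedure amounts to this sketch, assuming `sqlalchemy.url` in `alembic.ini` already points at your Flatisfy database:

```sh
git pull                 # grab the latest version of the app
alembic upgrade head     # replay any pending schema migrations
```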
@@ -20,6 +20,9 @@ virtualenv .env && source .env/bin/activate
 # Install required Python modules
 pip install -r requirements.txt
 
+# Clone and install weboob
+git clone https://git.weboob.org/weboob/devel weboob && cd weboob && python setup.py install && cd ..
+
 # Install required JS libraries and build the webapp
 npm install && npm run build:prod
 
@@ -39,19 +42,13 @@ python -m flatisfy build-data --config config/config.json -v
 python -m flatisfy import --config config/config.json -v
 ```
 
-_Note 1_: In the config, you should set `data_directory` to the absolute path of
+_Note_: In the config, you should set `data_directory` to the absolute path of
 the `data` directory created below. This directory should be writable by the
 user running Flatisfy. You should also set `modules_path` to the absolute path
-to the `modules` folder under the previous `woob` clone. Finally, the last
+to the `modules` folder under the previous `weboob` clone. Finally, the last
 `import` command can be `cron`-tasked to automatically fetch available
 housings posts periodically.
 
-_Note 2_: As of 2019-03-13, building the webapp requires libpng-dev to be able to build pngquant-bin. On Debian Stretch (tested with Node v11.11.0):
-
-    sudo apt install libpng-dev
-
-_Note 3_: If `npm run build:prod` fails, you can fall back to `npm run
-build:dev` safely.
-
 ## Use an alternative Bottle backend (production)
 
@@ -224,26 +221,3 @@ setup. You should also set the `.htpasswd` file with users and credentials.
 _Note_: This vhost is really minimalistic and you should adapt it to your
 setup, enforce SSL ciphers for increased security and do such good practices
 things.
-
-### If database is in read only
-
-In the case of you have a "flatisfy" user, and another user runs the webserver, for instance "www-data", you should have problems with the webapp reading, but not writing, the database. Workaround (Debian):
-
-Add www-data in flatisfy group:
-
-    sudo usermod -a -G flatisfy www-data
-
-Chmod data dir + DB file:
-
-    sudo chmod 775 data
-    sudo chmod 664 data/flatisfy.db
-
-Edit /etc/uwsgi/apps-available/flatisfy.ini and add:
-
-    chmod-socket = 664
-
-Restart:
-
-```bash
-systemctl restart uwsgi
-```
@@ -1,51 +0,0 @@
-Installing Flatisfy using Docker
-================================
-
-A basic `Dockerfile` is available for rapid testing. It is still really hacky
-and should not be used in production.
-
-1\. First, build the docker image:
-
-```
-cd docker
-docker build -t phyks/flatisfy .
-```
-
-2\. Then, create some folder to store your Flatisfy data in a permanent way (it
-will be mount as a Docker volume in next steps), and initialize an empty
-config:
-
-```
-mkdir flatisfy
-cd flatisfy
-FLATISFY_VOLUME=$(pwd)
-docker run --rm -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy sh -c "cd /home/user/app && python -m flatisfy init-config > /flatisfy/config.json"
-```
-
-
-3\. Then, edit the generated `$FLATISFY_VOLUME/config.json` file according to your needs. See
-[0.getting_started.md](0.getting_started.md) for more infos on the
-configuration file format. You will have to define your constraints (at
-least postal codes, house type and type of post), set `data_directory` to
-`/flatisfy` and set `host` to `0.0.0.0` to make the web UI accessible from
-outside the Docker container. The rest is up to you.
-
-
-4\. Finally, run the docker image to fetch flats and serve the web UI:
-
-```
-docker run -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy -p 8080:8080 phyks/flatisfy
-```
-
-Your Flatisfy instance is now available at `localhost:8080`!
-
-
-To fetch new housing posts, you should manually call
-
-```
-docker run --rm -it -e LOCAL_USER_ID=`id -u` -v $FLATISFY_VOLUME:/flatisfy phyks/flatisfy /home/user/fetch.sh
-```
-
-This can be done easily in a crontask on your host system, to run it typically
-every night.
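The nightly fetch suggested at the end of the removed Docker doc could be a cron entry along these lines; the host path stands in for `$FLATISFY_VOLUME`, and `-it` is dropped because cron provides no TTY:

```sh
# m h dom mon dow  command  -- run the fetch script every night at 2am
0 2 * * * docker run --rm -e LOCAL_USER_ID=$(id -u) -v /home/user/flatisfy:/flatisfy phyks/flatisfy /home/user/fetch.sh
```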
doc/3.faq.md: 36 changed lines
@@ -1,36 +0,0 @@
-FAQ
-===
-
-## What happens when duplicates are detected across different backends?
-
-There is a default precedence defined for each backend. This should be defined
-so that the backend with highest precedence is the backend that should contain
-the most precise information usually.
-
-When deduplicating, the post from the backend with the highest precedence is
-kept and missing info is taken from the duplicate posts (precedence is used so
-that in case of conflicts in a field, the data from the backend with highest
-precedence is used). This post contains as much data as possible, and includes
-references to all the other "duplicate" posts. These latter duplicate posts
-are then simply marked as such and never shown anymore.
-
-All origins are kept in a `urls` field in the remaining post.
-
-
-## Flatisfy seems to be stuck fetching posts
-
-Fetching posts can be a long process, depending on your criterias. Run the
-import command with `-v` argument to get a more verbose output and check
-things are indeed happening. If fetching the flats is still too long, try to
-set `max_entries` in your config to limit the number of posts fetched.
-
-
-## Docker image does not start the webserver at first start?
-
-When you launch the Docker image, it first updates Woob and fetches the
-housing posts matching your criterias. The webserver is only started once this
-is done. As fetching housing posts can take a bit of time (up to 10 minutes),
-the webserver will not be available right away.
-
-Once everything is ready, you should see a log message in the console running
-the Docker image, confirming you that webserver is up and running.
doc/Makefile: 225 changed lines
@@ -1,225 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = _build
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  applehelp  to make an Apple Help Book"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  epub3      to make an epub3"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-	@echo "  coverage   to run coverage check of the documentation (if enabled)"
-	@echo "  dummy      to check syntax errors of document sources"
-
-.PHONY: clean
-clean:
-	rm -rf $(BUILDDIR)/*
-
-.PHONY: html
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-.PHONY: dirhtml
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-.PHONY: singlehtml
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-.PHONY: pickle
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-.PHONY: json
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-.PHONY: htmlhelp
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-.PHONY: qthelp
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Flatisfy.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Flatisfy.qhc"
-
-.PHONY: applehelp
-applehelp:
-	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
-	@echo
-	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
-	@echo "N.B. You won't be able to view it unless you put it in" \
-	      "~/Library/Documentation/Help or install it in your application" \
-	      "bundle."
-
-.PHONY: devhelp
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/Flatisfy"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Flatisfy"
-	@echo "# devhelp"
-
-.PHONY: epub
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-.PHONY: epub3
-epub3:
-	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
-	@echo
-	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
-
-.PHONY: latex
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-.PHONY: latexpdf
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-.PHONY: latexpdfja
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-.PHONY: text
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-.PHONY: man
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-.PHONY: texinfo
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-.PHONY: info
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-.PHONY: gettext
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-.PHONY: changes
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-.PHONY: linkcheck
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-.PHONY: doctest
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-.PHONY: coverage
-coverage:
-	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
-	@echo "Testing of coverage in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/coverage/python.txt."
-
-.PHONY: xml
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-.PHONY: pseudoxml
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
-
-.PHONY: dummy
-dummy:
-	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
-	@echo
-	@echo "Build finished. Dummy builder generates no files."
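Before its removal, this Makefile drove the documentation build roughly as follows; a sketch assuming Sphinx and recommonmark are installed (both implied by `doc/conf.py` below):

```sh
pip install sphinx recommonmark   # autodoc/viewcode plus the CommonMark parser used by conf.py
cd doc
make html                         # output lands in _build/html
```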
doc/_static/.gitkeep (vendored): 0 changed lines (empty file)
349
doc/conf.py
@ -1,349 +0,0 @@
# -*- coding: utf-8 -*-
#
# Flatisfy documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 5 14:21:46 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.

import os
import sys

sys.path.insert(0, os.path.abspath(".."))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.viewcode",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = [".rst", ".md"]
source_parsers = {
    ".md": "recommonmark.parser.CommonMarkParser",
}

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = u"Flatisfy"
copyright = u"2017, Phyks (Lucas Verney)"
author = u"Phyks (Lucas Verney)"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u"0.1"
# The full version, including alpha/beta/rc tags.
release = u"0.1"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "classic"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Flatisfy v0.1'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = "Flatisfydoc"

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "Flatisfy.tex",
        u"Flatisfy Documentation",
        u"Phyks (Lucas Verney)",
        "manual",
    ),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "flatisfy", u"Flatisfy Documentation", [author], 1)]

# If true, show URL addresses after external links.
#
# man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "Flatisfy",
        u"Flatisfy Documentation",
        author,
        "Flatisfy",
        "One line description of project.",
        "Miscellaneous",
    ),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
@ -1,10 +0,0 @@
flatisfy.data_files package
===========================

Module contents
---------------

.. automodule:: flatisfy.data_files
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,38 +0,0 @@
flatisfy.database package
=========================

Submodules
----------

flatisfy.database.base module
-----------------------------

.. automodule:: flatisfy.database.base
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.database.types module
------------------------------

.. automodule:: flatisfy.database.types
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.database.whooshalchemy module
--------------------------------------

.. automodule:: flatisfy.database.whooshalchemy
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy.database
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,38 +0,0 @@
flatisfy.filters package
========================

Submodules
----------

flatisfy.filters.cache module
-----------------------------

.. automodule:: flatisfy.filters.cache
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.filters.duplicates module
----------------------------------

.. automodule:: flatisfy.filters.duplicates
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.filters.metadata module
--------------------------------

.. automodule:: flatisfy.filters.metadata
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy.filters
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,38 +0,0 @@
flatisfy.models package
=======================

Submodules
----------

flatisfy.models.flat module
---------------------------

.. automodule:: flatisfy.models.flat
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.models.postal_code module
----------------------------------

.. automodule:: flatisfy.models.postal_code
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.models.public_transport module
---------------------------------------

.. automodule:: flatisfy.models.public_transport
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy.models
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,97 +0,0 @@
flatisfy package
================

Subpackages
-----------

.. toctree::

    flatisfy.data_files
    flatisfy.database
    flatisfy.filters
    flatisfy.models
    flatisfy.web

Submodules
----------

flatisfy.cmds module
--------------------

.. automodule:: flatisfy.cmds
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.config module
----------------------

.. automodule:: flatisfy.config
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.constants module
-------------------------

.. automodule:: flatisfy.constants
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.data module
--------------------

.. automodule:: flatisfy.data
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.email module
---------------------

.. automodule:: flatisfy.email
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.exceptions module
--------------------------

.. automodule:: flatisfy.exceptions
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.fetch module
---------------------

.. automodule:: flatisfy.fetch
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.tests module
---------------------

.. automodule:: flatisfy.tests
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.tools module
---------------------

.. automodule:: flatisfy.tools
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,22 +0,0 @@
flatisfy.web.routes package
===========================

Submodules
----------

flatisfy.web.routes.api module
------------------------------

.. automodule:: flatisfy.web.routes.api
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy.web.routes
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,45 +0,0 @@
flatisfy.web package
====================

Subpackages
-----------

.. toctree::

    flatisfy.web.routes

Submodules
----------

flatisfy.web.app module
-----------------------

.. automodule:: flatisfy.web.app
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.web.configplugin module
--------------------------------

.. automodule:: flatisfy.web.configplugin
    :members:
    :undoc-members:
    :show-inheritance:

flatisfy.web.dbplugin module
----------------------------

.. automodule:: flatisfy.web.dbplugin
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: flatisfy.web
    :members:
    :undoc-members:
    :show-inheritance:
@ -1,26 +0,0 @@
.. Flatisfy documentation master file, created by
   sphinx-quickstart on Tue Dec 5 14:21:46 2017.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to Flatisfy's documentation!
====================================

.. automodule:: flatisfy

.. toctree::

    0.getting_started.md
    1.production.md
    2.docker.md
    3.faq.md
    modules.rst


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@ -1,7 +0,0 @@
Flatisfy
========

.. toctree::
    :maxdepth: 4

    flatisfy
@ -1,40 +0,0 @@
FROM python:3
MAINTAINER Phyks <phyks@phyks.me>

# Setup layout.
RUN useradd -d /home/user -m -s /bin/bash -U user

# Install OS dependencies.
RUN apt-get update && \
    apt-get install -y git libffi-dev \
    libxml2-dev libxslt-dev libyaml-dev libtiff-dev libjpeg-dev zlib1g-dev \
    libfreetype6-dev libwebp-dev build-essential gcc g++ wget;

# Install latest pip and python dependencies.
RUN pip install -U setuptools && \
    pip install html2text simplejson beautifulsoup4

# Install node.js.
RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - \
    && apt-get install -y nodejs

RUN mkdir -p /flatisfy/data
VOLUME /flatisfy

COPY ./*.sh /home/user/

# Install Flatisfy, set up directories and permissions.
RUN cd /home/user \
    && git clone https://framagit.org/phyks/Flatisfy.git/ ./app \
    && cd ./app \
    && pip install -r requirements.txt \
    && npm install \
    && npm run build:dev \
    && mkdir -p /home/user/.local/share/flatisfy \
    && chown user:user -R /home/user \
    && chmod +x /home/user/*.sh

# Run server.
EXPOSE 8080
ENTRYPOINT ["/home/user/entrypoint.sh"]
CMD ["/home/user/run.sh"]
@ -1,12 +0,0 @@
version: "3"
services:
  app:
    build: .
    # image: phyks/flatisfy
    environment:
      - LOCAL_USER_ID=1000
    volumes:
      - ./data:/flatisfy
    ports:
      - "8080:8080"
    working_dir: /home/user/app
@ -1,17 +0,0 @@
#!/bin/bash
set -e

# Add local user
# Either use the LOCAL_USER_ID if passed in at runtime or
# fallback
USER_ID=${LOCAL_USER_ID:-9001}

echo "[ENTRYPOINT] Starting with UID : $USER_ID"
usermod -u $USER_ID -o user
export HOME=/home/user

echo "[ENTRYPOINT] Setting fake values for git config..."
git config --global user.email flatisfy@example.com
git config --global user.name "Flatisfy Root"

exec su user -c "$@"
@ -1,6 +0,0 @@
#!/bin/bash
set -e

echo "Fetching new housing posts..."
cd /home/user/app
python -m flatisfy import -v --config /flatisfy/config.json
@ -1,13 +0,0 @@
#!/bin/bash
set -e

echo "Building data for Flatisfy..."
cd /home/user/app
python -m flatisfy build-data -v --config /flatisfy/config.json

echo "Fetching new housing posts..."
cd /home/user/app
python -m flatisfy import -v --config /flatisfy/config.json

echo "Starting web UI..."
python -m flatisfy serve -v --config /flatisfy/config.json
flatisfy/__main__.py
@ -8,17 +8,11 @@ import argparse
 import logging
 import sys
 
-logging.basicConfig()
-
-# pylint: disable=locally-disabled,wrong-import-position
 import flatisfy.config
 from flatisfy import cmds
 from flatisfy import data
 from flatisfy import fetch
 from flatisfy import tools
-from flatisfy import tests
-
-# pylint: enable=locally-disabled,wrong-import-position
-
 
 LOGGER = logging.getLogger("flatisfy")
@ -28,47 +22,64 @@ def parse_args(argv=None):
     """
     Create parser and parse arguments.
     """
-    parser = argparse.ArgumentParser(prog="Flatisfy", description="Find the perfect flat.")
+    parser = argparse.ArgumentParser(prog="Flatisfy",
+                                     description="Find the perfect flat.")
 
     # Parent parser containing arguments common to any subcommand
     parent_parser = argparse.ArgumentParser(add_help=False)
-    parent_parser.add_argument("--data-dir", help="Location of Flatisfy data directory.")
-    parent_parser.add_argument("--config", help="Configuration file to use.")
-    parent_parser.add_argument(
-        "--passes",
-        choices=[0, 1, 2, 3],
-        type=int,
-        help="Number of passes to do on the filtered data.",
-    )
-    parent_parser.add_argument("--max-entries", type=int, help="Maximum number of entries to fetch.")
-    parent_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose logging output.")
-    parent_parser.add_argument("-vv", action="store_true", help="Debug logging output.")
-    parent_parser.add_argument(
-        "--constraints",
-        type=str,
-        help="Comma-separated list of constraints to consider.",
-    )
+    parent_parser.add_argument(
+        "--data-dir",
+        help="Location of Flatisfy data directory."
+    )
+    parent_parser.add_argument(
+        "--config",
+        help="Configuration file to use."
+    )
+    parent_parser.add_argument(
+        "--passes", choices=[0, 1, 2], type=int,
+        help="Number of passes to do on the filtered data."
+    )
+    parent_parser.add_argument(
+        "--max-entries", type=int,
+        help="Maximum number of entries to fetch."
+    )
+    parent_parser.add_argument(
+        "-v", "--verbose", action="store_true",
+        help="Verbose logging output."
+    )
+    parent_parser.add_argument(
+        "-vv", action="store_true",
+        help="Debug logging output."
+    )
 
     # Subcommands
-    subparsers = parser.add_subparsers(dest="cmd", help="Available subcommands")
+    subparsers = parser.add_subparsers(
+        dest="cmd", help="Available subcommands"
+    )
 
     # Build data subcommand
-    subparsers.add_parser("build-data", parents=[parent_parser], help="Build necessary data")
+    subparsers.add_parser(
+        "build-data", parents=[parent_parser],
+        help="Build necessary data"
+    )
 
     # Init config subcommand
     parser_init_config = subparsers.add_parser(
-        "init-config", parents=[parent_parser], help="Initialize empty configuration."
-    )
-    parser_init_config.add_argument("output", nargs="?", help="Output config file. Use '-' for stdout.")
+        "init-config", parents=[parent_parser],
+        help="Initialize empty configuration."
+    )
+    parser_init_config.add_argument(
+        "output", nargs="?", help="Output config file. Use '-' for stdout."
+    )
 
     # Fetch subcommand parser
-    subparsers.add_parser("fetch", parents=[parent_parser], help="Fetch housings posts")
+    subparsers.add_parser("fetch", parents=[parent_parser],
+                          help="Fetch housings posts")
 
     # Filter subcommand parser
     parser_filter = subparsers.add_parser(
-        "filter",
-        parents=[parent_parser],
-        help="Filter housings posts according to constraints in config.",
+        "filter", parents=[parent_parser],
+        help="Filter housings posts according to constraints in config."
     )
     parser_filter.add_argument(
         "--input",
@ -77,29 +88,24 @@ def parse_args(argv=None):
             "no additional fetching of infos is done, and the script outputs "
             "a filtered JSON dump on stdout. If not provided, update status "
             "of the flats in the database."
-        ),
+        )
     )
 
     # Import subcommand parser
-    import_filter = subparsers.add_parser("import", parents=[parent_parser], help="Import housing posts in database.")
-    import_filter.add_argument(
-        "--new-only",
-        action="store_true",
-        help=("Download new housing posts only but do not refresh existing ones"),
-    )
+    subparsers.add_parser("import", parents=[parent_parser],
+                          help="Import housing posts in database.")
 
     # Purge subcommand parser
-    subparsers.add_parser("purge", parents=[parent_parser], help="Purge database.")
+    subparsers.add_parser("purge", parents=[parent_parser],
+                          help="Purge database.")
 
     # Serve subcommand parser
-    parser_serve = subparsers.add_parser("serve", parents=[parent_parser], help="Serve the web app.")
+    parser_serve = subparsers.add_parser("serve", parents=[parent_parser],
+                                         help="Serve the web app.")
     parser_serve.add_argument("--port", type=int, help="Port to bind to.")
     parser_serve.add_argument("--host", help="Host to listen on.")
 
-    # Test subcommand parser
-    subparsers.add_parser("test", parents=[parent_parser], help="Unit testing.")
-
-    return parser, parser.parse_args(argv)
+    return parser.parse_args(argv)
@ -108,91 +114,82 @@ def main():
     """
     # pylint: disable=locally-disabled,too-many-branches
     # Parse arguments
-    parser, args = parse_args()
+    args = parse_args()
 
     # Set logger
-    if getattr(args, 'vv', False):
-        logging.getLogger("").setLevel(logging.DEBUG)
-        logging.getLogger("titlecase").setLevel(logging.INFO)
-        logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
-    elif getattr(args, 'verbose', False):
-        logging.getLogger("").setLevel(logging.INFO)
+    if args.vv:
+        logging.basicConfig(level=logging.DEBUG)
+        logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
+    elif args.verbose:
+        logging.basicConfig(level=logging.INFO)
         # sqlalchemy INFO level is way too loud, just stick with WARNING
-        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
+        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
     else:
-        logging.getLogger("").setLevel(logging.WARNING)
-        logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
+        logging.basicConfig(level=logging.WARNING)
+        logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
 
     # Init-config command
     if args.cmd == "init-config":
         flatisfy.config.init_config(args.output)
         sys.exit(0)
     else:
-        if not args.cmd:
-            parser.print_help()
-            sys.exit(0)
-
         # Load config
-        if args.cmd == "build-data":
-            # Data not yet built, do not use it in config checks
-            config = flatisfy.config.load_config(args, check_with_data=False)
-        else:
-            config = flatisfy.config.load_config(args, check_with_data=True)
+        config = flatisfy.config.load_config(args)
         if config is None:
-            LOGGER.error(
-                "Invalid configuration. Exiting. "
-                "Run init-config before if this is the first time "
-                "you run Flatisfy."
-            )
+            LOGGER.error("Invalid configuration. Exiting. "
+                         "Run init-config before if this is the first time "
+                         "you run Flatisfy.")
             sys.exit(1)
 
-    # Purge command
-    if args.cmd == "purge":
-        cmds.purge_db(config)
-        return
-
-    # Build data files command
-    if args.cmd == "build-data":
-        data.preprocess_data(config, force=True)
-        return
+    # Build data files
+    try:
+        if args.cmd == "build-data":
+            data.preprocess_data(config, force=True)
+            sys.exit(0)
+        else:
+            data.preprocess_data(config)
+    except flatisfy.exceptions.DataBuildError:
+        sys.exit(1)
 
     # Fetch command
     if args.cmd == "fetch":
         # Fetch and filter flats list
-        fetched_flats = fetch.fetch_flats(config)
-        fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=True)
+        flats_list = fetch.fetch_flats_list(config)
+        flats_list = cmds.filter_flats(config, flats_list=flats_list,
+                                       fetch_details=True)["new"]
         # Sort by cost
-        fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
+        flats_list = tools.sort_list_of_dicts_by(flats_list, "cost")
 
-        print(tools.pretty_json(fetched_flats))
-        return
+        print(
+            tools.pretty_json(flats_list)
+        )
     # Filter command
     elif args.cmd == "filter":
         # Load and filter flats list
         if args.input:
-            fetched_flats = fetch.load_flats_from_file(args.input, config)
-
-            fetched_flats = cmds.filter_fetched_flats(config, fetched_flats=fetched_flats, fetch_details=False)
+            flats_list = fetch.load_flats_list_from_file(args.input)
+
+            flats_list = cmds.filter_flats(config, flats_list=flats_list,
+                                           fetch_details=False)["new"]
 
             # Sort by cost
-            fetched_flats = {k: tools.sort_list_of_dicts_by(v["new"], "cost") for k, v in fetched_flats.items()}
+            flats_list = tools.sort_list_of_dicts_by(flats_list, "cost")
 
             # Output to stdout
-            print(tools.pretty_json(fetched_flats))
+            print(
+                tools.pretty_json(flats_list)
+            )
         else:
             cmds.import_and_filter(config, load_from_db=True)
-        return
     # Import command
     elif args.cmd == "import":
-        cmds.import_and_filter(config, load_from_db=False, new_only=args.new_only)
-        return
+        cmds.import_and_filter(config, load_from_db=False)
+    # Purge command
+    elif args.cmd == "purge":
+        cmds.purge_db(config)
     # Serve command
     elif args.cmd == "serve":
         cmds.serve(config)
-        return
-
-    # Tests command
-    elif args.cmd == "test":
-        tests.run()
-        return
 
 
 if __name__ == "__main__":
153 flatisfy/cmds.py
@ -6,49 +6,32 @@ from __future__ import absolute_import, print_function, unicode_literals
 import collections
 import logging
-import os
 
 import flatisfy.filters
 from flatisfy import database
-from flatisfy import email
 from flatisfy.models import flat as flat_model
-from flatisfy.models import postal_code as postal_code_model
-from flatisfy.models import public_transport as public_transport_model
 from flatisfy import fetch
 from flatisfy import tools
 from flatisfy.filters import metadata
 from flatisfy.web import app as web_app
-import time
-from ratelimit.exception import RateLimitException
 
 LOGGER = logging.getLogger(__name__)
 
 
-def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
+def filter_flats(config, flats_list, fetch_details=True):
     """
     Filter the available flats list. Then, filter it according to criteria.
 
     :param config: A config dict.
-    :param constraint_name: The constraint name that the ``flats_list`` should
-        satisfy.
-    :param flats_list: The initial list of flat objects to filter.
     :param fetch_details: Whether additional details should be fetched between
         the two passes.
-    :param past_flats: The list of already fetched flats
+    :param flats_list: The initial list of flat objects to filter.
     :return: A dict mapping flat status and list of flat objects.
     """
+    # pylint: disable=locally-disabled,redefined-variable-type
     # Add the flatisfy metadata entry and prepare the flat objects
-    flats_list = metadata.init(flats_list, constraint_name)
+    flats_list = metadata.init(flats_list)
 
-    # Get the associated constraint from config
-    try:
-        constraint = config["constraints"][constraint_name]
-    except KeyError:
-        LOGGER.error(
-            "Missing constraint %s. Skipping filtering for these posts.",
-            constraint_name,
-        )
-        return {"new": [], "duplicate": [], "ignored": []}
-
     first_pass_result = collections.defaultdict(list)
     second_pass_result = collections.defaultdict(list)
@ -56,122 +39,78 @@ def filter_flats_list(config, constraint_name, flats_list, fetch_details=True, past_flats=None):
     # Do a first pass with the available infos to try to remove as much
     # unwanted postings as possible
     if config["passes"] > 0:
-        first_pass_result = flatisfy.filters.first_pass(flats_list, constraint, config)
+        first_pass_result = flatisfy.filters.first_pass(flats_list,
+                                                        config)
     else:
         first_pass_result["new"] = flats_list
 
     # Load additional infos
     if fetch_details:
-        past_ids = {x["id"]: x for x in past_flats} if past_flats else {}
         for i, flat in enumerate(first_pass_result["new"]):
-            details = None
-
-            use_cache = past_ids.get(flat["id"])
-            if use_cache:
-                LOGGER.debug("Skipping details download for %s.", flat["id"])
-                details = use_cache
-            else:
-                if flat["id"].split("@")[1] in ["seloger", "leboncoin"]:
-                    try:
-                        details = fetch.fetch_details_rate_limited(config, flat["id"])
-                    except RateLimitException:
-                        time.sleep(60)
-                        details = fetch.fetch_details_rate_limited(config, flat["id"])
-                else:
-                    details = fetch.fetch_details(config, flat["id"])
-
+            details = fetch.fetch_details(config, flat["id"])
             first_pass_result["new"][i] = tools.merge_dicts(flat, details)
 
     # Do a second pass to consolidate all the infos we found and make use of
     # additional infos
     if config["passes"] > 1:
-        second_pass_result = flatisfy.filters.second_pass(first_pass_result["new"], constraint, config)
+        second_pass_result = flatisfy.filters.second_pass(
+            first_pass_result["new"], config
+        )
     else:
         second_pass_result["new"] = first_pass_result["new"]
 
     # Do a third pass to deduplicate better
     if config["passes"] > 2:
-        third_pass_result = flatisfy.filters.third_pass(second_pass_result["new"], config)
+        third_pass_result = flatisfy.filters.third_pass(
+            second_pass_result["new"], config
+        )
     else:
         third_pass_result["new"] = second_pass_result["new"]
 
     return {
         "new": third_pass_result["new"],
         "duplicate": (
-            first_pass_result["duplicate"] + second_pass_result["duplicate"] + third_pass_result["duplicate"]
+            first_pass_result["duplicate"] +
+            second_pass_result["duplicate"] +
+            third_pass_result["duplicate"]
         ),
-        "ignored": (first_pass_result["ignored"] + second_pass_result["ignored"] + third_pass_result["ignored"]),
+        "ignored": (
+            first_pass_result["ignored"] +
+            second_pass_result["ignored"] +
+            third_pass_result["ignored"]
+        )
     }
 
 
-def filter_fetched_flats(config, fetched_flats, fetch_details=True, past_flats={}):
-    """
-    Filter the available flats list. Then, filter it according to criteria.
-
-    :param config: A config dict.
-    :param fetch_details: Whether additional details should be fetched between
-        the two passes.
-    :param fetched_flats: The initial dict mapping constraints to the list of
-        fetched flat objects to filter.
-    :return: A dict mapping constraints to a dict mapping flat status and list
-        of flat objects.
-    """
-    for constraint_name, flats_list in fetched_flats.items():
-        fetched_flats[constraint_name] = filter_flats_list(
-            config,
-            constraint_name,
-            flats_list,
-            fetch_details,
-            past_flats.get(constraint_name, None),
-        )
-    return fetched_flats
-
-
-def import_and_filter(config, load_from_db=False, new_only=False):
+def import_and_filter(config, load_from_db=False):
     """
     Fetch the available flats list. Then, filter it according to criteria.
     Finally, store it in the database.
 
     :param config: A config dict.
     :param load_from_db: Whether to load flats from database or fetch them
-        using Woob.
+        using Weboob.
     :return: ``None``.
     """
     # Fetch and filter flats list
-    past_flats = fetch.load_flats_from_db(config)
     if load_from_db:
-        fetched_flats = past_flats
+        flats_list = fetch.load_flats_list_from_db(config)
     else:
-        fetched_flats = fetch.fetch_flats(config)
+        flats_list = fetch.fetch_flats_list(config)
     # Do not fetch additional details if we loaded data from the db.
-    flats_by_status = filter_fetched_flats(
-        config,
-        fetched_flats=fetched_flats,
-        fetch_details=(not load_from_db),
-        past_flats=past_flats if new_only else {},
-    )
+    flats_list_by_status = filter_flats(config, flats_list=flats_list,
+                                        fetch_details=(not load_from_db))
     # Create database connection
     get_session = database.init_db(config["database"], config["search_index"])
 
-    new_flats = []
-    result = []
-
     LOGGER.info("Merging fetched flats in database...")
-    # Flatten the flats_by_status dict
-    flatten_flats_by_status = collections.defaultdict(list)
-    for flats in flats_by_status.values():
-        for status, flats_list in flats.items():
-            flatten_flats_by_status[status].extend(flats_list)
-
     with get_session() as session:
-        # Set is_expired to true for all existing flats.
-        # This will be set back to false if we find them during importing.
-        for flat in session.query(flat_model.Flat).all():
-            flat.is_expired = True
-
-        for status, flats_list in flatten_flats_by_status.items():
+        for status, flats_list in flats_list_by_status.items():
             # Build SQLAlchemy Flat model objects for every available flat
-            flats_objects = {flat_dict["id"]: flat_model.Flat.from_dict(flat_dict) for flat_dict in flats_list}
+            flats_objects = {
+                flat_dict["id"]: flat_model.Flat.from_dict(flat_dict)
+                for flat_dict in flats_list
+            }
 
             if flats_objects:
                 # If there are some flats, try to merge them with the ones in
@ -184,13 +123,11 @@ def import_and_filter(config, load_from_db=False, new_only=False):
                     # status if the user defined it
                     flat_object = flats_objects[each.id]
                     if each.status in flat_model.AUTOMATED_STATUSES:
-                        flat_object.status = getattr(flat_model.FlatStatus, status)
+                        flat_object.status = getattr(
+                            flat_model.FlatStatus, status
+                        )
                     else:
                         flat_object.status = each.status
-
-                    # Every flat we fetched isn't expired
-                    flat_object.is_expired = False
-
                     # For each flat already in the db, merge it (UPDATE)
                     # instead of adding it
                     session.merge(flats_objects.pop(each.id))
@ -199,24 +136,9 @@ def import_and_filter(config, load_from_db=False, new_only=False):
                 # just set the status field without worrying
                 for flat in flats_objects.values():
                     flat.status = getattr(flat_model.FlatStatus, status)
-                    if flat.status == flat_model.FlatStatus.new:
-                        new_flats.append(flat)
-                        result.append(flat.id)
 
                 session.add_all(flats_objects.values())
 
-    if config["send_email"]:
-        email.send_notification(config, new_flats)
-
-    LOGGER.info(f"Found {len(result)} new flats.")
-
-    # Touch a file to indicate last update timestamp
-    ts_file = os.path.join(config["data_directory"], "timestamp")
-    with open(ts_file, "w"):
-        os.utime(ts_file, None)
-
     LOGGER.info("Done!")
-    return result
 
 
 def purge_db(config):
@ -235,10 +157,6 @@ def purge_db(config):
             # Use (slower) deletion by object, to ensure whoosh index is
             # updated
             session.delete(flat)
-    LOGGER.info("Purge all postal codes from the database.")
-    session.query(postal_code_model.PostalCode).delete()
-    LOGGER.info("Purge all public transportations from the database.")
-    session.query(public_transport_model.PublicTransport).delete()
 
 
 def serve(config):
@ -256,5 +174,4 @@ def serve(config):
     # standard logging
     server = web_app.QuietWSGIRefServer
-
-    print("Launching web viewer running on http://%s:%s" % (config["host"], config["port"]))
     app.run(host=config["host"], port=config["port"], server=server)
flatisfy/config.py
@ -15,70 +15,38 @@ import sys
 import traceback
 
 import appdirs
-from woob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES
 
-from flatisfy import data
 from flatisfy import tools
-from flatisfy.constants import TimeToModes
-from flatisfy.models.postal_code import PostalCode
-
-
-DIRPATH = os.path.dirname(os.path.realpath(__file__))
 
 
 # Default configuration
 DEFAULT_CONFIG = {
     # Constraints to match
     "constraints": {
-        "default": {
-            "type": None,  # RENT, SALE, SHARING
-            "house_types": [],  # List of house types, must be in APART, HOUSE,
-            # PARKING, LAND, OTHER or UNKNOWN
-            "postal_codes": [],  # List of postal codes
-            "insees": [],  # List of postal codes
-            "area": (None, None),  # (min, max) in m^2
-            "cost": (None, None),  # (min, max) in currency unit
-            "rooms": (None, None),  # (min, max)
-            "bedrooms": (None, None),  # (min, max)
-            "minimum_nb_photos": None,  # min number of photos
-            "description_should_contain": [],  # list of terms (str) or list
-            # (acting as an or)
-            "description_should_not_contain": [
-                "vendu",
-                "Vendu",
-                "VENDU",
-                "recherche",
-            ],
-            "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
-            #                                       "time": (min, max),
-            #                                       "mode": Valid mode }
-            # Time is in seconds
-        }
+        "type": None,  # RENT, SALE, SHARING
+        "house_types": [],  # List of house types, must be in APART, HOUSE,
+                            # PARKING, LAND, OTHER or UNKNOWN
+        "postal_codes": [],  # List of postal codes
+        "area": (None, None),  # (min, max) in m^2
+        "cost": (None, None),  # (min, max) in currency unit
+        "rooms": (None, None),  # (min, max)
+        "bedrooms": (None, None),  # (min, max)
+        "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
+                       #                        "time": (min, max) }
+                       # Time is in seconds
     },
-    # Whether or not to store personal data from housing posts (phone number
-    # etc)
-    "store_personal_data": False,
-    # Max distance between an housing and a found station, to avoid
-    # false-positive
-    "max_distance_housing_station": 1500,
-    # Score to consider two flats as being duplicates
-    "duplicate_threshold": 15,
-    # Score to consider two images as being duplicates through hash comparison
-    "duplicate_image_hash_threshold": 10,
-    # Whether images should be downloaded and served locally
-    "serve_images_locally": True,
     # Navitia API key
     "navitia_api_key": None,
-    # Mapbox API key
-    "mapbox_api_key": None,
    # Number of filtering passes to run
     "passes": 3,
     # Maximum number of entries to fetch
     "max_entries": None,
     # Directory in wich data will be put. ``None`` is XDG default location.
     "data_directory": None,
-    # Path to the modules directory containing all Woob modules.
-    "modules_path": os.path.join(DIRPATH, '..', 'modules'),
+    # Path to the modules directory containing all Weboob modules. ``None`` if
+    # ``weboob_modules`` package is pip-installed, and you want to use
+    # ``pkgresource`` to automatically find it.
+    "modules_path": None,
     # SQLAlchemy URI to the database to use
     "database": None,
     # Path to the Whoosh search index file. Use ``None`` to put it in
@@ -90,43 +58,33 @@ DEFAULT_CONFIG = {
     "host": "127.0.0.1",
     # Web server to use to serve the webapp (see Bottle deployment doc)
     "webserver": None,
-    # List of Woob backends to use (default to any backend available)
+    # List of Weboob backends to use (default to any backend available)
     "backends": None,
-    # Should email notifications be sent?
-    "send_email": False,
-    "smtp_server": "localhost",
-    "smtp_port": 25,
-    "smtp_username": None,
-    "smtp_password": None,
-    "smtp_from": "noreply@flatisfy.org",
-    "smtp_to": [],
-    "notification_lang": "en",
-    # The web site url, to be used in email notifications. (doesn't matter
-    # whether the trailing slash is present or not)
-    "website_url": "http://127.0.0.1:8080",
-    "ignore_station": False,
 }

 LOGGER = logging.getLogger(__name__)


-def validate_config(config, check_with_data):
+def validate_config(config):
     """
     Check that the config passed as argument is a valid configuration.

     :param config: A config dictionary to fetch.
-    :param check_with_data: Whether we should use the available OpenData to
-        check the config values.
     :return: ``True`` if the configuration is valid, ``False`` otherwise.
     """

     def _check_constraints_bounds(bounds):
         """
         Check the bounds for numeric constraints.
         """
-        assert isinstance(bounds, list)
         assert len(bounds) == 2
-        assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
+        assert all(
+            x is None or
+            (
+                isinstance(x, (float, int)) and
+                x >= 0
+            )
+            for x in bounds
+        )
         if bounds[0] is not None and bounds[1] is not None:
             assert bounds[1] > bounds[0]
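To make the bounds contract concrete, here is a standalone sketch mirroring the helper above (the name check_bounds is invented for illustration):

    def check_bounds(bounds):
        # A bounds value is a [min, max] pair; each end may be None (open),
        # and when both ends are set, max must be strictly greater than min.
        assert isinstance(bounds, list)
        assert len(bounds) == 2
        assert all(x is None or (isinstance(x, (float, int)) and x >= 0)
                   for x in bounds)
        if bounds[0] is not None and bounds[1] is not None:
            assert bounds[1] > bounds[0]

    check_bounds([30, 70])      # OK
    check_bounds([None, 1500])  # OK: open lower bound
    # check_bounds([70, 30])    # would raise AssertionError (max <= min)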
@@ -136,14 +94,45 @@ def validate_config(config, check_with_data):
     # and use long lines whenever needed, in order to have the full assert
     # message in the log output.
     # pylint: disable=locally-disabled,line-too-long
+        assert "type" in config["constraints"]
+        assert config["constraints"]["type"].upper() in ["RENT",
+                                                         "SALE", "SHARING"]
+
+        assert "house_types" in config["constraints"]
+        assert config["constraints"]["house_types"]
+        for house_type in config["constraints"]["house_types"]:
+            assert house_type.upper() in ["APART", "HOUSE", "PARKING", "LAND",
+                                          "OTHER", "UNKNOWN"]
+
+        assert "postal_codes" in config["constraints"]
+        assert config["constraints"]["postal_codes"]
+
+        assert "area" in config["constraints"]
+        _check_constraints_bounds(config["constraints"]["area"])
+
+        assert "cost" in config["constraints"]
+        _check_constraints_bounds(config["constraints"]["cost"])
+
+        assert "rooms" in config["constraints"]
+        _check_constraints_bounds(config["constraints"]["rooms"])
+
+        assert "bedrooms" in config["constraints"]
+        _check_constraints_bounds(config["constraints"]["bedrooms"])
+
+        assert "time_to" in config["constraints"]
+        assert isinstance(config["constraints"]["time_to"], dict)
+        for name, item in config["constraints"]["time_to"].items():
+            assert isinstance(name, str)
+            assert "gps" in item
+            assert isinstance(item["gps"], list)
+            assert len(item["gps"]) == 2
+            assert "time" in item
+            _check_constraints_bounds(item["time"])
+
         assert config["passes"] in [0, 1, 2, 3]
-        assert config["max_entries"] is None or (
-            isinstance(config["max_entries"], int) and config["max_entries"] > 0
-        )  # noqa: E501
+        assert config["max_entries"] is None or (isinstance(config["max_entries"], int) and config["max_entries"] > 0)  # noqa: E501

         assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
-        assert os.path.isdir(config["data_directory"])
         assert isinstance(config["search_index"], str)
         assert config["modules_path"] is None or isinstance(config["modules_path"], str)  # noqa: E501
@@ -154,115 +143,17 @@ def validate_config(config, check_with_data):
         assert config["webserver"] is None or isinstance(config["webserver"], str)  # noqa: E501
         assert config["backends"] is None or isinstance(config["backends"], list)  # noqa: E501

-        assert isinstance(config["send_email"], bool)
-        assert config["smtp_server"] is None or isinstance(config["smtp_server"], str)  # noqa: E501
-        assert config["smtp_port"] is None or isinstance(config["smtp_port"], int)  # noqa: E501
-        assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
-        assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
-        assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
-        assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)
-
-        assert isinstance(config["store_personal_data"], bool)
-        assert isinstance(config["max_distance_housing_station"], (int, float))
-        assert isinstance(config["duplicate_threshold"], int)
-        assert isinstance(config["duplicate_image_hash_threshold"], int)
-
-        # API keys
-        assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
-        assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501
-
-        assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501
-
-        # Ensure constraints are ok
-        assert config["constraints"]
-        for constraint in config["constraints"].values():
-            assert "type" in constraint
-            assert isinstance(constraint["type"], str)
-            assert constraint["type"].upper() in POSTS_TYPES.__members__
-
-            assert "minimum_nb_photos" in constraint
-            if constraint["minimum_nb_photos"]:
-                assert isinstance(constraint["minimum_nb_photos"], int)
-                assert constraint["minimum_nb_photos"] >= 0
-
-            assert "description_should_contain" in constraint
-            assert isinstance(constraint["description_should_contain"], list)
-            if constraint["description_should_contain"]:
-                for term in constraint["description_should_contain"]:
-                    try:
-                        assert isinstance(term, str)
-                    except AssertionError:
-                        assert isinstance(term, list)
-                        assert all(isinstance(x, str) for x in term)
-
-            assert "description_should_not_contain" in constraint
-            assert isinstance(constraint["description_should_not_contain"], list)
-            if constraint["description_should_not_contain"]:
-                for term in constraint["description_should_not_contain"]:
-                    assert isinstance(term, str)
-
-            assert "house_types" in constraint
-            assert constraint["house_types"]
-            for house_type in constraint["house_types"]:
-                assert house_type.upper() in HOUSE_TYPES.__members__
-
-            assert "postal_codes" in constraint
-            assert constraint["postal_codes"]
-            assert all(isinstance(x, str) for x in constraint["postal_codes"])
-            if "insee_codes" in constraint:
-                assert constraint["insee_codes"]
-                assert all(isinstance(x, str) for x in constraint["insee_codes"])
-
-            if check_with_data:
-                # Ensure data is built into db
-                data.preprocess_data(config, force=False)
-                # Check postal codes
-                opendata = data.load_data(PostalCode, constraint, config)
-                opendata_postal_codes = [x.postal_code for x in opendata]
-                opendata_insee_codes = [x.insee_code for x in opendata]
-                for postal_code in constraint["postal_codes"]:
-                    assert postal_code in opendata_postal_codes  # noqa: E501
-                if "insee_codes" in constraint:
-                    for insee in constraint["insee_codes"]:
-                        assert insee in opendata_insee_codes  # noqa: E501
-
-            assert "area" in constraint
-            _check_constraints_bounds(constraint["area"])
-
-            assert "cost" in constraint
-            _check_constraints_bounds(constraint["cost"])
-
-            assert "rooms" in constraint
-            _check_constraints_bounds(constraint["rooms"])
-
-            assert "bedrooms" in constraint
-            _check_constraints_bounds(constraint["bedrooms"])
-
-            assert "time_to" in constraint
-            assert isinstance(constraint["time_to"], dict)
-            for name, item in constraint["time_to"].items():
-                assert isinstance(name, str)
-                assert "gps" in item
-                assert isinstance(item["gps"], list)
-                assert len(item["gps"]) == 2
-                assert "time" in item
-                _check_constraints_bounds(item["time"])
-                if "mode" in item:
-                    TimeToModes[item["mode"]]
-
         return True
     except (AssertionError, KeyError):
         _, _, exc_traceback = sys.exc_info()
         return traceback.extract_tb(exc_traceback)[-1][-1]


-def load_config(args=None, check_with_data=True):
+def load_config(args=None):
     """
     Load the configuration from file.

     :param args: An argparse args structure.
-    :param check_with_data: Whether we should use the available OpenData to
-        check the config values. Defaults to ``True``.
     :return: The loaded config dict.
     """
     LOGGER.info("Initializing configuration...")
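Note the unusual return contract of validate_config above: ``True`` on success, otherwise the line number of the failing assertion, recovered from the traceback. A caller sketch (assuming flatisfy is importable; the sample config is deliberately broken):

    from flatisfy.config import validate_config

    bad_config = {"constraints": {}}  # missing almost everything
    result = validate_config(bad_config, check_with_data=False)
    if result is not True:
        # ``result`` is the config.py line number of the assert that failed
        print("Invalid configuration, first failing check at line", result)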
@@ -277,18 +168,22 @@ def load_config(args=None, check_with_data=True):
             config_data.update(json.load(fh))
     except (IOError, ValueError) as exc:
         LOGGER.error(
-            "Unable to load configuration from file, using default configuration: %s.",
-            exc,
+            "Unable to load configuration from file, "
+            "using default configuration: %s.",
+            exc
         )

     # Overload config with arguments
     if args and getattr(args, "passes", None) is not None:
-        LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
+        LOGGER.debug(
+            "Overloading number of passes from CLI arguments: %d.",
+            args.passes
+        )
         config_data["passes"] = args.passes
     if args and getattr(args, "max_entries", None) is not None:
         LOGGER.debug(
             "Overloading maximum number of entries from CLI arguments: %d.",
-            args.max_entries,
+            args.max_entries
         )
         config_data["max_entries"] = args.max_entries
     if args and getattr(args, "port", None) is not None:
@@ -303,41 +198,26 @@ def load_config(args=None, check_with_data=True):
         LOGGER.debug("Overloading data directory from CLI arguments.")
         config_data["data_directory"] = args.data_dir
     elif config_data["data_directory"] is None:
-        config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
-        LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])
-
-    if not os.path.isdir(config_data["data_directory"]):
-        LOGGER.info(
-            "Creating data directory according to config: %s",
-            config_data["data_directory"],
-        )
-        os.makedirs(config_data["data_directory"])
-        os.makedirs(os.path.join(config_data["data_directory"], "images"))
+        config_data["data_directory"] = appdirs.user_data_dir(
+            "flatisfy",
+            "flatisfy"
+        )
+        LOGGER.debug("Using default XDG data directory: %s.",
+                     config_data["data_directory"])

     if config_data["database"] is None:
-        config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")
+        config_data["database"] = "sqlite:///" + os.path.join(
+            config_data["data_directory"],
+            "flatisfy.db"
+        )

     if config_data["search_index"] is None:
-        config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")
-
-    # Handle constraints filtering
-    if args and getattr(args, "constraints", None) is not None:
-        LOGGER.info(
-            (
-                "Filtering constraints from config according to CLI argument. "
-                "Using only the following constraints: %s."
-            ),
-            args.constraints.replace(",", ", "),
-        )
-        constraints_filter = args.constraints.split(",")
-        config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}
-
-    # Sanitize website url
-    if config_data["website_url"] is not None:
-        if config_data["website_url"][-1] != "/":
-            config_data["website_url"] += "/"
-
-    config_validation = validate_config(config_data, check_with_data)
+        config_data["search_index"] = os.path.join(
+            config_data["data_directory"],
+            "search_index"
+        )
+
+    config_validation = validate_config(config_data)
     if config_validation is True:
         LOGGER.info("Config has been fully initialized.")
         return config_data
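Putting the derived defaults above together, a usage sketch (assuming flatisfy's appdirs dependency is installed; actual paths differ per system):

    import os
    import appdirs

    # This mirrors what load_config() fills in when no explicit values are given.
    data_directory = appdirs.user_data_dir("flatisfy", "flatisfy")
    database_uri = "sqlite:///" + os.path.join(data_directory, "flatisfy.db")
    search_index = os.path.join(data_directory, "search_index")
    print(database_uri)  # e.g. sqlite:////home/user/.local/share/flatisfy/flatisfy.db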
flatisfy/constants.py

@@ -1,27 +0,0 @@
-# coding: utf-8
-"""
-Constants used across the app.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-
-from enum import Enum
-
-# Some backends give more infos than others. Here is the precedence we want to
-# use. First is most important one, last is the one that will always be
-# considered as less trustable if two backends have similar info about a
-# housing.
-BACKENDS_BY_PRECEDENCE = [
-    "foncia",
-    "seloger",
-    "pap",
-    "leboncoin",
-    "explorimmo",
-    "logicimmo",
-]
-
-
-class TimeToModes(Enum):
-    PUBLIC_TRANSPORT = -1
-    WALK = 1
-    BIKE = 2
-    CAR = 3
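The TimeToModes enum removed above is looked up by name in validate_config, so an invalid "mode" string fails loudly. A self-contained illustration:

    from enum import Enum

    class TimeToModes(Enum):
        PUBLIC_TRANSPORT = -1
        WALK = 1
        BIKE = 2
        CAR = 3

    print(TimeToModes["PUBLIC_TRANSPORT"].value)  # -1
    # TimeToModes["TELEPORT"] would raise KeyError, rejecting the config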
202 flatisfy/data.py

@@ -1,100 +1,166 @@
-# coding: utf-8
+# coding : utf-8
 """
 This module contains all the code related to building necessary data files from
 the source opendata files.
 """
 from __future__ import absolute_import, print_function, unicode_literals

+import collections
+import json
 import logging
+import os

 import flatisfy.exceptions

-from flatisfy import database
-from flatisfy import data_files
-from flatisfy.models.postal_code import PostalCode
-from flatisfy.models.public_transport import PublicTransport
-from flatisfy.tools import hash_dict

 LOGGER = logging.getLogger(__name__)
+MODULE_DIR = os.path.dirname(os.path.realpath(__file__))

-# Try to load lru_cache
-try:
-    from functools import lru_cache
-except ImportError:
-    try:
-        from functools32 import lru_cache
-    except ImportError:
-        def lru_cache(maxsize=None):  # pylint: disable=unused-argument
-            """
-            Identity implementation of ``lru_cache`` for fallback.
-            """
-            return lambda func: func
-        LOGGER.warning(
-            "`functools.lru_cache` is not available on your system. Consider "
-            "installing `functools32` Python module if using Python2 for "
-            "better performances."
-        )
+def _preprocess_ratp(output_dir):
+    """
+    Build RATP file from the RATP data.
+
+    :param output_dir: Directory in which the output file should reside.
+    :return: ``True`` on successful build, ``False`` otherwise.
+    """
+    ratp_data_raw = []
+    # Load opendata file
+    try:
+        with open(os.path.join(MODULE_DIR, "data_files/ratp.json"), "r") as fh:
+            ratp_data_raw = json.load(fh)
+    except (IOError, ValueError):
+        LOGGER.error("Invalid raw RATP opendata file.")
+        return False
+
+    # Process it
+    ratp_data = collections.defaultdict(list)
+    for item in ratp_data_raw:
+        stop_name = item["fields"]["stop_name"].lower()
+        ratp_data[stop_name].append({
+            "gps": item["fields"]["coord"],
+            "name": item["fields"]["stop_name"]
+        })
+
+    # Output it
+    with open(os.path.join(output_dir, "ratp.json"), "w") as fh:
+        json.dump(ratp_data, fh)
+
+    return True
+
+
+def _preprocess_laposte(output_dir):
+    """
+    Build JSON files from the postal codes data.
+
+    :param output_dir: Directory in which the output file should reside.
+    :return: ``True`` on successful build, ``False`` otherwise.
+    """
+    raw_laposte_data = []
+    # Load opendata file
+    try:
+        with open(
+            os.path.join(MODULE_DIR, "data_files/laposte.json"), "r"
+        ) as fh:
+            raw_laposte_data = json.load(fh)
+    except (IOError, ValueError):
+        LOGGER.error("Invalid raw LaPoste opendata file.")
+        return False
+
+    # Build postal codes to other infos file
+    postal_codes_data = {}
+    for item in raw_laposte_data:
+        try:
+            postal_codes_data[item["fields"]["code_postal"]] = {
+                "gps": item["fields"]["coordonnees_gps"],
+                "nom": item["fields"]["nom_de_la_commune"].title()
+            }
+        except KeyError:
+            LOGGER.info("Missing data for postal code %s, skipping it.",
+                        item["fields"]["code_postal"])
+    with open(os.path.join(output_dir, "postal_codes.json"), "w") as fh:
+        json.dump(postal_codes_data, fh)
+
+    # Build city name to postal codes and other infos file
+    cities_data = {}
+    for item in raw_laposte_data:
+        try:
+            cities_data[item["fields"]["nom_de_la_commune"].title()] = {
+                "gps": item["fields"]["coordonnees_gps"],
+                "postal_code": item["fields"]["code_postal"]
+            }
+        except KeyError:
+            LOGGER.info("Missing data for city %s, skipping it.",
+                        item["fields"]["nom_de_la_commune"])
+    with open(os.path.join(output_dir, "cities.json"), "w") as fh:
+        json.dump(cities_data, fh)
+
+    return True


 def preprocess_data(config, force=False):
     """
-    Ensures that all the necessary data have been inserted in db from the raw
+    Ensures that all the necessary data files have been built from the raw
     opendata files.

     :params config: A config dictionary.
     :params force: Whether to force rebuild or not.
-    :return bool: Whether data have been built or not.
     """
-    # Check if a build is required
-    get_session = database.init_db(config["database"], config["search_index"])
-    with get_session() as session:
-        is_built = session.query(PublicTransport).count() > 0 and session.query(PostalCode).count() > 0
-        if is_built and not force:
-            # No need to rebuild the database, skip
-            return False
-        # Otherwise, purge all existing data
-        session.query(PublicTransport).delete()
-        session.query(PostalCode).delete()
-
-    # Build all opendata files
-    LOGGER.info("Rebuilding data...")
-    for preprocess in data_files.PREPROCESSING_FUNCTIONS:
-        data_objects = preprocess()
-        if not data_objects:
-            raise flatisfy.exceptions.DataBuildError("Error with %s." % preprocess.__name__)
-        with get_session() as session:
-            session.add_all(data_objects)
-    LOGGER.info("Done building data!")
-    return True
+    LOGGER.debug("Data directory is %s.", config["data_directory"])
+    opendata_directory = os.path.join(config["data_directory"], "opendata")
+    try:
+        LOGGER.info("Ensuring the data directory exists.")
+        os.makedirs(opendata_directory)
+        LOGGER.debug("Created opendata directory at %s.", opendata_directory)
+    except OSError:
+        LOGGER.debug("Opendata directory already existed, doing nothing.")
+
+    is_built_ratp = os.path.isfile(
+        os.path.join(opendata_directory, "ratp.json")
+    )
+    if not is_built_ratp or force:
+        LOGGER.info("Building from RATP data.")
+        if not _preprocess_ratp(opendata_directory):
+            raise flatisfy.exceptions.DataBuildError("Error with RATP data.")
+
+    is_built_laposte = (
+        os.path.isfile(os.path.join(opendata_directory, "cities.json")) and
+        os.path.isfile(os.path.join(opendata_directory, "postal_codes.json"))
+    )
+    if not is_built_laposte or force:
+        LOGGER.info("Building from LaPoste data.")
+        if not _preprocess_laposte(opendata_directory):
+            raise flatisfy.exceptions.DataBuildError(
+                "Error with LaPoste data."
+            )


-@hash_dict
-@lru_cache(maxsize=5)
-def load_data(model, constraint, config):
+def load_data(data_type, config):
     """
-    Load data of the specified model from the database. Only load data for the
-    specific areas of the postal codes in config.
+    Load a given built data file.

-    :param model: SQLAlchemy model to load.
-    :param constraint: A constraint from configuration to limit the spatial
-        extension of the loaded data.
+    :param data_type: A valid data identifier.
     :param config: A config dictionary.
-    :returns: A list of loaded SQLAlchemy objects from the db
+    :return: The loaded data. ``None`` if the query is incorrect.
     """
-    get_session = database.init_db(config["database"], config["search_index"])
-    results = []
-    with get_session() as session:
-        areas = []
-        # Get areas to fetch from, using postal codes
-        for postal_code in constraint["postal_codes"]:
-            areas.append(data_files.french_postal_codes_to_quarter(postal_code))
-        # Load data for each area
-        areas = list(set(areas))
-        for area in areas:
-            results.extend(session.query(model).filter(model.area == area).all())
-        # Expunge loaded data from the session to be able to use them
-        # afterwards
-        session.expunge_all()
-    return results
+    if data_type not in ["postal_codes", "cities", "ratp"]:
+        LOGGER.error("Invalid request. No %s data file.", data_type)
+        return None
+
+    opendata_directory = os.path.join(config["data_directory"], "opendata")
+    datafile_path = os.path.join(opendata_directory, "%s.json" % data_type)
+    data = {}
+    try:
+        with open(datafile_path, "r") as fh:
+            data = json.load(fh)
+    except IOError:
+        LOGGER.error("No such data file: %s.", datafile_path)
+        return None
+    except ValueError:
+        LOGGER.error("Invalid JSON data file: %s.", datafile_path)
+        return None
+
+    if not data:
+        LOGGER.warning("Loading empty data for %s.", data_type)
+
+    return data
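Usage sketch for the master-side pipeline above: preprocess_data() fills the database once, then load_data() pulls rows restricted to the quarters covered by the constraint's postal codes; the @hash_dict wrapper is what makes the dict arguments hashable for @lru_cache. Assuming flatisfy is importable and a valid config.json is available:

    from flatisfy import data
    from flatisfy.config import load_config
    from flatisfy.models.postal_code import PostalCode

    config = load_config()        # assumption: valid config.json available
    data.preprocess_data(config)  # no-op if the db is already built
    constraint = config["constraints"]["default"]
    postal_codes = data.load_data(PostalCode, constraint, config)
    print(len(postal_codes), "postal codes loaded")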
flatisfy/data_files/__init__.py

@@ -1,206 +0,0 @@
-# coding: utf-8
-"""
-Preprocessing functions to convert input opendata files into SQLAlchemy objects
-ready to be stored in the database.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-import io
-import json
-import logging
-import os
-import sys
-
-import titlecase
-
-from flatisfy.models.postal_code import PostalCode
-from flatisfy.models.public_transport import PublicTransport
-from flatisfy.tools import normalize_string
-
-import csv
-
-
-LOGGER = logging.getLogger(__name__)
-MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
-
-titlecase.set_small_word_list(
-    # Add French small words
-    r"l|d|un|une|et|à|a|sur|ou|le|la|de|lès|les|"
-    + titlecase.SMALL
-)
-
-TRANSPORT_DATA_FILES = {
-    "FR-IDF": "stops_fr-idf.txt",
-    "FR-NW": "stops_fr-nw.txt",
-    "FR-NE": "stops_fr-ne.txt",
-    "FR-SW": "stops_fr-sw.txt",
-    "FR-SE": "stops_fr-se.txt",
-}
-
-
-def french_postal_codes_to_quarter(postal_code):
-    """
-    Convert a French postal code to the main quarter in France this postal
-    code belongs to.
-
-    :param postal_code: The postal code to convert.
-    :returns: The quarter of France or ``None``.
-    """
-    departement = postal_code[:2]
-
-    # Mapping between areas (main subdivisions in French, ISO 3166-2) and
-    # French departements
-    # Taken from Wikipedia data.
-    department_to_subdivision = {
-        "FR-ARA": ["01", "03", "07", "15", "26", "38", "42", "43", "63", "69", "73", "74"],
-        "FR-BFC": ["21", "25", "39", "58", "70", "71", "89", "90"],
-        "FR-BRE": ["22", "29", "35", "44", "56"],
-        "FR-CVL": ["18", "28", "36", "37", "41", "45"],
-        "FR-COR": ["20"],
-        "FR-GES": ["08", "10", "51", "52", "54", "55", "57", "67", "68", "88"],
-        "FR-HDF": ["02", "59", "60", "62", "80"],
-        "FR-IDF": ["75", "77", "78", "91", "92", "93", "94", "95"],
-        "FR-NOR": ["14", "27", "50", "61", "76"],
-        "FR-NAQ": ["16", "17", "19", "23", "24", "33", "40", "47", "64", "79", "86", "87"],
-        "FR-OCC": ["09", "11", "12", "30", "31", "32", "34", "46", "48", "65", "66", "81", "82"],
-        "FR-PDL": ["44", "49", "53", "72", "85"],
-        "FR-PAC": ["04", "05", "06", "13", "83", "84"],
-    }
-    subdivision_to_quarters = {
-        "FR-IDF": ["FR-IDF"],
-        "FR-NW": ["FR-BRE", "FR-CVL", "FR-NOR", "FR-PDL"],
-        "FR-NE": ["FR-BFC", "FR-GES", "FR-HDF"],
-        "FR-SE": ["FR-ARA", "FR-COR", "FR-PAC", "FR-OCC"],
-        "FR-SW": ["FR-NAQ"],
-    }
-
-    subdivision = next(
-        (i for i, departments in department_to_subdivision.items() if departement in departments),
-        None,
-    )
-    return next(
-        (i for i, subdivisions in subdivision_to_quarters.items() if subdivision in subdivisions),
-        None,
-    )
-
-
-def _preprocess_laposte():
-    """
-    Build SQLAlchemy objects from the postal codes data.
-
-    :return: A list of ``PostalCode`` objects to be inserted in database.
-    """
-    data_file = "laposte.json"
-    LOGGER.info("Building from %s data.", data_file)
-
-    raw_laposte_data = []
-    # Load opendata file
-    try:
-        with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
-            raw_laposte_data = json.load(fh)
-    except (IOError, ValueError):
-        LOGGER.error("Invalid raw LaPoste opendata file.")
-        return []
-
-    # Build postal codes to other infos file
-    postal_codes_data = []
-    # Keep track of seen (postal_codes, names) to avoid inserting useless
-    # duplicates (already in the OpenData file)
-    seen_postal_codes = []
-    for item in raw_laposte_data:
-        fields = item["fields"]
-        try:
-            area = french_postal_codes_to_quarter(fields["code_postal"])
-            if area is None:
-                LOGGER.debug(
-                    "No matching area found for postal code %s, skipping it.",
-                    fields["code_postal"],
-                )
-                continue
-
-            name = normalize_string(titlecase.titlecase(fields["nom_de_la_commune"]), lowercase=False)
-
-            if (fields["code_postal"], name) in seen_postal_codes:
-                continue
-
-            seen_postal_codes.append((fields["code_postal"], name))
-            postal_codes_data.append(
-                PostalCode(
-                    area=area,
-                    postal_code=fields["code_postal"],
-                    insee_code=fields["code_commune_insee"],
-                    name=name,
-                    lat=fields["coordonnees_gps"][0],
-                    lng=fields["coordonnees_gps"][1],
-                )
-            )
-        except KeyError:
-            LOGGER.debug("Missing data for postal code %s, skipping it.", fields["code_postal"])
-
-    return postal_codes_data
-
-
-def _preprocess_public_transport():
-    """
-    Build SQLAlchemy objects from the Navitia public transport data.
-
-    :return: A list of ``PublicTransport`` objects to be inserted in database.
-    """
-    public_transport_data = []
-    # Load opendata file
-    for area, data_file in TRANSPORT_DATA_FILES.items():
-        LOGGER.info("Building from public transport data %s.", data_file)
-        try:
-            with io.open(os.path.join(MODULE_DIR, data_file), "r", encoding="utf-8") as fh:
-                filereader = csv.reader(fh)
-                next(filereader, None)  # Skip first row (headers)
-                for row in filereader:
-                    public_transport_data.append(PublicTransport(name=row[2], area=area, lat=row[3], lng=row[4]))
-        except (IOError, IndexError):
-            LOGGER.error("Invalid raw opendata file: %s.", data_file)
-            return []
-
-    return public_transport_data
-
-
-# List of all the available preprocessing functions. Order can be important.
-PREPROCESSING_FUNCTIONS = [_preprocess_laposte, _preprocess_public_transport]
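A quick illustration of the quarter mapping removed above (assuming flatisfy is importable on the master side; outputs follow the tables in the function):

    from flatisfy.data_files import french_postal_codes_to_quarter

    print(french_postal_codes_to_quarter("75014"))  # FR-IDF
    print(french_postal_codes_to_quarter("35000"))  # FR-NW (Brittany)
    print(french_postal_codes_to_quarter("99999"))  # None: unknown departement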
1 flatisfy/data_files/ratp.json (new file; content not shown)
flatisfy/database/__init__.py

@@ -11,7 +11,7 @@ from contextlib import contextmanager
 from sqlalchemy import event, create_engine
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker
-from sqlalchemy.exc import OperationalError, SQLAlchemyError
+from sqlalchemy.exc import SQLAlchemyError

 import flatisfy.models.flat  # noqa: F401
 from flatisfy.database.base import BASE

@@ -47,7 +47,9 @@ def init_db(database_uri=None, search_db_uri=None):
     Session = sessionmaker(bind=engine)  # pylint: disable=locally-disabled,invalid-name

     if search_db_uri:
-        index_service = IndexService(whoosh_base=search_db_uri)
+        index_service = IndexService(
+            whoosh_base=search_db_uri
+        )
         index_service.register_class(flatisfy.models.flat.Flat)

     @contextmanager
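Usage sketch for init_db() as shown above: it returns a session factory usable as a context manager, with the Whoosh index service hooked onto the Flat model when a search index path is given (the URI and paths below are illustrative):

    from flatisfy import database
    from flatisfy.models import flat as flat_model

    get_session = database.init_db("sqlite:////tmp/flatisfy.db",
                                   "/tmp/search_index")
    with get_session() as session:
        print(session.query(flat_model.Flat).count())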
flatisfy/database/types.py

@@ -16,14 +16,11 @@ class StringyJSON(types.TypeDecorator):
     From
     https://avacariu.me/articles/2016/compiling-json-as-text-for-sqlite-with-sqlalchemy.

-    .. note ::
-
-        The associated field is immutable. That is, changes to the data
-        (typically, changing the value of a dict field) will not trigger an
-        update on the SQL side upon ``commit`` as the reference to the object
-        will not have been updated. One should force the update by forcing an
-        update of the reference (by performing a ``copy`` operation on the dict
-        for instance).
+    .. note :: The associated field is immutable. That is, changes to the data
+    (typically, changing the value of a dict field) will not trigger an update
+    on the SQL side upon ``commit`` as the reference to the object will not
+    have been updated. One should force the update by forcing an update of the
+    reference (by performing a ``copy`` operation on the dict for instance).
     """

     impl = types.TEXT

@@ -50,4 +47,4 @@ class StringyJSON(types.TypeDecorator):
     # TypeEngine.with_variant says "use StringyJSON instead when
     # connecting to 'sqlite'"
     # pylint: disable=locally-disabled,invalid-name
-MagicJSON = types.JSON().with_variant(StringyJSON, "sqlite")
+MagicJSON = types.JSON().with_variant(StringyJSON, 'sqlite')
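The immutability note above in practice: mutating the JSON value in place is invisible to SQLAlchemy's change tracking, so rebind a fresh copy instead. A stand-in sketch (FakeFlat is invented for illustration; a real Flat instance behaves the same way for this purpose):

    class FakeFlat:
        def __init__(self):
            self.details = {"notes": "original"}

    flat = FakeFlat()
    flat.details["notes"] = "updated"  # in-place change: not seen on commit
    flat.details = dict(flat.details, notes="updated")  # new reference: seen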
flatisfy/database/whooshalchemy.py

@@ -30,6 +30,7 @@ from whoosh.qparser import MultifieldParser
+

 class IndexService(object):

     def __init__(self, config=None, whoosh_base=None):
         if not whoosh_base and config:
             whoosh_base = config.get("WHOOSH_BASE")

@@ -83,7 +84,8 @@ class IndexService(object):
                 primary = field.name
                 continue
             if field.name in model_class.__searchable__:
-                schema[field.name] = whoosh.fields.TEXT(analyzer=StemmingAnalyzer())
+                schema[field.name] = whoosh.fields.TEXT(
+                    analyzer=StemmingAnalyzer())
         return Schema(**schema), primary

     def before_commit(self, session):

@@ -91,18 +93,21 @@ class IndexService(object):
         for model in session.new:
             model_class = model.__class__
-            if hasattr(model_class, "__searchable__"):
-                self.to_update.setdefault(model_class.__name__, []).append(("new", model))
+            if hasattr(model_class, '__searchable__'):
+                self.to_update.setdefault(model_class.__name__, []).append(
+                    ("new", model))

         for model in session.deleted:
             model_class = model.__class__
-            if hasattr(model_class, "__searchable__"):
-                self.to_update.setdefault(model_class.__name__, []).append(("deleted", model))
+            if hasattr(model_class, '__searchable__'):
+                self.to_update.setdefault(model_class.__name__, []).append(
+                    ("deleted", model))

         for model in session.dirty:
             model_class = model.__class__
-            if hasattr(model_class, "__searchable__"):
-                self.to_update.setdefault(model_class.__name__, []).append(("changed", model))
+            if hasattr(model_class, '__searchable__'):
+                self.to_update.setdefault(model_class.__name__, []).append(
+                    ("changed", model))

     def after_commit(self, session):
         """

@@ -123,11 +128,16 @@ class IndexService(object):
             # added as a new doc. Could probably replace this with a whoosh
             # update.

-            writer.delete_by_term(primary_field, text_type(getattr(model, primary_field)))
+            writer.delete_by_term(
+                primary_field, text_type(getattr(model, primary_field)))

             if change_type in ("new", "changed"):
-                attrs = dict((key, getattr(model, key)) for key in searchable)
-                attrs = {attr: text_type(getattr(model, attr)) for attr in attrs.keys()}
+                attrs = dict((key, getattr(model, key))
+                             for key in searchable)
+                attrs = {
+                    attr: text_type(getattr(model, attr))
+                    for attr in attrs.keys()
+                }
                 attrs[primary_field] = text_type(getattr(model, primary_field))
                 writer.add_document(**attrs)

@@ -148,7 +158,8 @@ class Searcher(object):
         self.parser = MultifieldParser(list(fields), index.schema)

     def __call__(self, session, query, limit=None):
-        results = self.index.searcher().search(self.parser.parse(query), limit=limit)
+        results = self.index.searcher().search(
+            self.parser.parse(query), limit=limit)

         keys = [x[self.primary] for x in results]
         primary_column = getattr(self.model_class, self.primary)
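register_class() attaches a Searcher to the indexed model, so full-text search goes through the model class; note the __call__ signature above takes the session first. A hedged sketch (the search_query attribute name follows the usual whooshalchemy convention and is an assumption here):

    from flatisfy import database
    from flatisfy.config import load_config
    from flatisfy.models import flat as flat_model

    config = load_config()  # assumption: valid config available
    get_session = database.init_db(config["database"], config["search_index"])
    with get_session() as session:
        results = flat_model.Flat.search_query(session, "terrasse", limit=10)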
flatisfy/email.py

@@ -1,154 +0,0 @@
-# coding: utf-8
-"""
-Email notifications.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-from builtins import str
-
-import logging
-import smtplib
-from money import Money
-from email.mime.multipart import MIMEMultipart
-from email.mime.text import MIMEText
-from email.utils import formatdate, make_msgid
-
-LOGGER = logging.getLogger(__name__)
-
-
-def send_email(server, port, subject, _from, _to, txt, html, username=None, password=None):
-    """
-    Send an email
-
-    :param server: SMTP server to use.
-    :param port: SMTP port to use.
-    :param subject: Subject of the email to send.
-    :param _from: Email address of the sender.
-    :param _to: List of email addresses of the receivers.
-    :param txt: Text version of the message.
-    :param html: HTML version of the message.
-    """
-    if not _to:
-        LOGGER.warn("No recipients for the email notifications, aborting.")
-        return
-
-    server = smtplib.SMTP(server, port)
-    if username or password:
-        server.login(username or "", password or "")
-
-    msg = MIMEMultipart("alternative")
-    msg["Subject"] = subject
-    msg["From"] = _from
-    msg["To"] = ", ".join(_to)
-    msg["Date"] = formatdate()
-    msg["Message-ID"] = make_msgid()
-
-    msg.attach(MIMEText(txt, "plain", "utf-8"))
-    msg.attach(MIMEText(html, "html", "utf-8"))
-
-    server.sendmail(_from, _to, msg.as_string())
-    server.quit()
-
-
-def send_notification(config, flats):
-    """
-    Send an email notification about new available flats.
-
-    :param config: A config dict.
-    :param flats: List of flats to include in the notification.
-    """
-    # Don't send an email if there are no new flats.
-    if not flats:
-        return
-
-    i18n = {
-        "en": {
-            "subject": f"{len(flats)} new flats found!",
-            "hello": "Hello dear user",
-            "following_new_flats": "The following new flats have been found:",
-            "area": "area",
-            "cost": "cost",
-            "signature": "Hope you'll find what you were looking for.",
-        },
-        "fr": {
-            "subject": f"{len(flats)} nouvelles annonces disponibles !",
-            "hello": "Bonjour cher utilisateur",
-            "following_new_flats": "Voici les nouvelles annonces :",
-            "area": "surface",
-            "cost": "coût",
-            "signature": "Bonne recherche",
-        },
-    }
-    trs = i18n.get(config["notification_lang"], "en")
-
-    txt = trs["hello"] + ",\n\n\n\n"
-    html = f"""
-    <html>
-      <head></head>
-      <body>
-        <p>{trs["hello"]}!</p>
-        <p>{trs["following_new_flats"]}
-
-            <ul>
-    """
-
-    website_url = config["website_url"]
-
-    for flat in flats:
-        title = str(flat.title)
-        flat_id = str(flat.id)
-        try:
-            area = str(int(flat.area))
-        except (TypeError, ValueError):
-            area = None
-        try:
-            cost = int(flat.cost)
-        except (TypeError, ValueError):
-            cost = None
-        currency = str(flat.currency)
-
-        txt += f"- {title}: {website_url}#/flat/{flat_id} "
-        html += f"""
-        <li>
-            <a href="{website_url}#/flat/{flat_id}">{title}</a>
-        """
-
-        fields = []
-        if area:
-            fields.append(f"{trs['area']}: {area}m²")
-        if cost:
-            if currency == '$':
-                currency = 'USD'
-            if currency == '€':
-                currency = 'EUR'
-            money = Money(cost, currency).format(config["notification_lang"])
-            fields.append(f"{trs['cost']}: {money.format()}")
-
-        if len(fields):
-            txt += f'({", ".join(fields)})'
-            html += f'({", ".join(fields)})'
-
-        html += "</li>"
-        txt += "\n"
-
-    html += "</ul>"
-
-    signature = f"\n{trs['signature']}\n\nBye!\nFlatisfy"
-    txt += signature
-    html += signature.replace("\n", "<br>")
-
-    html += """</p>
-  </body>
-</html>"""
-
-    send_email(
-        config["smtp_server"],
-        config["smtp_port"],
-        trs["subject"],
-        config["smtp_from"],
-        config["smtp_to"],
-        txt,
-        html,
-        config.get("smtp_username"),
-        config.get("smtp_password"),
-    )
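Driving the removed helper directly, for reference (addresses and server are invented; assuming this module lives at flatisfy/email.py as on master):

    from flatisfy.email import send_email

    send_email(
        server="localhost",
        port=25,
        subject="2 new flats found!",
        _from="noreply@flatisfy.org",
        _to=["user@example.com"],
        txt="Hello, 2 new flats were found...",
        html="<html><body><p>Hello, 2 new flats were found...</p></body></html>",
    )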
flatisfy/exceptions.py

@@ -1,4 +1,4 @@
-# coding: utf-8
+# coding : utf-8
 """
 This module contains all the exceptions definitions for the Flatisfy-specific
 exceptions.

@@ -10,5 +10,4 @@ class DataBuildError(Exception):
     """
     Error occurring on building a data file.
     """
-
     pass
@ -3,44 +3,40 @@
|
|||||||
This module contains all the code related to fetching and loading flats lists.
|
This module contains all the code related to fetching and loading flats lists.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
from builtins import str
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from ratelimit import limits
|
|
||||||
|
|
||||||
from flatisfy import database
|
from flatisfy import database
|
||||||
from flatisfy import tools
|
from flatisfy import tools
|
||||||
from flatisfy.constants import BACKENDS_BY_PRECEDENCE
|
|
||||||
from flatisfy.models import flat as flat_model
|
from flatisfy.models import flat as flat_model
|
||||||
|
|
||||||
LOGGER = logging.getLogger(__name__)
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from woob.capabilities.housing import Query, POSTS_TYPES, HOUSE_TYPES
|
from weboob.capabilities.housing import Query
|
||||||
from woob.core.bcall import CallErrors
|
from weboob.core.bcall import CallErrors
|
||||||
from woob.core.ouiboube import WebNip
|
from weboob.core.ouiboube import WebNip
|
||||||
from woob.tools.json import WoobEncoder
|
from weboob.tools.json import WeboobEncoder
|
||||||
except ImportError:
|
except ImportError:
|
||||||
LOGGER.error("Woob is not available on your system. Make sure you installed it.")
|
LOGGER.error("Weboob is not available on your system. Make sure you "
|
||||||
|
"installed it.")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
class WoobProxy(object):
|
class WeboobProxy(object):
|
||||||
"""
|
"""
|
||||||
Wrapper around Woob ``WebNip`` class, to fetch housing posts without
|
Wrapper around Weboob ``WebNip`` class, to fetch housing posts without
|
||||||
having to spawn a subprocess.
|
having to spawn a subprocess.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def version():
|
def version():
|
||||||
"""
|
"""
|
||||||
Get Woob version.
|
Get Weboob version.
|
||||||
|
|
||||||
:return: The installed Woob version.
|
:return: The installed Weboob version.
|
||||||
"""
|
"""
|
||||||
return WebNip.VERSION
|
return WebNip.VERSION
|
||||||
|
|
||||||
@ -64,13 +60,14 @@ class WoobProxy(object):
|
|||||||
|
|
||||||
def __init__(self, config):
|
def __init__(self, config):
|
||||||
"""
|
"""
|
||||||
Create a Woob handle and try to load the modules.
|
Create a Weboob handle and try to load the modules.
|
||||||
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
"""
|
"""
|
||||||
# Default backends
|
# Default backends
|
||||||
if not config["backends"]:
|
if not config["backends"]:
|
||||||
backends = BACKENDS_BY_PRECEDENCE
|
backends = ["seloger", "pap", "leboncoin", "logicimmo",
|
||||||
|
"explorimmo", "entreparticuliers"]
|
||||||
else:
|
else:
|
||||||
backends = config["backends"]
|
backends = config["backends"]
|
||||||
|
|
||||||
@ -78,14 +75,14 @@ class WoobProxy(object):
|
|||||||
self.webnip = WebNip(modules_path=config["modules_path"])
|
self.webnip = WebNip(modules_path=config["modules_path"])
|
||||||
|
|
||||||
# Create backends
|
# Create backends
|
||||||
self.backends = []
|
self.backends = [
|
||||||
for module in backends:
|
self.webnip.load_backend(
|
||||||
try:
|
module,
|
||||||
self.backends.append(
|
module,
|
||||||
self.webnip.load_backend(module, module, params={})
|
params={}
|
||||||
)
|
)
|
||||||
except Exception as exc:
|
for module in backends
|
||||||
raise Exception('Unable to load module ' + module) from exc
|
]
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
return self
|
return self
|
||||||
@ -95,58 +92,63 @@ class WoobProxy(object):
|
|||||||
|
|
||||||
def build_queries(self, constraints_dict):
|
def build_queries(self, constraints_dict):
|
||||||
"""
|
"""
|
||||||
Build Woob ``woob.capabilities.housing.Query`` objects from the
|
Build Weboob ``weboob.capabilities.housing.Query`` objects from the
|
||||||
constraints defined in the configuration. Each query has at most 3
|
constraints defined in the configuration. Each query has at most 3
|
||||||
cities, to comply with housing websites limitations.
|
postal codes, to comply with housing websites limitations.
|
||||||
|
|
||||||
:param constraints_dict: A dictionary of constraints, as defined in the
|
:param constraints_dict: A dictionary of constraints, as defined in the
|
||||||
config.
|
config.
|
||||||
:return: A list of Woob ``woob.capabilities.housing.Query``
|
:return: A list of Weboob ``weboob.capabilities.housing.Query``
|
||||||
objects. Returns ``None`` if an error occurred.
|
objects. Returns ``None`` if an error occurred.
|
||||||
"""
|
"""
|
||||||
queries = []
|
queries = []
|
||||||
|
for postal_codes in tools.batch(constraints_dict["postal_codes"], 3):
|
||||||
# First, find all matching cities for the postal codes in constraints
|
query = Query()
|
||||||
|
query.cities = []
|
||||||
|
for postal_code in postal_codes:
|
||||||
matching_cities = []
|
matching_cities = []
|
||||||
for postal_code in constraints_dict["postal_codes"]:
|
|
||||||
try:
|
try:
|
||||||
for city in self.webnip.do("search_city", postal_code):
|
for city in self.webnip.do("search_city", postal_code):
|
||||||
matching_cities.append(city)
|
matching_cities.append(city)
|
||||||
except CallErrors as exc:
|
except CallErrors as exc:
|
||||||
# If an error occured, just log it
|
# If an error occured, just log it
|
||||||
LOGGER.error(
|
LOGGER.error(
|
||||||
("An error occured while building query for postal code %s: %s"),
|
(
|
||||||
|
"An error occured while building query for "
|
||||||
|
"postal code %s: %s"
|
||||||
|
),
|
||||||
postal_code,
|
postal_code,
|
||||||
str(exc),
|
str(exc)
|
||||||
)
|
)
|
||||||
|
|
||||||
if not matching_cities:
|
if not matching_cities:
|
||||||
# If postal code gave no match, warn the user
|
# If postal code gave no match, warn the user
|
||||||
LOGGER.warn("Postal code %s could not be matched with a city.", postal_code)
|
LOGGER.warn(
|
||||||
|
"Postal code %s could not be matched with a city.",
|
||||||
|
postal_code
|
||||||
|
)
|
||||||
|
|
||||||
# Remove "TOUTES COMMUNES" entry which are duplicates of the individual
|
# Append the matched cities to the query
|
||||||
# cities entries in Logicimmo module.
|
for city in matching_cities:
|
||||||
matching_cities = [
|
query.cities.append(city)
|
||||||
city
|
|
||||||
for city in matching_cities
|
|
||||||
if not (city.backend == "logicimmo" and city.name.startswith("TOUTES COMMUNES"))
|
|
||||||
]
|
|
||||||
|
|
||||||
# Then, build queries by grouping cities by at most 3
|
|
||||||
for cities_batch in tools.batch(matching_cities, 3):
|
|
||||||
query = Query()
|
|
||||||
query.cities = list(cities_batch)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
query.house_types = [
|
query.house_types = [
|
||||||
getattr(HOUSE_TYPES, house_type.upper()) for house_type in constraints_dict["house_types"]
|
getattr(
|
||||||
|
Query.HOUSE_TYPES,
|
||||||
|
house_type.upper()
|
||||||
|
)
|
||||||
|
for house_type in constraints_dict["house_types"]
|
||||||
]
|
]
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
LOGGER.error("Invalid house types constraint.")
|
LOGGER.error("Invalid house types constraint.")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
query.type = getattr(POSTS_TYPES, constraints_dict["type"].upper())
|
query.type = getattr(
|
||||||
|
Query,
|
||||||
|
"TYPE_{}".format(constraints_dict["type"].upper())
|
||||||
|
)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
LOGGER.error("Invalid post type constraint.")
|
LOGGER.error("Invalid post type constraint.")
|
||||||
return None
|
return None
|
||||||
@@ -161,141 +163,114 @@ class WoobProxy(object):

         return queries

-    def query(self, query, max_entries=None, store_personal_data=False):
+    def query(self, query, max_entries=None):
         """
-        Fetch the housings posts matching a given Woob query.
+        Fetch the housings posts matching a given Weboob query.

-        :param query: A Woob `woob.capabilities.housing.Query`` object.
+        :param query: A Weboob `weboob.capabilities.housing.Query`` object.
         :param max_entries: Maximum number of entries to fetch.
-        :param store_personal_data: Whether personal data should be fetched
-            from housing posts (phone number etc).
         :return: The matching housing posts, dumped as a list of JSON objects.
         """
         housings = []
-        # List the useful backends for this specific query
-        useful_backends = [x.backend for x in query.cities]
         # TODO: Handle max_entries better
         try:
             for housing in itertools.islice(
-                self.webnip.do(
-                    "search_housings",
-                    query,
-                    # Only run the call on the required backends.
-                    # Otherwise, Woob is doing weird stuff and returning
-                    # nonsense.
-                    backends=[x for x in self.backends if x.name in useful_backends],
-                ),
-                max_entries,
+                self.webnip.do('search_housings', query),
+                max_entries
             ):
-                if not store_personal_data:
-                    housing.phone = None
-                housings.append(json.dumps(housing, cls=WoobEncoder))
+                housings.append(json.dumps(housing, cls=WeboobEncoder))
         except CallErrors as exc:
             # If an error occurred, just log it
-            LOGGER.error("An error occurred while fetching the housing posts: %s", str(exc))
+            LOGGER.error(
+                "An error occurred while fetching the housing posts: %s",
+                str(exc)
+            )
         return housings

-    def info(self, full_flat_id, store_personal_data=False):
+    def info(self, full_flat_id):
         """
         Get information (details) about a housing post.

-        :param full_flat_id: A Woob housing post id, in complete form
+        :param full_flat_id: A Weboob housing post id, in complete form
             (ID@BACKEND)
-        :param store_personal_data: Whether personal data should be fetched
-            from housing posts (phone number etc).
         :return: The details in JSON.
         """
         flat_id, backend_name = full_flat_id.rsplit("@", 1)
         try:
-            backend = next(backend for backend in self.backends if backend.name == backend_name)
+            backend = next(
+                backend
+                for backend in self.backends
+                if backend.name == backend_name
+            )
         except StopIteration:
             LOGGER.error("Backend %s is not available.", backend_name)
             return "{}"

         try:
             housing = backend.get_housing(flat_id)
-            if not store_personal_data:
-                # Ensure phone is cleared
-                housing.phone = None
-            else:
-                # Ensure phone is fetched
-                backend.fillobj(housing, "phone")
             # Otherwise, we miss the @backend afterwards
             housing.id = full_flat_id

-            return json.dumps(housing, cls=WoobEncoder)
-        except Exception as exc:  # pylint: disable=broad-except
+            return json.dumps(housing, cls=WeboobEncoder)
+        except CallErrors as exc:
             # If an error occurred, just log it
-            LOGGER.error("An error occurred while fetching housing %s: %s", full_flat_id, str(exc))
-            return "{}"
+            LOGGER.error(
+                "An error occurred while fetching housing %s: %s",
+                full_flat_id,
+                str(exc)
+            )

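The only behavioural change in `query()` and `info()` on the `-` side is the `store_personal_data` switch: phone numbers are blanked unless explicitly requested. A minimal sketch of that idea, with plain dicts standing in for Woob housing objects:

    def scrub_phone(posts, store_personal_data=False):
        # Blank the phone field unless the caller opted in to keeping it.
        for post in posts:
            if not store_personal_data:
                post["phone"] = None
        return posts

    posts = [{"id": "1@pap", "phone": "0611223344"}]  # invented post
    assert scrub_phone(posts)[0]["phone"] is None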
-def fetch_flats(config):
+def fetch_flats_list(config):
     """
-    Fetch the available flats using the Woob config.
+    Fetch the available flats using the Flatboob / Weboob config.

     :param config: A config dict.
-    :return: A dict mapping constraint in config to all available matching
-        flats.
+    :return: A list of all available flats.
     """
-    fetched_flats = {}
+    flats_list = []

-    for constraint_name, constraint in config["constraints"].items():
-        LOGGER.info("Loading flats for constraint %s...", constraint_name)
-        with WoobProxy(config) as woob_proxy:
-            queries = woob_proxy.build_queries(constraint)
-            housing_posts = []
-            for query in queries:
-                housing_posts.extend(woob_proxy.query(query, config["max_entries"], config["store_personal_data"]))
-        housing_posts = housing_posts[: config["max_entries"]]
-        LOGGER.info("Fetched %d flats.", len(housing_posts))
+    with WeboobProxy(config) as weboob_proxy:
+        LOGGER.info("Loading flats...")
+        queries = weboob_proxy.build_queries(config["constraints"])
+        housing_posts = []
+        for query in queries:
+            housing_posts.extend(
+                weboob_proxy.query(query, config["max_entries"])
+            )
+    LOGGER.info("Fetched %d flats.", len(housing_posts))

-        constraint_flats_list = [json.loads(flat) for flat in housing_posts]
-        constraint_flats_list = [WoobProxy.restore_decimal_fields(flat) for flat in constraint_flats_list]
-        fetched_flats[constraint_name] = constraint_flats_list
-    return fetched_flats
+    flats_list = [json.loads(flat) for flat in housing_posts]
+    flats_list = [WeboobProxy.restore_decimal_fields(flat)
+                  for flat in flats_list]
+    return flats_list

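On the `-` side, `fetch_flats` returns one list per constraint name rather than a single flat list. A toy illustration of the returned shape (constraint names and posts are invented):

    fetched_flats = {
        "default": [{"id": "123@seloger", "cost": 1200}],
        "suburbs": [{"id": "456@pap", "cost": 950}],
    }
    for constraint_name, flats in fetched_flats.items():
        print(constraint_name, len(flats))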
-@limits(calls=10, period=60)
-def fetch_details_rate_limited(config, flat_id):
-    """
-    Limit flats fetching to at most 10 calls per minute to avoid rate banning
-    """
-    return fetch_details(config, flat_id)
-
-
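The `@limits(calls=10, period=60)` decorator above matches the API of the `ratelimit` package; that the import comes from there is an assumption, since the import lines are outside this excerpt. With bare `@limits`, exceeding the quota raises `ratelimit.RateLimitException`; the package's `sleep_and_retry` helper, added here for illustration, blocks until a slot frees up instead:

    from ratelimit import limits, sleep_and_retry

    @sleep_and_retry              # wait for a free slot instead of raising
    @limits(calls=10, period=60)  # at most 10 calls per 60-second window
    def fetch_details_sketch(config, flat_id):
        # Hypothetical stand-in for the real fetch_details(config, flat_id).
        return {"id": flat_id}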
 def fetch_details(config, flat_id):
     """
-    Fetch the additional details for a flat using Woob.
+    Fetch the additional details for a flat using Flatboob / Weboob.

     :param config: A config dict.
     :param flat_id: ID of the flat to fetch details for.
     :return: A flat dict with all the available data.
     """
-    with WoobProxy(config) as woob_proxy:
+    with WeboobProxy(config) as weboob_proxy:
         LOGGER.info("Loading additional details for flat %s.", flat_id)
-        woob_output = woob_proxy.info(flat_id, config["store_personal_data"])
+        weboob_output = weboob_proxy.info(flat_id)

-    flat_details = json.loads(woob_output)
-    flat_details = WoobProxy.restore_decimal_fields(flat_details)
+    flat_details = json.loads(weboob_output)
+    flat_details = WeboobProxy.restore_decimal_fields(flat_details)
     LOGGER.info("Fetched details for flat %s.", flat_id)

     return flat_details


-def load_flats_from_file(json_file, config):
+def load_flats_list_from_file(json_file):
     """
     Load a dumped flats list from JSON file.

     :param json_file: The file to load housings list from.
-    :return: A dict mapping constraint in config to all available matching
-        flats.
-
-    .. note::
-
-        As we do not know which constraint is met by a given flat, all the
-        flats are returned for any available constraint, and they will be
-        filtered out afterwards.
+    :return: A list of all the flats in the dump file.
     """
     flats_list = []
     try:
@@ -305,21 +280,21 @@ def load_flats_from_file(json_file, config):
         LOGGER.info("Found %d flats.", len(flats_list))
     except (IOError, ValueError):
         LOGGER.error("File %s is not a valid dump file.", json_file)
-    return {constraint_name: flats_list for constraint_name in config["constraints"]}
+    return flats_list


-def load_flats_from_db(config):
+def load_flats_list_from_db(config):
     """
     Load flats from database.

     :param config: A config dict.
-    :return: A dict mapping constraint in config to all available matching
-        flats.
+    :return: A list of all the flats in the database.
     """
+    flats_list = []
     get_session = database.init_db(config["database"], config["search_index"])

-    loaded_flats = collections.defaultdict(list)
     with get_session() as session:
-        for flat in session.query(flat_model.Flat).all():
-            loaded_flats[flat.flatisfy_constraint].append(flat.json_api_repr())
-    return loaded_flats
+        # TODO: Better serialization
+        flats_list = [flat.json_api_repr()
+                      for flat in session.query(flat_model.Flat).all()]
+    return flats_list
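The `-` side of `load_flats_from_db` groups database rows by the constraint that produced them. The grouping pattern on its own, with invented rows:

    import collections

    rows = [  # (constraint, flat) pairs, invented
        ("default", {"id": "1@pap"}),
        ("default", {"id": "2@seloger"}),
        ("suburbs", {"id": "3@leboncoin"}),
    ]

    loaded_flats = collections.defaultdict(list)
    for constraint_name, flat in rows:
        loaded_flats[constraint_name].append(flat)

    assert len(loaded_flats["default"]) == 2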
@@ -10,14 +10,13 @@ import logging

 from flatisfy import tools
 from flatisfy.filters import duplicates
-from flatisfy.filters import images
 from flatisfy.filters import metadata


 LOGGER = logging.getLogger(__name__)


-def refine_with_housing_criteria(flats_list, constraint):
+def refine_with_housing_criteria(flats_list, config):
     """
     Filter a list of flats according to criteria.

@@ -26,7 +25,7 @@ def refine_with_housing_criteria(flats_list, constraint):
     user criteria, and avoid exposing unwanted flats.

     :param flats_list: A list of flats dict to filter.
-    :param constraint: The constraint that the ``flats_list`` should satisfy.
+    :param config: A config dict.
     :return: A tuple of flats to keep and flats to delete.
     """
     # For each flat, the associated `is_ok` value indicates whether it should be
@@ -36,118 +35,52 @@ def refine_with_housing_criteria(flats_list, constraint):
     for i, flat in enumerate(flats_list):
         # Check postal code
         postal_code = flat["flatisfy"].get("postal_code", None)
-        if postal_code and postal_code not in constraint["postal_codes"]:
-            LOGGER.info(
-                "Postal code %s for flat %s is out of range (%s).",
-                postal_code,
-                flat["id"],
-                ", ".join(constraint["postal_codes"]),
-            )
-            is_ok[i] = False
-        # Check insee code
-        insee_code = flat["flatisfy"].get("insee_code", None)
-        if insee_code and "insee_codes" in constraint and insee_code not in constraint["insee_codes"]:
-            LOGGER.info(
-                "insee code %s for flat %s is out of range (%s).",
-                insee_code,
-                flat["id"],
-                ", ".join(constraint["insee_codes"]),
-            )
-            is_ok[i] = False
+        if (
+                postal_code and
+                postal_code not in config["constraints"]["postal_codes"]
+        ):
+            LOGGER.info("Postal code for flat %s is out of range.", flat["id"])
+            is_ok[i] = is_ok[i] and False

         # Check time_to
         for place_name, time in flat["flatisfy"].get("time_to", {}).items():
             time = time["time"]
-            is_within_interval = tools.is_within_interval(time, *(constraint["time_to"][place_name]["time"]))
-            if not is_within_interval:
-                LOGGER.info(
-                    "Flat %s is too far from place %s: %ds.",
-                    flat["id"],
-                    place_name,
-                    time,
-                )
+            is_within_interval = tools.is_within_interval(
+                time,
+                *(config["constraints"]["time_to"][place_name]["time"])
+            )
+            if not is_within_interval:
+                LOGGER.info("Flat %s is too far from place %s: %ds.",
+                            flat["id"], place_name, time)
             is_ok[i] = is_ok[i] and is_within_interval

         # Check other fields
         for field in ["area", "cost", "rooms", "bedrooms"]:
-            interval = constraint[field]
-            is_within_interval = tools.is_within_interval(flat.get(field, None), *interval)
-            if not is_within_interval:
-                LOGGER.info(
-                    "%s %s for flat %s is out of range.", field.capitalize(), str(flat.get(field, None)), flat["id"]
-                )
+            interval = config["constraints"][field]
+            is_within_interval = tools.is_within_interval(
+                flat.get(field, None),
+                *interval
+            )
+            if not is_within_interval:
+                LOGGER.info("%s for flat %s is out of range.",
+                            field.capitalize(), flat["id"])
             is_ok[i] = is_ok[i] and is_within_interval

     return (
-        [flat for i, flat in enumerate(flats_list) if is_ok[i]],
-        [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
+        [
+            flat
+            for i, flat in enumerate(flats_list)
+            if is_ok[i]
+        ],
+        [
+            flat
+            for i, flat in enumerate(flats_list)
+            if not is_ok[i]
+        ]
     )
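All of these checks funnel through an interval test in which a `None` bound means unbounded. A self-contained sketch of such a check (not the exact `tools.is_within_interval` implementation):

    def within(value, low=None, high=None):
        # None bounds are treated as unbounded; a None value passes the check.
        if value is None:
            return True
        if low is not None and value < low:
            return False
        if high is not None and value > high:
            return False
        return True

    assert within(1200, 800, 1500)   # cost inside the interval
    assert not within(25, 30, None)  # area below the minimum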

-def refine_with_details_criteria(flats_list, constraint):
-    """
-    Filter a list of flats according to the criteria which require the full
-    details to be fetched. These include minimum number of photos and terms
-    that should appear in description.
-
-    .. note ::
-
-        This has to be done in a separate function and not with the other
-        criteria as photos and full description are only fetched in the second
-        pass.
-
-    :param flats_list: A list of flats dict to filter.
-    :param constraint: The constraint that the ``flats_list`` should satisfy.
-    :return: A tuple of flats to keep and flats to delete.
-    """
-    # For each flat, the associated `is_ok` value indicates whether it should be
-    # kept or discarded.
-    is_ok = [True for _ in flats_list]
-
-    for i, flat in enumerate(flats_list):
-        # Check number of pictures
-        has_enough_photos = tools.is_within_interval(len(flat.get("photos", [])), constraint["minimum_nb_photos"], None)
-        if not has_enough_photos:
-            LOGGER.info(
-                "Flat %s only has %d photos, it should have at least %d.",
-                flat["id"],
-                len(flat["photos"]),
-                constraint["minimum_nb_photos"],
-            )
-            is_ok[i] = False
-
-        for term in constraint["description_should_contain"]:
-            if isinstance(term, str) and term.lower() not in flat["text"].lower():
-                LOGGER.info(
-                    ("Description for flat %s does not contain required term '%s'."),
-                    flat["id"],
-                    term,
-                )
-                is_ok[i] = False
-            elif isinstance(term, list) and all(x.lower() not in flat["text"].lower() for x in term):
-                LOGGER.info(
-                    ("Description for flat %s does not contain any of required terms '%s'."),
-                    flat["id"],
-                    term,
-                )
-                is_ok[i] = False
-        for term in constraint["description_should_not_contain"]:
-            if term.lower() in flat["text"].lower():
-                LOGGER.info(
-                    ("Description for flat %s contains blacklisted term '%s'."),
-                    flat["id"],
-                    term,
-                )
-                is_ok[i] = False
-
-    return (
-        [flat for i, flat in enumerate(flats_list) if is_ok[i]],
-        [flat for i, flat in enumerate(flats_list) if not is_ok[i]],
-    )

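Note that a `description_should_contain` entry can be either a single required term or a list of acceptable alternatives. The matching rule on its own, with an invented description and constraint:

    text = "charmant deux pieces, metro denfert".lower()
    should_contain = ["metro", ["balcon", "terrasse"]]  # invented constraint

    ok = True
    for term in should_contain:
        if isinstance(term, str) and term.lower() not in text:
            ok = False  # the single required term is missing
        elif isinstance(term, list) and all(t.lower() not in text for t in term):
            ok = False  # none of the alternatives appears
    assert not ok  # neither "balcon" nor "terrasse" is mentioned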
-@tools.timeit
-def first_pass(flats_list, constraint, config):
+def first_pass(flats_list, config):
     """
     First filtering pass.

@@ -156,7 +89,6 @@ def first_pass(flats_list, constraint, config):
     only request more data for the remaining housings.

     :param flats_list: A list of flats dict to filter.
-    :param constraint: The constraint that the ``flats_list`` should satisfy.
     :param config: A config dict.
     :return: A dict mapping flat status and list of flat objects.
     """
@@ -164,28 +96,32 @@ def first_pass(flats_list, constraint, config):

     # Handle duplicates based on ids
     # Just remove them (no merge) as they should be the exact same object.
-    flats_list, _ = duplicates.detect(flats_list, key="id", merge=False, should_intersect=False)
+    flats_list, duplicates_by_id = duplicates.detect(
+        flats_list, key="id", merge=False, should_intersect=False
+    )
     # Also merge duplicates based on urls (these may come from different
     # flatboob backends)
     # This is especially useful as some websites such as entreparticuliers
     # contain a lot of leboncoin housing posts.
-    flats_list, duplicates_by_urls = duplicates.detect(flats_list, key="urls", merge=True, should_intersect=True)
+    flats_list, duplicates_by_urls = duplicates.detect(
+        flats_list, key="urls", merge=True, should_intersect=True
+    )

     # Guess the postal codes
-    flats_list = metadata.guess_postal_code(flats_list, constraint, config)
+    flats_list = metadata.guess_postal_code(flats_list, config)

-    if not config["ignore_station"]:
-        # Try to match with stations
-        flats_list = metadata.guess_stations(flats_list, constraint, config)
+    # Try to match with stations
+    flats_list = metadata.guess_stations(flats_list, config)

     # Remove returned housing posts that do not match criteria
-    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
+    flats_list, ignored_list = refine_with_housing_criteria(flats_list, config)

-    return {"new": flats_list, "ignored": ignored_list, "duplicate": duplicates_by_urls}
+    return {
+        "new": flats_list,
+        "ignored": ignored_list,
+        "duplicate": duplicates_by_id + duplicates_by_urls
+    }


-@tools.timeit
-def second_pass(flats_list, constraint, config):
+def second_pass(flats_list, config):
     """
     Second filtering pass.

@@ -197,7 +133,6 @@ def second_pass(flats_list, constraint, config):
     possible from the fetched housings.

     :param flats_list: A list of flats dict to filter.
-    :param constraint: The constraint that the ``flats_list`` should satisfy.
     :param config: A config dict.
     :return: A dict mapping flat status and list of flat objects.
     """
@@ -206,29 +141,24 @@ def second_pass(flats_list, constraint, config):
     # left and we already tried to find postal code and nearby stations.

     # Confirm postal code
-    flats_list = metadata.guess_postal_code(flats_list, constraint, config)
+    flats_list = metadata.guess_postal_code(flats_list, config)

     # Better match with stations (confirm and check better)
-    if not config["ignore_station"]:
-        flats_list = metadata.guess_stations(flats_list, constraint, config)
+    flats_list = metadata.guess_stations(flats_list, config)

     # Compute travel time to specified points
-    flats_list = metadata.compute_travel_times(flats_list, constraint, config)
+    flats_list = metadata.compute_travel_times(flats_list, config)

     # Remove returned housing posts that do not match criteria
-    flats_list, ignored_list = refine_with_housing_criteria(flats_list, constraint)
+    flats_list, ignored_list = refine_with_housing_criteria(flats_list, config)

-    # Remove returned housing posts which do not match criteria relying on
-    # fetched details.
-    flats_list, ignored_list = refine_with_details_criteria(flats_list, constraint)
-
-    if config["serve_images_locally"]:
-        images.download_images(flats_list, config)
-
-    return {"new": flats_list, "ignored": ignored_list, "duplicate": []}
+    return {
+        "new": flats_list,
+        "ignored": ignored_list,
+        "duplicate": []
+    }


-@tools.timeit
 def third_pass(flats_list, config):
     """
     Third filtering pass.
@@ -240,9 +170,11 @@ def third_pass(flats_list, config):
     :param config: A config dict.
     :return: A dict mapping flat status and list of flat objects.
     """
-    LOGGER.info("Running third filtering pass.")
-
     # Deduplicate the list using every available data
-    flats_list, duplicate_flats = duplicates.deep_detect(flats_list, config)
+    flats_list, duplicate_flats = duplicates.deep_detect(flats_list)

-    return {"new": flats_list, "ignored": [], "duplicate": duplicate_flats}
+    return {
+        "new": flats_list,
+        "ignored": [],
+        "duplicate": duplicate_flats
+    }
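Read together, the passes form a funnel: cheap field checks first, detail-dependent checks second, expensive photo-based deduplication last. A rough sketch of how a driver could chain the `-` versions (illustrative only; the real command also fetches full details between the first and second passes):

    def run_filters_sketch(flats, constraint, config):
        first = first_pass(flats, constraint, config)
        second = second_pass(first["new"], constraint, config)
        third = third_pass(second["new"], config)
        return {
            "new": third["new"],
            "ignored": first["ignored"] + second["ignored"],
            "duplicate": first["duplicate"] + second["duplicate"] + third["duplicate"],
        }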
@@ -1,145 +0,0 @@
-# coding: utf-8
-"""
-Caching function for pictures.
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import collections
-import hashlib
-import os
-import requests
-import logging
-from io import BytesIO
-
-import PIL.Image
-
-LOGGER = logging.getLogger(__name__)
-
-
-class MemoryCache(object):
-    """
-    A cache in memory.
-    """
-
-    @staticmethod
-    def on_miss(key):
-        """
-        Method to be called whenever an object is requested from the cache but
-        was not already cached. Typically, make a HTTP query to fetch it.
-
-        :param key: Key of the requested object.
-        :return: The object content.
-        """
-        raise NotImplementedError
-
-    def __init__(self):
-        self.hits = 0
-        self.misses = 0
-        self.map = collections.OrderedDict()
-
-    def get(self, key):
-        """
-        Get an element from cache, calling ``on_miss`` if the item is
-        not already cached.
-
-        :param key: Key of the element to retrieve.
-        :return: Requested element.
-        """
-        cached = self.map.get(key, None)
-        if cached is not None:
-            self.hits += 1
-            return cached
-
-        item = self.map[key] = self.on_miss(key)
-        self.misses += 1
-        return item
-
-    def total(self):
-        """
-        Get the total number of calls (with hits to the cache, or miss and
-        fetching with ``on_miss``) to the cache.
-
-        :return: Total number of item accesses.
-        """
-        return self.hits + self.misses
-
-    def hit_rate(self):
-        """
-        Get the hit rate, that is the rate at which we requested an item which
-        was already in the cache.
-
-        :return: The hit rate, in percents.
-        """
-        assert self.total() > 0
-        return 100 * self.hits // self.total()
-
-    def miss_rate(self):
-        """
-        Get the miss rate, that is the rate at which we requested an item which
-        was not already in the cache.
-
-        :return: The miss rate, in percents.
-        """
-        assert self.total() > 0
-        return 100 * self.misses // self.total()
-
-
-class ImageCache(MemoryCache):
-    """
-    A cache for images, stored in memory.
-    """
-
-    @staticmethod
-    def compute_filename(url):
-        """
-        Compute filename (hash of the URL) for the cached image.
-
-        :param url: The URL of the image.
-        :return: The filename, with its extension.
-        """
-        # Always store as JPEG
-        return "%s.jpg" % hashlib.sha1(url.encode("utf-8")).hexdigest()
-
-    def on_miss(self, url):
-        """
-        Helper to actually retrieve photos if not already cached.
-        """
-        # If too many items in the cache, pop one
-        if len(self.map.keys()) > self.max_items:
-            self.map.popitem(last=False)
-
-        if url.endswith(".svg"):
-            # Skip SVG photos, which are unsupported and unlikely to be relevant
-            return None
-
-        filepath = None
-        # Try to load from local folder
-        if self.storage_dir:
-            filepath = os.path.join(self.storage_dir, self.compute_filename(url))
-            if os.path.isfile(filepath):
-                return PIL.Image.open(filepath)
-        # Otherwise, fetch it
-        try:
-            LOGGER.debug(f"Download photo from {url} to {filepath}")
-            req = requests.get(url)
-            req.raise_for_status()
-            image = PIL.Image.open(BytesIO(req.content))
-            if filepath:
-                image.save(filepath, format=image.format)
-            return image
-        except (requests.HTTPError, IOError) as exc:
-            LOGGER.info(f"Download photo from {url} failed: {exc}")
-            return None
-
-    def __init__(self, max_items=200, storage_dir=None):
-        """
-        :param max_items: Max number of items in the cache, to prevent Out Of
-            Memory errors.
-        :param storage_dir: Directory in which images should be stored.
-        """
-        self.max_items = max_items
-        self.storage_dir = storage_dir
-        if self.storage_dir and not os.path.isdir(self.storage_dir):
-            os.makedirs(self.storage_dir)
-        super(ImageCache, self).__init__()
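`MemoryCache` is deliberately abstract: a subclass only supplies `on_miss`, and the base class tracks hits and misses. A tiny usage sketch (the squaring payload is invented):

    class SquareCache(MemoryCache):
        @staticmethod
        def on_miss(key):
            # Stand-in for an expensive fetch, e.g. an HTTP request.
            return key * key

    cache = SquareCache()
    cache.get(3)                   # miss: computes and stores 9
    cache.get(3)                   # hit: served from the OrderedDict
    assert cache.hit_rate() == 50  # 1 hit out of 2 accesses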
@@ -7,114 +7,85 @@ from __future__ import absolute_import, print_function, unicode_literals
 import collections
 import itertools
 import logging
-import os
 import re

+from io import BytesIO
+
 import imagehash
+import PIL.Image
 import requests

 from flatisfy import tools
-from flatisfy.constants import BACKENDS_BY_PRECEDENCE
-from flatisfy.filters.cache import ImageCache

 LOGGER = logging.getLogger(__name__)

+# Some backends give more info than others. Here is the precedence we want to
+# use.
+BACKENDS_PRECEDENCE = [
+    "seloger",
+    "pap",
+    "leboncoin",
+    "explorimmo",
+    "logicimmo",
+    "entreparticuliers"
+]
+

-def homogeneize_phone_number(numbers):
+def homogeneize_phone_number(number):
     """
     Homogeneize the phone numbers, by stripping any space, dash or dot as well
     as the international prefix. Assumes it is dealing with French phone
     numbers (starting with a zero and having 10 characters).

-    :param numbers: The phone number string to homogeneize (can contain
-        multiple phone numbers).
+    :param number: The phone number to homogeneize.
     :return: The cleaned phone number. ``None`` if the number is not valid.
     """
-    if not numbers:
+    if not number:
         return None

-    clean_numbers = []
-
-    for number in numbers.split(","):
-        number = number.strip()
-        number = number.replace(".", "")
-        number = number.replace(" ", "")
-        number = number.replace("-", "")
-        number = number.replace("(", "")
-        number = number.replace(")", "")
-        number = re.sub(r"^\+\d\d", "", number)
-
-        if not number.startswith("0"):
-            number = "0" + number
-
-        if len(number) == 10:
-            clean_numbers.append(number)
-
-    if not clean_numbers:
+    number = number.replace(".", "")
+    number = number.replace(" ", "")
+    number = number.replace("-", "")
+    number = number.replace("(", "")
+    number = number.replace(")", "")
+    number = re.sub(r'^\+\d\d', "", number)
+
+    if not number.startswith("0"):
+        number = "0" + number
+
+    if len(number) != 10:
         return None
-    return ", ".join(clean_numbers)
+
+    return number
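Assuming the `-` (comma-separated) variant of `homogeneize_phone_number` shown above, the cleaning behaves like this; the inputs are invented, but each result follows from the steps in the code:

    assert homogeneize_phone_number("+33 6 12 34 56 78") == "0612345678"
    assert homogeneize_phone_number("06.11.22.33.44, 07 55 66 77 88") == "0611223344, 0755667788"
    assert homogeneize_phone_number("12345") is None  # not 10 digits once cleaned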
-def get_or_compute_photo_hash(photo, photo_cache):
-    """
-    Get the computed hash from the photo dict or compute it if not found.
-
-    :param photo: A photo, as a ``dict`` with (at least) a ``url`` key.
-    :param photo_cache: An instance of ``ImageCache`` to use to cache images.
-    """
-    try:
-        # Try to get the computed hash from the photo dict
-        return photo["hash"]
-    except KeyError:
-        # Otherwise, get the image and compute the hash
-        image = photo_cache.get(photo["url"])
-        if not image:
-            return None
-        photo["hash"] = imagehash.average_hash(image)
-        return photo["hash"]
-
-
-def compare_photos(photo1, photo2, photo_cache, hash_threshold):
-    """
-    Compares two photos with average hash method.
-
-    :param photo1: First photo url.
-    :param photo2: Second photo url.
-    :param photo_cache: An instance of ``ImageCache`` to use to cache images.
-    :param hash_threshold: The hash threshold between two images. Usually two
-        different photos have a hash difference of 30.
-    :return: ``True`` if the photos are identical, else ``False``.
-    """
-    try:
-        hash1 = get_or_compute_photo_hash(photo1, photo_cache)
-        hash2 = get_or_compute_photo_hash(photo2, photo_cache)
-
-        return hash1 - hash2 < hash_threshold
-    except (IOError, requests.exceptions.RequestException, TypeError):
-        return False
-
-
-def find_number_common_photos(flat1_photos, flat2_photos, photo_cache, hash_threshold):
+def find_number_common_photos(flat1_photos, flat2_photos):
     """
     Compute the number of common photos between the two lists of photos for the
     flats.

-    Fetch the photos and compare them with average hash method.
+    Fetch the photos and compare them with dHash method.

     :param flat1_photos: First list of flat photos. Each photo should be a
-        ``dict`` with (at least) a ``url`` key.
-    :param flat2_photos: Second list of flat photos. Each photo should be a
-        ``dict`` with (at least) a ``url`` key.
-    :param photo_cache: An instance of ``ImageCache`` to use to cache images.
-    :param hash_threshold: The hash threshold between two images.
+        ``dict`` with a ``url`` key.
+    :param flat2_photos: First list of flat photos. Each photo should be a
+        ``dict`` with a ``url`` key.
     :return: The found number of common photos.
     """
     n_common_photos = 0

     for photo1, photo2 in itertools.product(flat1_photos, flat2_photos):
-        if compare_photos(photo1, photo2, photo_cache, hash_threshold):
-            n_common_photos += 1
+        try:
+            req1 = requests.get(photo1["url"])
+            im1 = PIL.Image.open(BytesIO(req1.content))
+            hash1 = imagehash.average_hash(im1)
+
+            req2 = requests.get(photo2["url"])
+            im2 = PIL.Image.open(BytesIO(req2.content))
+            hash2 = imagehash.average_hash(im2)
+
+            if hash1 - hash2 == 0:
+                n_common_photos += 1
+        except (IOError, requests.exceptions.RequestException):
+            pass
     return n_common_photos
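Both sides rely on the `imagehash` library, where subtracting two hashes gives the number of differing bits. A minimal standalone sketch (the file paths are invented):

    import imagehash
    import PIL.Image

    # average_hash() returns an ImageHash; the - operator is the Hamming distance.
    hash1 = imagehash.average_hash(PIL.Image.open("photo_a.jpg"))
    hash2 = imagehash.average_hash(PIL.Image.open("photo_b.jpg"))

    # The + side requires an exact match (distance 0); the - side accepts any
    # distance below a configurable threshold instead.
    if hash1 - hash2 == 0:
        print("identical photos")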
@@ -134,12 +105,12 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
     :param merge: Whether the found duplicates should be merged or we should
         only keep one of them.
     :param should_intersect: Set to ``True`` if the values in the flat dicts
-        are lists and you want to deduplicate on non-empty intersection
-        (typically if they have a common url).
+        are lists and you want to deduplicate on non-empty intersection (typically
+        if they have a common url).

     :return: A tuple of the deduplicated list of flat dicts and the list of all
-        the flats objects that should be removed and considered as duplicates
-        (they were already merged).
+        the flats objects that should be removed and considered as duplicates (they
+        were already merged).
     """
     # ``seen`` is a dict mapping aggregating the flats by the deduplication
     # keys. We basically make buckets of flats for every key value. Flats in
@@ -161,7 +132,7 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):

     for flat_key, matching_flats in seen.items():
         if flat_key is None:
-            # If the key is None, it means Woob could not load the data. In
+            # If the key is None, it means Weboob could not load the data. In
             # this case, we consider every matching item as being independent
             # of the others, to avoid over-deduplication.
             unique_flats_list.extend(matching_flats)
@@ -169,21 +140,22 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
             # Sort matching flats by backend precedence
             matching_flats.sort(
                 key=lambda flat: next(
-                    i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
+                    i for (i, backend) in enumerate(BACKENDS_PRECEDENCE)
+                    if flat["id"].endswith(backend)
                 ),
-                reverse=True,
+                reverse=True
             )

             if len(matching_flats) > 1:
-                LOGGER.info(
-                    'Found duplicates using key "%s": %s.',
-                    key,
-                    [flat["id"] for flat in matching_flats],
-                )
+                LOGGER.info("Found duplicates using key \"%s\": %s.",
+                            key,
+                            [flat["id"] for flat in matching_flats])
             # Otherwise, check the policy
             if merge:
                 # If a merge is requested, do the merge
-                unique_flats_list.append(tools.merge_dicts(*matching_flats))
+                unique_flats_list.append(
+                    tools.merge_dicts(*matching_flats)
+                )
             else:
                 # Otherwise, just keep the most important of them
                 unique_flats_list.append(matching_flats[-1])
@@ -197,22 +169,32 @@ def detect(flats_list, key="id", merge=True, should_intersect=False):
     if should_intersect:
         # We added some flats twice with the above method, let's deduplicate on
         # id.
-        unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True, should_intersect=False)
+        unique_flats_list, _ = detect(unique_flats_list, key="id", merge=True,
+                                      should_intersect=False)

     return unique_flats_list, duplicate_flats


-def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):
+def deep_detect(flats_list):
     """
-    Compute the duplicate score between two flats. The higher the score, the
-    more likely the two flats are to be duplicates.
-
-    :param flat1: First flat dict.
-    :param flat2: Second flat dict.
-    :param photo_cache: An instance of ``ImageCache`` to use to cache images.
-    :param hash_threshold: The hash threshold between two images.
-    :return: The duplicate score as ``int``.
+    Deeper detection of duplicates based on any available data.
+
+    :param flats_list: A list of flats dicts.
+    :return: A tuple of the deduplicated list of flat dicts and the list of all
+        the flats objects that should be removed and considered as duplicates (they
+        were already merged).
     """
+    LOGGER.info("Running deep duplicates detection.")
+    matching_flats = collections.defaultdict(list)
+    for i, flat1 in enumerate(flats_list):
+        matching_flats[flat1["id"]].append(flat1["id"])
+        for j, flat2 in enumerate(flats_list):
+            if i <= j:
+                continue
+
+            if flat2["id"] in matching_flats[flat1["id"]]:
+                continue
+
             n_common_items = 0
             try:
                 # They should have the same area, up to one unit
@@ -243,108 +225,59 @@ def get_duplicate_score(flat1, flat2, photo_cache, hash_threshold):

                 # They should have the same postal code, if available
                 if (
-                    "flatisfy" in flat1
-                    and "flatisfy" in flat2
-                    and flat1["flatisfy"].get("postal_code", None)
-                    and flat2["flatisfy"].get("postal_code", None)
+                        flat1["flatisfy"].get("postal_code", None) and
+                        flat2["flatisfy"].get("postal_code", None)
                 ):
-                    assert flat1["flatisfy"]["postal_code"] == flat2["flatisfy"]["postal_code"]
+                    assert (
+                        flat1["flatisfy"]["postal_code"] ==
+                        flat2["flatisfy"]["postal_code"]
+                    )
                     n_common_items += 1

-                # TODO: Better text comparison (one included in the other, fuzzymatch)
-                flat1_text = tools.normalize_string(flat1.get("text", ""))
-                flat2_text = tools.normalize_string(flat2.get("text", ""))
-                if flat1_text and flat2_text and flat1_text == flat2_text:
-                    n_common_items += 1
+                # TODO: Compare texts (one is included in another? fuzzymatch?)

                 # They should have the same phone number if it was fetched for
                 # both
                 flat1_phone = homogeneize_phone_number(flat1["phone"])
                 flat2_phone = homogeneize_phone_number(flat2["phone"])
                 if flat1_phone and flat2_phone:
-                    # Use an "in" test as there could be multiple phone numbers
-                    # returned by a Woob module
-                    if flat1_phone in flat2_phone or flat2_phone in flat1_phone:
-                        n_common_items += 4  # Counts much more than the rest
+                    assert flat1_phone == flat2_phone
+                    n_common_items += 10  # Counts much more than the rest

-                # If the two flats are from the same website and have a
-                # different float part, consider they cannot be duplicates. See
-                # https://framagit.org/phyks/Flatisfy/issues/100.
-                both_are_from_same_backend = flat1["id"].split("@")[-1] == flat2["id"].split("@")[-1]
-                both_have_float_part = (flat1["area"] % 1) > 0 and (flat2["area"] % 1) > 0
-                both_have_equal_float_part = (flat1["area"] % 1) == (flat2["area"] % 1)
-                if both_have_float_part and both_are_from_same_backend:
-                    assert both_have_equal_float_part
-
-                if flat1.get("photos", []) and flat2.get("photos", []):
-                    n_common_photos = find_number_common_photos(flat1["photos"], flat2["photos"], photo_cache, hash_threshold)
-
-                    min_number_photos = min(len(flat1["photos"]), len(flat2["photos"]))
-
-                    # Either all the photos are the same, or there are at least
-                    # three common photos.
-                    if n_common_photos == min_number_photos:
-                        n_common_items += 15
-                    else:
-                        n_common_items += 5 * min(n_common_photos, 3)
+                # They should have at least one photo in common if there
+                # are some photos
+                if flat1["photos"] and flat2["photos"]:
+                    max_number_photos = max(len(flat1["photos"]),
+                                            len(flat2["photos"]))
+                    n_common_photos = find_number_common_photos(
+                        flat1["photos"],
+                        flat2["photos"]
+                    )
+                    assert n_common_photos > 1
+                    n_common_items += int(
+                        20 * n_common_photos / max_number_photos
+                    )
+
+                # Minimal score to consider they are duplicates
+                assert n_common_items >= 15
             except (AssertionError, TypeError):
                 # Skip and consider as not duplicates whenever the conditions
                 # are not met
                 # TypeError occurs when an area or a cost is None, which should
                 # not be considered as duplicates
-                n_common_items = 0
-
-    return n_common_items

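A worked example of the `-` side's scoring, with an invented pair of posts: equal postal codes, identical normalized descriptions, one phone number contained in the other, and all photos in common add up well past a threshold of 15:

    score = 0
    score += 1   # same postal code
    score += 1   # identical normalized description text
    score += 4   # one phone number contained in the other
    score += 15  # all photos in common (n_common_photos == min_number_photos)
    assert score >= 15  # the pair would be flagged as a duplicate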
-def deep_detect(flats_list, config):
-    """
-    Deeper detection of duplicates based on any available data.
-
-    :param flats_list: A list of flats dicts.
-    :param config: A config dict.
-    :return: A tuple of the deduplicated list of flat dicts and the list of all
-        the flats objects that should be removed and considered as duplicates
-        (they were already merged).
-    """
-    if config["serve_images_locally"]:
-        storage_dir = os.path.join(config["data_directory"], "images")
-    else:
-        storage_dir = None
-    photo_cache = ImageCache(storage_dir=storage_dir)
-
-    LOGGER.info("Running deep duplicates detection.")
-    matching_flats = collections.defaultdict(list)
-    for i, flat1 in enumerate(flats_list):
-        matching_flats[flat1["id"]].append(flat1["id"])
-        for j, flat2 in enumerate(flats_list):
-            if i <= j:
                 continue

-            if flat2["id"] in matching_flats[flat1["id"]]:
-                continue
-
-            n_common_items = get_duplicate_score(flat1, flat2, photo_cache, config["duplicate_image_hash_threshold"])
-
-            # Minimal score to consider they are duplicates
-            if n_common_items >= config["duplicate_threshold"]:
                 # Mark flats as duplicates
                 LOGGER.info(
-                    ("Found duplicates using deep detection: (%s, %s). Score is %d."),
+                    ("Found duplicates using deep detection: (%s, %s). "
+                     "Score is %d."),
                     flat1["id"],
                     flat2["id"],
-                    n_common_items,
+                    n_common_items
                 )
                 matching_flats[flat1["id"]].append(flat2["id"])
                 matching_flats[flat2["id"]].append(flat1["id"])

-    if photo_cache.total():
-        LOGGER.debug(
-            "Photo cache: hits: %d%% / misses: %d%%.",
-            photo_cache.hit_rate(),
-            photo_cache.miss_rate(),
-        )
-
     seen_ids = []
     duplicate_flats = []
     unique_flats_list = []
@@ -354,11 +287,16 @@ def deep_detect(flats_list, config):

         seen_ids.extend(matching_flats[flat_id])
         to_merge = sorted(
-            [flat for flat in flats_list if flat["id"] in matching_flats[flat_id]],
+            [
+                flat
+                for flat in flats_list
+                if flat["id"] in matching_flats[flat_id]
+            ],
             key=lambda flat: next(
-                i for (i, backend) in enumerate(BACKENDS_BY_PRECEDENCE) if flat["id"].endswith(backend)
+                i for (i, backend) in enumerate(BACKENDS_PRECEDENCE)
+                if flat["id"].endswith(backend)
             ),
-            reverse=True,
+            reverse=True
         )
         unique_flats_list.append(tools.merge_dicts(*to_merge))
         # The ID of the added merged flat will be the one of the last item
@@ -1,33 +0,0 @@
-# coding: utf-8
-"""
-Filtering functions to handle images.
-
-This includes functions to download images.
-"""
-from __future__ import absolute_import, print_function, unicode_literals
-
-import logging
-import os
-
-from flatisfy.filters.cache import ImageCache
-
-
-LOGGER = logging.getLogger(__name__)
-
-
-def download_images(flats_list, config):
-    """
-    Download images for all flats in the list, to serve them locally.
-
-    :param flats_list: A list of flats dicts.
-    :param config: A config dict.
-    """
-    photo_cache = ImageCache(storage_dir=os.path.join(config["data_directory"], "images"))
-    for flat in flats_list:
-        for photo in flat["photos"]:
-            # Download photo
-            image = photo_cache.get(photo["url"])
-            # And store the local image
-            # Only add it if fetching was successful
-            if image:
-                photo["local"] = photo_cache.compute_filename(photo["url"])
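What `download_images` leaves behind on each photo dict is the cache filename computed by `ImageCache.compute_filename`, i.e. the SHA-1 of the URL plus a `.jpg` extension. A sketch with an invented URL:

    import hashlib

    url = "https://example.com/photo.jpg"  # invented URL
    local_name = "%s.jpg" % hashlib.sha1(url.encode("utf-8")).hexdigest()
    photo = {"url": url, "local": local_name}  # shape produced by download_images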
@@ -12,30 +12,24 @@ import re

 from flatisfy import data
 from flatisfy import tools
-from flatisfy.constants import TimeToModes
-from flatisfy.models.postal_code import PostalCode
-from flatisfy.models.public_transport import PublicTransport


 LOGGER = logging.getLogger(__name__)


-def init(flats_list, constraint):
+def init(flats_list):
     """
     Create a flatisfy key containing a dict of metadata fetched by flatisfy for
     each flat in the list. Also perform some basic transform on flat objects to
     prepare for the metadata fetching.

     :param flats_list: A list of flats dict.
-    :param constraint: The constraint that the ``flats_list`` should satisfy.
     :return: The updated list
     """
     for flat in flats_list:
         # Init flatisfy key
         if "flatisfy" not in flat:
             flat["flatisfy"] = {}
-        if "constraint" not in flat["flatisfy"]:
-            flat["flatisfy"]["constraint"] = constraint
         # Move url key to urls
         if "urls" not in flat:
             if "url" in flat:
@@ -55,31 +49,30 @@ def fuzzy_match(query, choices, limit=3, threshold=75):

     :param query: The string to match.
     :param choices: The list of strings to match with.
-    :param limit: The maximum number of items to return. Set to ``None`` to
-        return all values above threshold.
+    :param limit: The maximum number of items to return.
     :param threshold: The score threshold to use.

     :return: Tuples of matching items and associated confidence.

-    .. note ::
-
-        This function works by removing any fancy character from the ``query``
-        and ``choices`` strings (replacing any non alphabetic and non numeric
-        characters by space), converting to lower case and normalizing them
-        (collapsing multiple spaces etc). It also converts any roman numerals
-        to decimal system. It then compares the strings and looks for the
-        longest string in ``choices`` which is a substring of ``query``. The
-        longest one gets a confidence of 100. The shorter ones get a
-        confidence proportional to their length.
+    .. note :: This function works by removing any fancy character from the
+    ``query`` and ``choices`` strings (replacing any non alphabetic and non
+    numeric characters by space), converting to lower case and normalizing them
+    (collapsing multiple spaces etc). It also converts any roman numerals to
+    decimal system. It then compares the strings and looks for the longest
+    string in ``choices`` which is a substring of ``query``. The longest one
+    gets a confidence of 100. The shorter ones get a confidence proportional
+    to their length.

     .. seealso :: flatisfy.tools.normalize_string

-    Example::
+    .. todo :: Is there a better confidence measure?
+
+    :Example:

-        >>> fuzzy_match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
+    >>> match("Paris 14ème", ["Ris", "ris", "Paris 14"], limit=1)
     [("Paris 14", 100)

-        >>> fuzzy_match( \
+    >>> match( \
         "Saint-Jacques, Denfert-Rochereau (Colonel Rol-Tanguy), " \
         "Mouton-Duvernet", \
         ["saint-jacques", "denfert rochereau", "duvernet", "toto"], \
@@ -87,9 +80,8 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
     )
     [('denfert rochereau', 100), ('saint-jacques', 76)]
     """
-    # TODO: Is there a better confidence measure?
-    normalized_query = tools.normalize_string(query).replace("saint", "st")
-    normalized_choices = [tools.normalize_string(choice).replace("saint", "st") for choice in choices]
+    normalized_query = tools.normalize_string(query)
+    normalized_choices = [tools.normalize_string(choice) for choice in choices]

     # Remove duplicates in the choices list
     unique_normalized_choices = tools.uniqify(normalized_choices)
@@ -97,111 +89,65 @@ def fuzzy_match(query, choices, limit=3, threshold=75):
     # Get the matches (normalized strings)
     # Keep only ``limit`` matches.
     matches = sorted(
-        [(choice, len(choice)) for choice in tools.uniqify(unique_normalized_choices) if choice in normalized_query],
+        [
+            (choice, len(choice))
+            for choice in tools.uniqify(unique_normalized_choices)
+            if choice in normalized_query
+        ],
         key=lambda x: x[1],
-        reverse=True,
-    )
-    if limit:
-        matches = matches[:limit]
+        reverse=True
+    )[:limit]

     # Update confidence
     if matches:
         max_confidence = max(match[1] for match in matches)
-        matches = [(x[0], int(x[1] / max_confidence * 100)) for x in matches]
+        matches = [
+            (x[0], int(x[1] / max_confidence * 100))
+            for x in matches
+        ]

     # Convert back matches to original strings
     # Also filter out matches below threshold
-    matches = [(choices[normalized_choices.index(x[0])], x[1]) for x in matches if x[1] >= threshold]
+    matches = [
+        (choices[normalized_choices.index(x[0])], x[1])
+        for x in matches
+        if x[1] >= threshold
+    ]

     return matches
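The confidence arithmetic in the docstring example checks out: the matched substrings "denfert rochereau" (17 characters) and "saint-jacques" (13 characters) yield int(13 / 17 * 100) = 76 for the shorter one. The computation on its own:

    matches = [("denfert rochereau", 17), ("saint-jacques", 13)]  # (choice, length)
    max_confidence = max(n for _, n in matches)  # 17
    scored = [(choice, int(n / max_confidence * 100)) for choice, n in matches]
    assert scored == [("denfert rochereau", 100), ("saint-jacques", 76)]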
-def guess_location_position(location, cities, constraint, must_match):
-    # try to find a city
-    # Find all fuzzy-matching cities
-    postal_code = None
-    insee_code = None
-    position = None
-
-    matched_cities = fuzzy_match(location, [x.name for x in cities], limit=None)
-    if matched_cities:
-        # Find associated postal codes
-        matched_postal_codes = []
-        for matched_city_name, _ in matched_cities:
-            postal_code_objects_for_city = [x for x in cities if x.name == matched_city_name]
-            insee_code = [pc.insee_code for pc in postal_code_objects_for_city][0]
-            matched_postal_codes.extend(pc.postal_code for pc in postal_code_objects_for_city)
-        # Try to match them with postal codes in config constraint
-        matched_postal_codes_in_config = set(matched_postal_codes) & set(constraint["postal_codes"])
-        if matched_postal_codes_in_config:
-            # If there are some matched postal codes which are also in
-            # config, use them preferentially. This avoids ignoring
-            # incorrectly some flats in cities with multiple postal
-            # codes, see #110.
-            postal_code = next(iter(matched_postal_codes_in_config))
-        else:
-            # Otherwise, simply take any matched postal code.
-            postal_code = matched_postal_codes[0]
-
-        # take the city position
-        for matched_city_name, _ in matched_cities:
-            postal_code_objects_for_city = [
-                x for x in cities if x.name == matched_city_name and x.postal_code == postal_code
-            ]
-            if len(postal_code_objects_for_city):
-                position = {
-                    "lat": postal_code_objects_for_city[0].lat,
-                    "lng": postal_code_objects_for_city[0].lng,
-                }
-                LOGGER.debug(("Found position %s using city %s."), position, matched_city_name)
-                break
-
-    if not postal_code and must_match:
-        postal_code = cities[0].postal_code
-        position = {
-            "lat": cities[0].lat,
-            "lng": cities[0].lng,
-        }
-        insee_code = cities[0].insee_code
-
-    return (postal_code, insee_code, position)

def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|
||||||
"""
|
"""
|
||||||
Try to guess the postal code from the location of the flats.
|
Try to guess the postal code from the location of the flats.
|
||||||
|
|
||||||
:param flats_list: A list of flats dict.
|
:param flats_list: A list of flats dict.
|
||||||
:param constraint: The constraint that the ``flats_list`` should satisfy.
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
:param distance_threshold: Maximum distance in meters between the
|
:param distance_threshold: Maximum distance in meters between the
|
||||||
constraint postal codes (from config) and the one found by this
|
constraint postal codes (from config) and the one found by this function,
|
||||||
function, to avoid bad fuzzy matching. Can be ``None`` to disable
|
to avoid bad fuzzy matching. Can be ``None`` to disable thresholding.
|
||||||
thresholding.
|
|
||||||
|
|
||||||
:return: An updated list of flats dict with guessed postal code.
|
:return: An updated list of flats dict with guessed postal code.
|
||||||
"""
|
"""
|
||||||
opendata = {"postal_codes": data.load_data(PostalCode, constraint, config)}
|
opendata = {
|
||||||
|
"cities": data.load_data("cities", config),
|
||||||
|
"postal_codes": data.load_data("postal_codes", config)
|
||||||
|
}
|
||||||
|
|
||||||
for flat in flats_list:
|
for flat in flats_list:
|
||||||
location = flat.get("location", None)
|
location = flat.get("location", None)
|
||||||
if not location:
|
|
||||||
addr = flat.get("address", None)
|
|
||||||
if addr:
|
|
||||||
location = addr["full_address"]
|
|
||||||
if not location:
|
if not location:
|
||||||
# Skip everything if empty location
|
# Skip everything if empty location
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
("No location field for flat %s, skipping postal code lookup. (%s)"),
|
(
|
||||||
flat["id"],
|
"No location field for flat %s, skipping postal "
|
||||||
flat.get("address"),
|
"code lookup."
|
||||||
|
),
|
||||||
|
flat["id"]
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
postal_code = None
|
postal_code = None
|
||||||
insee_code = None
|
|
||||||
position = None
|
|
||||||
|
|
||||||
# Try to find a postal code directly
|
# Try to find a postal code directly
|
||||||
try:
|
try:
|
||||||
postal_code = re.search(r"[0-9]{5}", location)
|
postal_code = re.search(r"[0-9]{5}", location)
|
||||||
@ -209,22 +155,33 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
postal_code = postal_code.group(0)
|
postal_code = postal_code.group(0)
|
||||||
|
|
||||||
# Check the postal code is within the db
|
# Check the postal code is within the db
|
||||||
assert postal_code in [x.postal_code for x in opendata["postal_codes"]]
|
assert postal_code in opendata["postal_codes"]
|
||||||
|
|
||||||
LOGGER.debug(
|
LOGGER.info(
|
||||||
"Found postal code directly in location field for flat %s: %s.",
|
"Found postal code in location field for flat %s: %s.",
|
||||||
flat["id"],
|
flat["id"], postal_code
|
||||||
postal_code,
|
|
||||||
)
|
)
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
postal_code = None
|
postal_code = None
|
||||||
|
|
||||||
# Then fetch position (and postal_code is couldn't be found earlier)
|
# If not found, try to find a city
|
||||||
cities = opendata["postal_codes"]
|
if not postal_code:
|
||||||
if postal_code:
|
matched_city = fuzzy_match(
|
||||||
cities = [x for x in cities if x.postal_code == postal_code]
|
location,
|
||||||
(postal_code, insee_code, position) = guess_location_position(
|
opendata["cities"].keys(),
|
||||||
location, cities, constraint, postal_code is not None
|
limit=1
|
||||||
|
)
|
||||||
|
if matched_city:
|
||||||
|
# Store the matching postal code
|
||||||
|
matched_city = matched_city[0]
|
||||||
|
matched_city_name = matched_city[0]
|
||||||
|
postal_code = (
|
||||||
|
opendata["cities"][matched_city_name]["postal_code"]
|
||||||
|
)
|
||||||
|
LOGGER.info(
|
||||||
|
("Found postal code in location field through city lookup "
|
||||||
|
"for flat %s: %s."),
|
||||||
|
flat["id"], postal_code
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check that postal code is not too far from the ones listed in config,
|
# Check that postal code is not too far from the ones listed in config,
|
||||||
@ -232,28 +189,19 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
if postal_code and distance_threshold:
|
if postal_code and distance_threshold:
|
||||||
distance = min(
|
distance = min(
|
||||||
tools.distance(
|
tools.distance(
|
||||||
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code),
|
opendata["postal_codes"][postal_code]["gps"],
|
||||||
next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == constraint_postal_code),
|
opendata["postal_codes"][constraint]["gps"],
|
||||||
)
|
)
|
||||||
for constraint_postal_code in constraint["postal_codes"]
|
for constraint in config["constraints"]["postal_codes"]
|
||||||
)
|
)
|
||||||
|
|
||||||
if distance > distance_threshold:
|
if distance > distance_threshold:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
(
|
("Postal code %s found for flat %s is off-constraints. "
|
||||||
"Postal code %s found for flat %s @ %s is off-constraints "
|
"Min distance is %f."),
|
||||||
"(distance is %dm > %dm). Let's consider it is an "
|
postal_code, flat["id"], distance
|
||||||
"artifact match and keep the post without this postal "
|
|
||||||
"code."
|
|
||||||
),
|
|
||||||
postal_code,
|
|
||||||
flat["id"],
|
|
||||||
location,
|
|
||||||
int(distance),
|
|
||||||
int(distance_threshold),
|
|
||||||
)
|
)
|
||||||
postal_code = None
|
postal_code = None
|
||||||
position = None
|
|
||||||
|
|
||||||
# Store it
|
# Store it
|
||||||
if postal_code:
|
if postal_code:
|
||||||
@ -261,45 +209,29 @@ def guess_postal_code(flats_list, constraint, config, distance_threshold=20000):
|
|||||||
if existing_postal_code and existing_postal_code != postal_code:
|
if existing_postal_code and existing_postal_code != postal_code:
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Replacing previous postal code %s by %s for flat %s.",
|
"Replacing previous postal code %s by %s for flat %s.",
|
||||||
existing_postal_code,
|
existing_postal_code, postal_code, flat["id"]
|
||||||
postal_code,
|
|
||||||
flat["id"],
|
|
||||||
)
|
)
|
||||||
flat["flatisfy"]["postal_code"] = postal_code
|
flat["flatisfy"]["postal_code"] = postal_code
|
||||||
else:
|
else:
|
||||||
LOGGER.info("No postal code found for flat %s.", flat["id"])
|
LOGGER.info("No postal code found for flat %s.", flat["id"])
|
||||||
|
|
||||||
if insee_code:
|
|
||||||
flat["flatisfy"]["insee_code"] = insee_code
|
|
||||||
|
|
||||||
if position:
|
|
||||||
flat["flatisfy"]["position"] = position
|
|
||||||
LOGGER.debug(
|
|
||||||
"found postal_code=%s insee_code=%s position=%s for flat %s (%s).",
|
|
||||||
postal_code,
|
|
||||||
insee_code,
|
|
||||||
position,
|
|
||||||
flat["id"],
|
|
||||||
location,
|
|
||||||
)
|
|
||||||
|
|
||||||
return flats_list
|
return flats_list
|
||||||
|
|
||||||
|
|
||||||
def guess_stations(flats_list, constraint, config):
|
def guess_stations(flats_list, config, distance_threshold=1500):
|
||||||
"""
|
"""
|
||||||
Try to match the station field with a list of available stations nearby.
|
Try to match the station field with a list of available stations nearby.
|
||||||
|
|
||||||
:param flats_list: A list of flats dict.
|
:param flats_list: A list of flats dict.
|
||||||
:param constraint: The constraint that the ``flats_list`` should satisfy.
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
|
:param distance_threshold: Maximum distance (in meters) between the center
|
||||||
|
of the postal code and the station to consider it ok.
|
||||||
|
|
||||||
:return: An updated list of flats dict with guessed nearby stations.
|
:return: An updated list of flats dict with guessed nearby stations.
|
||||||
"""
|
"""
|
||||||
distance_threshold = config["max_distance_housing_station"]
|
|
||||||
opendata = {
|
opendata = {
|
||||||
"postal_codes": data.load_data(PostalCode, constraint, config),
|
"postal_codes": data.load_data("postal_codes", config),
|
||||||
"stations": data.load_data(PublicTransport, constraint, config),
|
"stations": data.load_data("ratp", config)
|
||||||
}
|
}
|
||||||
|
|
||||||
for flat in flats_list:
|
for flat in flats_list:
|
||||||
@ -307,28 +239,19 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
|
|
||||||
if not flat_station:
|
if not flat_station:
|
||||||
# Skip everything if empty station
|
# Skip everything if empty station
|
||||||
LOGGER.info("No stations field for flat %s, skipping stations lookup.", flat["id"])
|
LOGGER.info(
|
||||||
|
"No station field for flat %s, skipping stations lookup.",
|
||||||
|
flat["id"]
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Woob modules can return several stations in a comma-separated list.
|
matched_stations = fuzzy_match(
|
||||||
flat_stations = flat_station.split(",")
|
flat_station,
|
||||||
# But some stations containing a comma exist, so let's add the initial
|
opendata["stations"].keys(),
|
||||||
# value to the list of stations to check if there was one.
|
|
||||||
if len(flat_stations) > 1:
|
|
||||||
flat_stations.append(flat_station)
|
|
||||||
|
|
||||||
matched_stations = []
|
|
||||||
for tentative_station in flat_stations:
|
|
||||||
matched_stations += fuzzy_match(
|
|
||||||
tentative_station,
|
|
||||||
[x.name for x in opendata["stations"]],
|
|
||||||
limit=10,
|
limit=10,
|
||||||
threshold=50,
|
threshold=50
|
||||||
)
|
)
|
||||||
|
|
||||||
# Keep only one occurrence of each station
|
|
||||||
matched_stations = list(set(matched_stations))
|
|
||||||
|
|
||||||
# Filter out the stations that are obviously too far and not well
|
# Filter out the stations that are obviously too far and not well
|
||||||
# guessed
|
# guessed
|
||||||
good_matched_stations = []
|
good_matched_stations = []
|
||||||
@ -336,64 +259,75 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
if postal_code:
|
if postal_code:
|
||||||
# If there is a postal code, check that the matched station is
|
# If there is a postal code, check that the matched station is
|
||||||
# closed to it
|
# closed to it
|
||||||
postal_code_gps = next((x.lat, x.lng) for x in opendata["postal_codes"] if x.postal_code == postal_code)
|
postal_code_gps = opendata["postal_codes"][postal_code]["gps"]
|
||||||
for station in matched_stations:
|
for station in matched_stations:
|
||||||
# Note that multiple stations with the same name exist in a
|
# opendata["stations"] is a dict mapping station names to list
|
||||||
# city, hence the list of stations objects for a given matching
|
# of coordinates, for efficiency. Note that multiple stations
|
||||||
# station name.
|
# with the same name exist in a city, hence the list of
|
||||||
stations_objects = [x for x in opendata["stations"] if x.name == station[0]]
|
# coordinates.
|
||||||
for station_data in stations_objects:
|
for station_data in opendata["stations"][station[0]]:
|
||||||
distance = tools.distance((station_data.lat, station_data.lng), postal_code_gps)
|
distance = tools.distance(station_data["gps"],
|
||||||
|
postal_code_gps)
|
||||||
if distance < distance_threshold:
|
if distance < distance_threshold:
|
||||||
# If at least one of the coordinates for a given
|
# If at least one of the coordinates for a given
|
||||||
# station is close enough, that's ok and we can add
|
# station is close enough, that's ok and we can add
|
||||||
# the station
|
# the station
|
||||||
good_matched_stations.append(
|
good_matched_stations.append({
|
||||||
{
|
|
||||||
"key": station[0],
|
"key": station[0],
|
||||||
"name": station_data.name,
|
"name": station_data["name"],
|
||||||
"confidence": station[1],
|
"confidence": station[1],
|
||||||
"gps": (station_data.lat, station_data.lng),
|
"gps": station_data["gps"]
|
||||||
}
|
})
|
||||||
)
|
|
||||||
break
|
break
|
||||||
LOGGER.info(
|
LOGGER.debug(
|
||||||
("Station %s is too far from flat %s (%dm > %dm), discarding this station."),
|
"Station %s is too far from flat %s, discarding it.",
|
||||||
station[0],
|
station[0], flat["id"]
|
||||||
flat["id"],
|
|
||||||
int(distance),
|
|
||||||
int(distance_threshold),
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
LOGGER.info("No postal code for flat %s, skipping stations detection.", flat["id"])
|
|
||||||
|
|
||||||
if not good_matched_stations:
|
|
||||||
# No stations found, log it and cotninue with next housing
|
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"No stations found for flat %s, matching %s.",
|
("No postal code for flat %s, keeping all the matched "
|
||||||
flat["id"],
|
"stations with half confidence."),
|
||||||
flat["station"],
|
flat["id"]
|
||||||
)
|
)
|
||||||
continue
|
# Otherwise, we keep every matching station but with half
|
||||||
|
# confidence
|
||||||
|
good_matched_stations = [
|
||||||
|
{
|
||||||
|
"name": station[0],
|
||||||
|
"confidence": station[1] * 0.5,
|
||||||
|
"gps": station_gps
|
||||||
|
}
|
||||||
|
for station in matched_stations
|
||||||
|
for station_gps in opendata["stations"][station[0]]
|
||||||
|
]
|
||||||
|
|
||||||
|
# Store matched stations and the associated confidence
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Found stations for flat %s: %s (matching %s).",
|
"Found stations for flat %s: %s.",
|
||||||
flat["id"],
|
flat["id"],
|
||||||
", ".join(x["name"] for x in good_matched_stations),
|
", ".join(x["name"] for x in good_matched_stations)
|
||||||
flat["station"],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# If some stations were already filled in and the result is different,
|
# If some stations were already filled in and the result is different,
|
||||||
# display some warning to the user
|
# display some warning to the user
|
||||||
if "matched_stations" in flat["flatisfy"] and (
|
if (
|
||||||
|
"matched_stations" in flat["flatisfy"] and
|
||||||
|
(
|
||||||
# Do a set comparison, as ordering is not important
|
# Do a set comparison, as ordering is not important
|
||||||
set([station["name"] for station in flat["flatisfy"]["matched_stations"]])
|
set([
|
||||||
!= set([station["name"] for station in good_matched_stations])
|
station["name"]
|
||||||
|
for station in flat["flatisfy"]["matched_stations"]
|
||||||
|
]) !=
|
||||||
|
set([
|
||||||
|
station["name"]
|
||||||
|
for station in good_matched_stations
|
||||||
|
])
|
||||||
|
)
|
||||||
):
|
):
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
"Replacing previously fetched stations for flat %s. Found "
|
"Replacing previously fetched stations for flat %s. Found "
|
||||||
"stations differ from the previously found ones.",
|
"stations differ from the previously found ones.",
|
||||||
flat["id"],
|
flat["id"]
|
||||||
)
|
)
|
||||||
|
|
||||||
flat["flatisfy"]["matched_stations"] = good_matched_stations
|
flat["flatisfy"]["matched_stations"] = good_matched_stations
|
||||||
@ -401,13 +335,12 @@ def guess_stations(flats_list, constraint, config):
|
|||||||
return flats_list
|
return flats_list
|
||||||
|
|
||||||
|
|
||||||
def compute_travel_times(flats_list, constraint, config):
|
def compute_travel_times(flats_list, config):
|
||||||
"""
|
"""
|
||||||
Compute the travel time between each flat and the points listed in the
|
Compute the travel time between each flat and the points listed in the
|
||||||
constraints.
|
constraints.
|
||||||
|
|
||||||
:param flats_list: A list of flats dict.
|
:param flats_list: A list of flats dict.
|
||||||
:param constraint: The constraint that the ``flats_list`` should satisfy.
|
|
||||||
:param config: A config dict.
|
:param config: A config dict.
|
||||||
|
|
||||||
:return: An updated list of flats dict with computed travel times.
|
:return: An updated list of flats dict with computed travel times.
|
||||||
@ -418,8 +351,9 @@ def compute_travel_times(flats_list, constraint, config):
|
|||||||
if not flat["flatisfy"].get("matched_stations", []):
|
if not flat["flatisfy"].get("matched_stations", []):
|
||||||
# Skip any flat without matched stations
|
# Skip any flat without matched stations
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Skipping travel time computation for flat %s. No matched stations.",
|
"Skipping travel time computation for flat %s. No matched "
|
||||||
flat["id"],
|
"stations.",
|
||||||
|
flat["id"]
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -429,28 +363,22 @@ def compute_travel_times(flats_list, constraint, config):
|
|||||||
|
|
||||||
# For each place, loop over the stations close to the flat, and find
|
# For each place, loop over the stations close to the flat, and find
|
||||||
# the minimum travel time.
|
# the minimum travel time.
|
||||||
for place_name, place in constraint["time_to"].items():
|
for place_name, place in config["constraints"]["time_to"].items():
|
||||||
mode = place.get("mode", "PUBLIC_TRANSPORT")
|
time_to_place = None
|
||||||
time_to_place_dict = None
|
|
||||||
for station in flat["flatisfy"]["matched_stations"]:
|
for station in flat["flatisfy"]["matched_stations"]:
|
||||||
# Time from station is a dict with time and route
|
time_from_station = tools.get_travel_time_between(
|
||||||
time_from_station_dict = tools.get_travel_time_between(
|
station["gps"],
|
||||||
station["gps"], place["gps"], TimeToModes[mode], config
|
place["gps"],
|
||||||
|
config
|
||||||
)
|
)
|
||||||
if time_from_station_dict and (
|
if time_from_station and (time_from_station < time_to_place or
|
||||||
time_from_station_dict["time"] < time_to_place_dict or time_to_place_dict is None
|
time_to_place is None):
|
||||||
):
|
time_to_place = time_from_station
|
||||||
# If starting from this station makes the route to the
|
|
||||||
# specified place shorter, update
|
|
||||||
time_to_place_dict = time_from_station_dict
|
|
||||||
|
|
||||||
if time_to_place_dict:
|
if time_to_place:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
"Travel time between %s and flat %s by %s is %ds.",
|
"Travel time between %s and flat %s is %ds.",
|
||||||
place_name,
|
place_name, flat["id"], time_to_place["time"]
|
||||||
flat["id"],
|
|
||||||
mode,
|
|
||||||
time_to_place_dict["time"],
|
|
||||||
)
|
)
|
||||||
flat["flatisfy"]["time_to"][place_name] = time_to_place_dict
|
flat["flatisfy"]["time_to"][place_name] = time_to_place
|
||||||
return flats_list
|
return flats_list
|
||||||
|
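The fuzzy_match hunk above boils down to a simple scoring rule: a candidate matches when its normalized form appears verbatim inside the normalized query, the match length is the raw score, and scores are rescaled so the best match gets 100 before the threshold is applied. A minimal, self-contained sketch of that rule, where `normalize` is only a stand-in for the project's real normalization helper (which is not part of this compare):

    import unicodedata

    def normalize(s):
        # Stand-in normalizer: strip accents and lowercase.
        nfkd = unicodedata.normalize("NFKD", s)
        return "".join(ch for ch in nfkd if not unicodedata.combining(ch)).lower()

    def fuzzy_match_sketch(query, choices, limit=3, threshold=75):
        normalized_query = normalize(query)
        normalized_choices = [normalize(choice) for choice in choices]
        # Keep the normalized choices found verbatim in the query, longest first.
        matches = sorted(
            [(c, len(c)) for c in set(normalized_choices) if c in normalized_query],
            key=lambda x: x[1],
            reverse=True,
        )
        if limit:
            matches = matches[:limit]
        # Rescale so the longest match gets a confidence of 100.
        if matches:
            best = max(length for _, length in matches)
            matches = [(c, int(length / best * 100)) for c, length in matches]
        # Map back to the original strings and drop low-confidence matches.
        return [
            (choices[normalized_choices.index(c)], score)
            for c, score in matches
            if score >= threshold
        ]

    print(fuzzy_match_sketch("Appartement Rennes (35000)", ["Rennes", "Le Rheu"]))
    # [('Rennes', 100)]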
@@ -6,22 +6,13 @@ This modules defines an SQLAlchemy ORM model for a flat.
 from __future__ import absolute_import, print_function, unicode_literals

 import logging
-import enum

 import arrow
+import enum

 from sqlalchemy import (
-    Boolean,
-    Column,
-    DateTime,
-    Enum,
-    Float,
-    SmallInteger,
-    String,
-    Text,
-    inspect,
+    Column, DateTime, Enum, Float, SmallInteger, String, Text
 )
-from sqlalchemy.orm import validates

 from flatisfy.database.base import BASE
 from flatisfy.database.types import MagicJSON

@@ -34,7 +25,6 @@ class FlatUtilities(enum.Enum):
     """
     An enum of the possible utilities status for a flat entry.
     """

     included = 10
     unknown = 0
     excluded = -10

@@ -44,7 +34,6 @@ class FlatStatus(enum.Enum):
     """
     An enum of the possible status for a flat entry.
     """

     user_deleted = -100
     duplicate = -20
     ignored = -10

@@ -57,18 +46,21 @@ class FlatStatus(enum.Enum):

 # List of statuses that are automatically handled, and which the user cannot
 # manually set through the UI.
-AUTOMATED_STATUSES = [FlatStatus.new, FlatStatus.duplicate, FlatStatus.ignored]
+AUTOMATED_STATUSES = [
+    FlatStatus.new,
+    FlatStatus.duplicate,
+    FlatStatus.ignored
+]


 class Flat(BASE):
     """
     SQLAlchemy ORM model to store a flat.
     """

     __tablename__ = "flats"
-    __searchable__ = ["title", "text", "station", "location", "details", "notes"]
+    __searchable__ = ["title", "text", "station", "location", "details"]

-    # Woob data
+    # Weboob data
     id = Column(String, primary_key=True)
     area = Column(Float)
     bedrooms = Column(Float)

@@ -88,95 +80,16 @@ class Flat(BASE):
     merged_ids = Column(MagicJSON)
     notes = Column(Text)
     notation = Column(SmallInteger, default=0)
-    is_expired = Column(Boolean, default=False)

     # Flatisfy data
     # TODO: Should be in another table with relationships
     flatisfy_stations = Column(MagicJSON)
     flatisfy_postal_code = Column(String)
     flatisfy_time_to = Column(MagicJSON)
-    flatisfy_constraint = Column(String)
-    flatisfy_position = Column(MagicJSON)

     # Status
     status = Column(Enum(FlatStatus), default=FlatStatus.new)

-    # Date for visit
-    visit_date = Column(DateTime)
-
-    @validates("utilities")
-    def validate_utilities(self, _, utilities):
-        """
-        Utilities validation method
-        """
-        if isinstance(utilities, FlatUtilities):
-            return utilities
-
-        if utilities == "C.C.":
-            return FlatUtilities.included
-        elif utilities == "H.C.":
-            return FlatUtilities.excluded
-        return FlatUtilities.unknown
-
-    @validates("status")
-    def validate_status(self, _, status):
-        """
-        Status validation method
-        """
-        if isinstance(status, FlatStatus):
-            return status
-
-        try:
-            return getattr(FlatStatus, status)
-        except (AttributeError, TypeError):
-            LOGGER.warn("Unkown flat status %s, ignoring it.", status)
-            return self.status.default.arg
-
-    @validates("notation")
-    def validate_notation(self, _, notation):
-        """
-        Notation validation method
-        """
-        try:
-            notation = int(notation)
-            assert notation >= 0 and notation <= 5
-        except (ValueError, AssertionError):
-            raise ValueError("notation should be an integer between 0 and 5")
-        return notation
-
-    @validates("date")
-    def validate_date(self, _, date):
-        """
-        Date validation method
-        """
-        if date:
-            return arrow.get(date).naive
-        return None
-
-    @validates("visit_date")
-    def validate_visit_date(self, _, visit_date):
-        """
-        Visit date validation method
-        """
-        if visit_date:
-            return arrow.get(visit_date).naive
-        return None
-
-    @validates("photos")
-    def validate_photos(self, _, photos):
-        """
-        Photos validation method
-        """
-        if not photos:
-            photos = []
-        for photo in photos:
-            try:
-                # Remove computed hash to avoid storing it in db
-                del photo["hash"]
-            except KeyError:
-                pass
-        return photos

     @staticmethod
     def from_dict(flat_dict):
         """

@@ -186,25 +99,58 @@ class Flat(BASE):
         # Handle flatisfy metadata
         flat_dict = flat_dict.copy()
         if "flatisfy" in flat_dict:
-            flat_dict["flatisfy_stations"] = flat_dict["flatisfy"].get("matched_stations", [])
-            flat_dict["flatisfy_postal_code"] = flat_dict["flatisfy"].get("postal_code", None)
-            flat_dict["flatisfy_position"] = flat_dict["flatisfy"].get("position", None)
-            flat_dict["flatisfy_time_to"] = flat_dict["flatisfy"].get("time_to", {})
-            flat_dict["flatisfy_constraint"] = flat_dict["flatisfy"].get("constraint", "default")
+            flat_dict["flatisfy_stations"] = (
+                flat_dict["flatisfy"].get("matched_stations", [])
+            )
+            flat_dict["flatisfy_postal_code"] = (
+                flat_dict["flatisfy"].get("postal_code", None)
+            )
+            flat_dict["flatisfy_time_to"] = (
+                flat_dict["flatisfy"].get("time_to", {})
+            )
             del flat_dict["flatisfy"]

-        flat_dict = {k: v for k, v in flat_dict.items() if k in inspect(Flat).columns.keys()}
-        return Flat(**flat_dict)
+        # Handle utilities field
+        if not isinstance(flat_dict["utilities"], FlatUtilities):
+            if flat_dict["utilities"] == "C.C.":
+                flat_dict["utilities"] = FlatUtilities.included
+            elif flat_dict["utilities"] == "H.C.":
+                flat_dict["utilities"] = FlatUtilities.excluded
+            else:
+                flat_dict["utilities"] = FlatUtilities.unknown
+
+        # Handle status field
+        flat_status = flat_dict.get("status", "new")
+        if not isinstance(flat_status, FlatStatus):
+            try:
+                flat_dict["status"] = getattr(FlatStatus, flat_status)
+            except AttributeError:
+                if "status" in flat_dict:
+                    del flat_dict["status"]
+                LOGGER.warn("Unkown flat status %s, ignoring it.",
+                            flat_status)
+
+        # Handle date field
+        flat_dict["date"] = arrow.get(flat_dict["date"]).naive
+
+        flat_object = Flat()
+        flat_object.__dict__.update(flat_dict)
+        return flat_object

     def __repr__(self):
         return "<Flat(id=%s, urls=%s)>" % (self.id, self.urls)


     def json_api_repr(self):
         """
         Return a dict representation of this flat object that is JSON
         serializable.
         """
-        flat_repr = {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
+        flat_repr = {
+            k: v
+            for k, v in self.__dict__.items()
+            if not k.startswith("_")
+        }
         if isinstance(flat_repr["status"], FlatStatus):
             flat_repr["status"] = flat_repr["status"].name
         if isinstance(flat_repr["utilities"], FlatUtilities):
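The left-hand side of this model moves all input coercion into SQLAlchemy ``@validates`` hooks, so ``from_dict`` shrinks to a filter over the mapped columns; the right-hand side does the same coercion inline. A hedged sketch of that validator pattern on a toy model (the schema and values here are illustrative, not the real Flat table):

    import enum
    from sqlalchemy import Column, Integer, String, inspect
    from sqlalchemy.orm import declarative_base, validates

    Base = declarative_base()

    class Utilities(enum.Enum):
        included = 10
        unknown = 0
        excluded = -10

    class Listing(Base):
        __tablename__ = "listings"

        id = Column(String, primary_key=True)
        utilities = Column(String)
        notation = Column(Integer, default=0)

        @validates("utilities")
        def validate_utilities(self, _, utilities):
            # Map the scraped "C.C." / "H.C." markers onto enum names.
            mapping = {"C.C.": Utilities.included, "H.C.": Utilities.excluded}
            return mapping.get(utilities, Utilities.unknown).name

        @validates("notation")
        def validate_notation(self, _, notation):
            # Coerce and range-check at assignment time.
            notation = int(notation)
            if not 0 <= notation <= 5:
                raise ValueError("notation should be an integer between 0 and 5")
            return notation

        @staticmethod
        def from_dict(d):
            # Keep only keys that are mapped columns, as the left-hand side
            # does with inspect(Flat).columns.keys().
            known = inspect(Listing).columns.keys()
            return Listing(**{k: v for k, v in d.items() if k in known})

    flat = Listing.from_dict({"id": "1@test", "utilities": "C.C.",
                              "notation": "4", "junk": True})
    print(flat.utilities, flat.notation)  # included 4

Validators fire on every attribute assignment, so the same checks also cover later updates, which is one reason to prefer them over one-off conversion inside ``from_dict``.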
@@ -1,44 +0,0 @@
-# coding: utf-8
-"""
-This modules defines an SQLAlchemy ORM model for a postal code opendata.
-"""
-# pylint: disable=locally-disabled,invalid-name,too-few-public-methods
-from __future__ import absolute_import, print_function, unicode_literals
-
-import logging
-
-from sqlalchemy import Column, Float, Integer, String, UniqueConstraint
-
-from flatisfy.database.base import BASE
-
-
-LOGGER = logging.getLogger(__name__)
-
-
-class PostalCode(BASE):
-    """
-    SQLAlchemy ORM model to store a postal code opendata.
-    """
-
-    __tablename__ = "postal_codes"
-
-    id = Column(Integer, primary_key=True)
-    # Area is an identifier to prevent loading unnecessary stops. For now it is
-    # following ISO 3166-2.
-    area = Column(String, index=True)
-    postal_code = Column(String, index=True)
-    insee_code = Column(String, index=True)
-    name = Column(String, index=True)
-    lat = Column(Float)
-    lng = Column(Float)
-    UniqueConstraint("postal_code", "name")
-
-    def __repr__(self):
-        return "<PostalCode(id=%s)>" % self.id
-
-    def json_api_repr(self):
-        """
-        Return a dict representation of this postal code object that is JSON
-        serializable.
-        """
-        return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
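The metadata hunks above repeatedly call ``tools.distance`` on ``(lat, lng)`` pairs taken from these rows. That helper is not part of this compare; a standard haversine great-circle distance in meters, the usual way such a helper is written, would look like this sketch:

    import math

    def distance(gps1, gps2):
        # Great-circle (haversine) distance in meters on a spherical Earth
        # of radius ~6371 km.
        lat1, lng1 = (math.radians(x) for x in gps1)
        lat2, lng2 = (math.radians(x) for x in gps2)
        a = (
            math.sin((lat2 - lat1) / 2) ** 2
            + math.cos(lat1) * math.cos(lat2) * math.sin((lng2 - lng1) / 2) ** 2
        )
        return 2 * 6371000 * math.asin(math.sqrt(a))

    # Rennes city centre to Cesson-Sévigné: about 5.5 km.
    print(round(distance((48.1173, -1.6778), (48.1211, -1.6039))))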
@@ -1,34 +0,0 @@
-# coding: utf-8
-"""
-This modules defines an SQLAlchemy ORM model for public transport opendata.
-"""
-# pylint: disable=locally-disabled,invalid-name,too-few-public-methods
-from __future__ import absolute_import, print_function, unicode_literals
-
-import logging
-
-from sqlalchemy import Column, Float, Integer, String
-
-from flatisfy.database.base import BASE
-
-
-LOGGER = logging.getLogger(__name__)
-
-
-class PublicTransport(BASE):
-    """
-    SQLAlchemy ORM model to store public transport opendata.
-    """
-
-    __tablename__ = "public_transports"
-
-    id = Column(Integer, primary_key=True)
-    # Area is an identifier to prevent loading unnecessary stops. For now it is
-    # following ISO 3166-2.
-    area = Column(String, index=True)
-    name = Column(String)
-    lat = Column(Float)
-    lng = Column(Float)
-
-    def __repr__(self):
-        return "<PublicTransport(id=%s)>" % self.id
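On the right-hand side of this compare, these ORM rows are replaced by a plain dict mapping a station name to a list of coordinates, since several stations can share one name; the guess_stations hunk calls this layout out "for efficiency". A sketch of building that index from row-like objects (the ``Station`` tuple is a hypothetical stand-in for the model above):

    from collections import defaultdict, namedtuple

    Station = namedtuple("Station", ["name", "lat", "lng"])

    rows = [
        Station("République", 48.1105, -1.6793),
        Station("République", 48.8672, 2.3637),  # same name, another city
    ]

    stations_by_name = defaultdict(list)
    for row in rows:
        stations_by_name[row.name].append(
            {"name": row.name, "gps": (row.lat, row.lng)}
        )

    # One dict lookup instead of a scan over every row:
    for station_data in stations_by_name["République"]:
        print(station_data["gps"])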
@@ -1,52 +0,0 @@
-{
-    "id": "122509451@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/cleunay-arsenal-redon/122509451.htm?p=",
-    "title": "Appartement 3 pi\u00e8ces 78m\u00b2 - Rennes",
-    "area": 78,
-    "cost": 211000,
-    "price_per_meter": 2705.128205128205128205128205,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-19T22:39:00",
-    "location": " Rennes (35000)",
-    "station": "Arsenal - Redon",
-    "text": "Appartement quartier Arsenal Redon, \u00e0 vendre, type 3 de 78 m\u00b2. Il se compose d'une entr\u00e9e, d'un salon-s\u00e9jour lumineux de 33 m\u00b2 orient\u00e9 sud donnant sur une terrasse, de deux chambres, d'une cuisine ind\u00e9pendante, d'une salle de bains et d'un toilette. Vous disposerez d'un garage ferm\u00e9. Situ\u00e9 entre le centre ville et la future station m\u00e9tro Mabilais (ligne B), proximit\u00e9 imm\u00e9diate des commerces, \u00e9coles.. Bien soumis au statut de la copropri\u00e9t\u00e9. Charges annuelles courantes: 962e Agence immobili\u00e8re ERA Rennes Aristide Briand Agent Commercial: Guillaume DE KERANFLECH RSAC: 818942955.",
-    "phone": null,
-    "photos": [{
-        "id": "0oj57y4pvtz7537ibvjq1agi9hrpctm96o30wknpc.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/o/j/5/0oj57y4pvtz7537ibvjq1agi9hrpctm96o30wknpc.jpg",
-        "data": null
-    }, {
-        "id": "0s0kr6fw0hbqkwm5m2oxhi8yysk6mfxb9ctcrx2bk.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/s/0/k/0s0kr6fw0hbqkwm5m2oxhi8yysk6mfxb9ctcrx2bk.jpg",
-        "data": null
-    }, {
-        "id": "0z8q9eq4rprqfymp0mpcezrq6kxp8uxknf5pgrx8g.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/z/8/q/0z8q9eq4rprqfymp0mpcezrq6kxp8uxknf5pgrx8g.jpg",
-        "data": null
-    }, {
-        "id": "01ti2ovzcuyx4e14qfqqgatynges1grnalb4eau4g.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/1/t/i/01ti2ovzcuyx4e14qfqqgatynges1grnalb4eau4g.jpg",
-        "data": null
-    }, {
-        "id": "250ckvp15x8eeetuynem2kj7x8z12y66kay9okf0g.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/2/5/0/c/250ckvp15x8eeetuynem2kj7x8z12y66kay9okf0g.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Box": "1",
-        "Cuisine": "s\u00e9par\u00e9e",
-        "Pi\u00e8ces": "3",
-        "Etage": "RDC",
-        "Reference": "872GK-01",
-        "Chambres": "2",
-        "Chauffage": "individuel",
-        "Entr\u00e9e": "",
-        "Surface": "78 m\u00b2",
-        "Terrasse": "1",
-        "Etages": "5",
-        "Salle de S\u00e9jour": ""
-    }
-}
@@ -1,60 +0,0 @@
-{
-    "id": "123312807@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/brequigny/123312807.htm?p=",
-    "title": "Appartement 3 pi\u00e8ces 58m\u00b2 - Rennes",
-    "area": 58,
-    "cost": 131440,
-    "price_per_meter": 2266.206896551724137931034483,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-20T22:35:00",
-    "location": " Rennes (35200)",
-    "station": "Cl\u00e9menceau",
-    "text": "OGIMM vous propose \u00e0 l'achat un appartement de type 3 dans une petite copropri\u00e9t\u00e9 de 4 \u00e9tages. Bien situ\u00e9, proche du boulevard Cl\u00e9menceau et des Bus C5 et C3, de la rue de Nantes, il est en tr\u00e8s bon \u00e9tat et au calme. Il est compos\u00e9 de: une entr\u00e9e avec placards, une cuisine s\u00e9par\u00e9e am\u00e9nag\u00e9e et \u00e9quip\u00e9e (possibilit\u00e9 d'ouverture), d'un balcon loggia, d'un s\u00e9jour lumineux au sud, de 2 chambres, d'une salle d'eau et d'un WC s\u00e9par\u00e9. Pr\u00e9sence d'une cave et d'un parking ext\u00e9rieur. Station de M\u00e9tro la plus proche Cl\u00e9menceau. Copropri\u00e9t\u00e9 saine et bien tenue, les charges de 1745.88e par an comprenant le chauffage (avec compteur individuel), l'eau chaude et froide, et l'entretien de l'immeuble. Copropri\u00e9t\u00e9 de 16 appartements. Actuellement lou\u00e9 650e charges comprises. A voir rapidement ! Dont 6.00 % honoraires TTC \u00e0 la charge de l'acqu\u00e9reur.",
-    "phone": null,
-    "photos": [{
-        "id": "1ir7ortudferww8to788kd38lmlnpx52ia5st7280.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/i/r/7/1ir7ortudferww8to788kd38lmlnpx52ia5st7280.jpg",
-        "data": null
-    }, {
-        "id": "08wbr1ivnz26gnyeofyjg02zi0d1vd1eijszcrgg0.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/8/w/b/08wbr1ivnz26gnyeofyjg02zi0d1vd1eijszcrgg0.jpg",
-        "data": null
-    }, {
-        "id": "0np6439w3557sclwu7b4sq7h7hntm9tizwrrtdr7k.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/n/p/6/0np6439w3557sclwu7b4sq7h7hntm9tizwrrtdr7k.jpg",
-        "data": null
-    }, {
-        "id": "0rc6ac2jlit0r27d1tmy2y8pqbdzps7gnzzmdds00.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/r/c/6/0rc6ac2jlit0r27d1tmy2y8pqbdzps7gnzzmdds00.jpg",
-        "data": null
-    }, {
-        "id": "19ebzllpk308rw1ei43a0t59fnjxohnidtvc5thq8.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/9/e/b/19ebzllpk308rw1ei43a0t59fnjxohnidtvc5thq8.jpg",
-        "data": null
-    }, {
-        "id": "07ize6lu9ssyv1ltjiux8gs56rgbyweai9wboor9c.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/7/i/z/07ize6lu9ssyv1ltjiux8gs56rgbyweai9wboor9c.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Cuisine": "s\u00e9par\u00e9e",
-        "Pi\u00e8ces": "3",
-        "Salle de S\u00e9jour": "",
-        "Reference": "OG9242",
-        "Chambres": "2",
-        "Salle d'eau": "1",
-        "Entr\u00e9e": "",
-        "Balcon": "1",
-        "Surface": "58 m\u00b2",
-        "Ann\u00e9e de construction": "1963",
-        "Calme": "",
-        "Etages": "4",
-        "Parking": "1",
-        "Rangements": "",
-        "Toilette": "1",
-        "Orientation": "Est, Sud"
-    }
-}
@@ -1,62 +0,0 @@
-{
-    "id": "123314207@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/brequigny/123314207.htm?p=",
-    "title": "Appartement 3 pi\u00e8ces 58m\u00b2 - Rennes",
-    "area": 58,
-    "cost": 131440,
-    "price_per_meter": 2266.206896551724137931034483,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-20T22:35:00",
-    "location": " Rennes (35200)",
-    "station": "Cl\u00e9menceau",
-    "text": "OGIMM vous propose \u00e0 l'achat un appartement de type 3 au 1er \u00e9tage d'une petite r\u00e9sidence de 4 \u00e9tages. Au calme, propre, il est proche de la rue de Nantes, des Bus C5 et C3. La station de M\u00e9tro la plus proche est Cl\u00e9menceau. Vous aurez: une entr\u00e9e avec placards, une cuisine am\u00e9nag\u00e9e et \u00e9quip\u00e9e, un balcon loggia, une salle d'eau, un WC s\u00e9par\u00e9, 2 chambres, une cave et un parking. Les charges de copropri\u00e9t\u00e9 de 1526.58e par an comprennent le chauffage et l'eau chaude et froide avec comptage individuel. Locataire en place avec un loyer de 650e par mois. Copropri\u00e9t\u00e9 de 12 appartements. A voir vite ! Dont 6.00 % honoraires TTC \u00e0 la charge de l'acqu\u00e9reur.",
-    "phone": null,
-    "photos": [{
-        "id": "18a4t9w050xd7welkm25tg5ytv0wjbflrkyun1p1c.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/8/a/4/18a4t9w050xd7welkm25tg5ytv0wjbflrkyun1p1c.jpg",
-        "data": null
-    }, {
-        "id": "21q7r77zylh8k4mdlumg3cfcgmd4y3ixr9ggipk3k.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/2/1/q/7/21q7r77zylh8k4mdlumg3cfcgmd4y3ixr9ggipk3k.jpg",
-        "data": null
-    }, {
-        "id": "0eysaqsq7ti47y42lakhzwr2s9jdkvwsvvoqfq8e8.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/e/y/s/0eysaqsq7ti47y42lakhzwr2s9jdkvwsvvoqfq8e8.jpg",
-        "data": null
-    }, {
-        "id": "02tt2n650l5m908yiqkre3vu0cl9cxwqtg26xtwqo.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/2/t/t/02tt2n650l5m908yiqkre3vu0cl9cxwqtg26xtwqo.jpg",
-        "data": null
-    }, {
-        "id": "03wsh6bojie9eunp1ef9tynop2zkanx1qgm6lq41s.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/3/w/s/03wsh6bojie9eunp1ef9tynop2zkanx1qgm6lq41s.jpg",
-        "data": null
-    }, {
-        "id": "170whetachmm8357xz30ll7e3flrrqedc3ld2u0hs.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/7/0/w/170whetachmm8357xz30ll7e3flrrqedc3ld2u0hs.jpg",
-        "data": null
-    }, {
-        "id": "1unpbelnbrnsxxoxy0zd0me8nf4jgd124yomnbvnk.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/u/n/p/1unpbelnbrnsxxoxy0zd0me8nf4jgd124yomnbvnk.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Pi\u00e8ces": "3",
-        "Etage": "1",
-        "Reference": "OG9243",
-        "Chambres": "2",
-        "Salle d'eau": "1",
-        "Chauffage": "radiateur",
-        "Entr\u00e9e": "",
-        "Surface": "58 m\u00b2",
-        "Ann\u00e9e de construction": "1963",
-        "Calme": "",
-        "Etages": "4",
-        "Rangements": "",
-        "Toilette": "1",
-        "Orientation": "Est, Sud"
-    }
-}
[deleted binary image, 26 KiB]
@@ -1,67 +0,0 @@
-{
-    "id": "124910113@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/maurepas-patton/124910113.htm?p=",
-    "title": "Appartement 3 pi\u00e8ces 65m\u00b2 - Rennes",
-    "area": 65,
-    "cost": 145275,
-    "price_per_meter": 2235,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-20T02:09:00",
-    "location": "225 RUE DE FOUGERES Rennes (35700)",
-    "station": "",
-    "text": "Rennes en exclusivit\u00e9 rue de Foug\u00e8res - Grand Appartement 3 pi\u00e8ces avec Balcon dans une copropri\u00e9t\u00e9 avec ascenseur - Travaux \u00e0 pr\u00e9voir - 2 chambres - Cave et garage.",
-    "phone": null,
-    "photos": [{
-        "id": "17b055i7hici1wxr951unlycfs5rhai73sbnnv2ki.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/1/7/b/0/17b055i7hici1wxr951unlycfs5rhai73sbnnv2ki.jpg",
-        "data": null
-    }, {
-        "id": "1s5t0lal78twswu22mahad9vtc75y3s5utuit2yte.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/1/s/5/t/1s5t0lal78twswu22mahad9vtc75y3s5utuit2yte.jpg",
-        "data": null
-    }, {
-        "id": "282rrcholht5full009yb8a5k1xe2jx0yiwtqyite.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/2/8/2/r/282rrcholht5full009yb8a5k1xe2jx0yiwtqyite.jpg",
-        "data": null
-    }, {
-        "id": "0wskjpe0511ak2ynzxual2qa0fp3bmz3ccaoqc5oi.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/0/w/s/k/0wskjpe0511ak2ynzxual2qa0fp3bmz3ccaoqc5oi.jpg",
-        "data": null
-    }, {
-        "id": "0kfne4iignt712pcunkcu2u9e497vt6oi11l30hxe.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/0/k/f/n/0kfne4iignt712pcunkcu2u9e497vt6oi11l30hxe.jpg",
-        "data": null
-    }, {
-        "id": "1jvyyiua1l843w1ohymxcbs9gj9zxvtfiajjfvwle.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/1/j/v/y/1jvyyiua1l843w1ohymxcbs9gj9zxvtfiajjfvwle.jpg",
-        "data": null
-    }, {
-        "id": "1ihj8ufsfdxgfecq03c154hcsj5jo5ysts29wjnia.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/1/i/h/j/1ihj8ufsfdxgfecq03c154hcsj5jo5ysts29wjnia.jpg",
-        "data": null
-    }, {
-        "id": "1g9yb1xe0bc8se0w8jys8ouiscpwer6y6lccd1ltu.jpg",
-        "url": "https://v.seloger.com/s/cdn/x/visuels/1/g/9/y/1g9yb1xe0bc8se0w8jys8ouiscpwer6y6lccd1ltu.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Box": "1",
-        "Pi\u00e8ces": "3",
-        "Etage": "1",
-        "Reference": "MT0135140",
-        "Chambres": "2",
-        "Salle d'eau": "1",
-        "Cave": "",
-        "Ascenseur": "",
-        "Surface": "65 m\u00b2",
-        "Balcon": "1",
-        "Travaux \u00c0 Pr\u00e9voir": "",
-        "Ann\u00e9e de construction": "1968",
-        "Toilettes S\u00e9par\u00e9es": "",
-        "Etages": "6",
-        "Toilette": "1"
-    }
-}
[deleted binary image, 122 KiB]
[deleted binary image, 114 KiB]
[deleted binary image, 24 KiB]
[deleted binary image, 81 KiB]
@@ -1,41 +0,0 @@
-{
-    "id": "127028739@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/centre/127028739.htm?p=",
-    "title": "Appartement 3 pièces 67m² - Rennes",
-    "area": 67,
-    "cost": 155700,
-    "price_per_meter": 2323.8805970149256,
-    "currency": "€",
-    "utilities": "",
-    "date": "2018-01-12T02:10:00",
-    "location": "17 PLACE MARECHAL JUIN Rennes (35000)",
-    "station": "",
-    "text": "Exclusivité Nexity Dans un immeuble de standing, en étage élevé avec ascenseur, Appartement Type 3 de 67 m² exposé Sud / Ouest, un séjour avec balcon et double exposition vue dégagée. Deux chambres dont une avec balcon, salle de douches, WC séparé, cave et parking en sous-sol.",
-    "phone": null,
-    "photos": [
-        {
-            "id": "0an3yarge9y446j653dewxu0jwy33pmwar47k2qym.jpg",
-            "url": "flatisfy/test_files/127028739@seloger.jpg",
-            "data": null
-        }
-    ],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Vue": "",
-        "Pièces": "3",
-        "Etage": "15",
-        "Reference": "MT0136601",
-        "Chambres": "2",
-        "Cave": "",
-        "Balcon": "5 m²",
-        "Surface": "67 m²",
-        "Ascenseur": "",
-        "Etages": "30",
-        "Parking": "1",
-        "Salle de Séjour": ""
-    },
-    "flatisfy": {
-        "postal_code": "35000"
-    }
-}
@@ -1,53 +0,0 @@
-{
-    "id": "127963747@seloger",
-    "url": "http://www.seloger.com/annonces/achat/appartement/rennes-35/127963747.htm?p=",
-    "title": "Appartement 3 pi\u00e8ces 78m\u00b2 - Rennes",
-    "area": 78,
-    "cost": 211000,
-    "price_per_meter": 2705.128205128205128205128205,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-17T17:54:00",
-    "location": " Rennes (35000)",
-    "station": "",
-    "text": "ARSENAL/REDON - CIT\u00c9 JUDICIAIRE. D'une surface de 78 m\u00b2, cet appartement de type T3 est compos\u00e9 au rez-de-chauss\u00e9e comme suit: cuisine am\u00e9nag\u00e9e, deux chambres, salon/salle \u00e0 manger, salle de bain, toilettes.. La belle et lumineuse pi\u00e8ce de vie de 33 m\u00b2 vous permettra d'envisager une disposition agr\u00e9able de votre int\u00e9rieur.. Id\u00e9alement situ\u00e9 dans un secteur recherch\u00e9. Tr\u00e8s bon \u00e9tat.. Un garage situ\u00e9 en sous-sol compl\u00e8te cet appartement.. Contacter Agence ORPI au 02.23.44.37. 47.. 211000 euros Honoraires \u00e0 la charge du vendeur.",
-    "phone": null,
-    "photos": [{
-        "id": "1d9ks91ml67r2zwwcytkg3l4jh4yc8ii3y4fa64u8.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/d/9/k/1d9ks91ml67r2zwwcytkg3l4jh4yc8ii3y4fa64u8.jpg",
-        "data": null
-    }, {
-        "id": "0a95gv0bukbrk77mhe0h4n14j9bx2zrkfikgh7h8g.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/0/a/9/5/0a95gv0bukbrk77mhe0h4n14j9bx2zrkfikgh7h8g.jpg",
-        "data": null
-    }, {
-        "id": "1hd329lc8srsdh71o3iyo2tuv8jw9jutnctvqnv9c.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/h/d/3/1hd329lc8srsdh71o3iyo2tuv8jw9jutnctvqnv9c.jpg",
-        "data": null
-    }, {
-        "id": "1lf8fyr5marcjalerkc914opcc29osb23z9c9648w.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/l/f/8/1lf8fyr5marcjalerkc914opcc29osb23z9c9648w.jpg",
-        "data": null
-    }, {
-        "id": "1yrk6jbek3h7q3f9a3g1vy0kqc2uh7z4yckznrx8g.jpg",
-        "url": "https://v.seloger.com/s/width/800/visuels/1/y/r/k/1yrk6jbek3h7q3f9a3g1vy0kqc2uh7z4yckznrx8g.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Box": "1",
-        "Pi\u00e8ces": "3",
-        "Etage": "RDC",
-        "Reference": "114020E0PULC",
-        "Chambres": "2",
-        "Chauffage": "individuel",
-        "Toilette": "1",
-        "Salle de bain": "1",
-        "Ascenseur": "",
-        "Toilettes S\u00e9par\u00e9es": "",
-        "Surface": "78 m\u00b2",
-        "Salle \u00c0 Manger": "",
-        "Salle de s\u00e9jour": "33 m\u00b2"
-    }
-}
@@ -1,68 +0,0 @@
-{
-    "id": "128358415@seloger",
-    "url": "http://www.seloger.com/annonces/achat/maison/rennes-35/128358415.htm?p=",
-    "title": " 60m\u00b2 - Rennes",
-    "area": 60,
-    "cost": 179888,
-    "price_per_meter": 2998.133333333333333333333333,
-    "currency": "\u20ac",
-    "utilities": "",
-    "date": "2018-01-19T08:46:00",
-    "location": " Rennes (35000)",
-    "station": "",
-    "text": "I@D France - Sarah LECLERC vous propose: Pour les Amoureux de la Pierre, Maison de ville enti\u00e8rement r\u00e9nov\u00e9e avec go\u00fbt et modernit\u00e9, Poutres apparentes dans les 2 chambres, Cuisine am\u00e9nag\u00e9e ouverte sur le salon-salle \u00e0 manger de 30 M 2, Salle de douche, JARDINET et TERRASSE de 95 M 2 (possibilit\u00e9 jardin japonais).. Situ\u00e9e AU COEUR DE LA VILLE, \u00e0 proximit\u00e9 des \u00c9coles, des Commerces et du march\u00e9, tout peut se faire \u00e0 pied.. Ligne de bus \u00e0 proximit\u00e9 (ligne 61).. AUX PORTES DE RENNES (5mn).. Peut se vivre comme un appartement sans les charges de copropri\u00e9t\u00e9 ! BEAUCOUP DE CHARME POUR CE BIEN RARE SUR LE MARCHE !! Honoraires d'agence \u00e0 la charge du vendeur. Information d'affichage \u00e9nerg\u00e9tique sur ce bien: DPE VI indice 0 et GES VI indice 0. La pr\u00e9sente annonce immobili\u00e8re a \u00e9t\u00e9 r\u00e9dig\u00e9e sous la responsabilit\u00e9 \u00e9ditoriale de Mme Sarah LECLERC (ID 27387), Agent Commercial mandataire en immobilier immatricul\u00e9 au Registre Sp\u00e9cial des Agents Commerciaux (RSAC) du Tribunal de Commerce de rennes sous le num\u00e9ro 521558007.",
-    "phone": null,
-    "photos": [{
-        "id": "0j9kfrqnixlcnezpzsgz3g3vnekr6qj8rn7jcv22g.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/j/9/k/0j9kfrqnixlcnezpzsgz3g3vnekr6qj8rn7jcv22g.jpg",
-        "data": null
-    }, {
-        "id": "0yqp4d8arum1iy1pk9f1xh1req853dnhutgdjkcoo.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/y/q/p/0yqp4d8arum1iy1pk9f1xh1req853dnhutgdjkcoo.jpg",
-        "data": null
-    }, {
-        "id": "10a86qpr9k9wurb8itfnfgzo8eetxs6th2gmiv1o8.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/1/0/a/8/10a86qpr9k9wurb8itfnfgzo8eetxs6th2gmiv1o8.jpg",
-        "data": null
-    }, {
-        "id": "0eybdtrwgscy2dadq05naujq5okeotl5cyfuergvs.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/e/y/b/0eybdtrwgscy2dadq05naujq5okeotl5cyfuergvs.jpg",
-        "data": null
-    }, {
-        "id": "0maihs9wfff2xl3plqtq254n44gkaxlvejyrtnbqw.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/m/a/i/0maihs9wfff2xl3plqtq254n44gkaxlvejyrtnbqw.jpg",
-        "data": null
-    }, {
-        "id": "0cjgak7htwwtsl4to31rqqmyg5a73h6vwzserq2wo.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/c/j/g/0cjgak7htwwtsl4to31rqqmyg5a73h6vwzserq2wo.jpg",
-        "data": null
-    }, {
-        "id": "102tkunk4f87ksovtm7x6u1awoz65it97nabbx9a0.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/1/0/2/t/102tkunk4f87ksovtm7x6u1awoz65it97nabbx9a0.jpg",
-        "data": null
-    }, {
-        "id": "1kd6jjp93vv5wv5dw8964n7t823luy8jk3m4obkfs.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/1/k/d/6/1kd6jjp93vv5wv5dw8964n7t823luy8jk3m4obkfs.jpg",
-        "data": null
-    }, {
-        "id": "052a19zndeojbs4px73q8ns94g1uxi0exxqyltpo8.jpg",
-        "url": "https://v.seloger.com/s/height/800/visuels/0/5/2/a/052a19zndeojbs4px73q8ns94g1uxi0exxqyltpo8.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "Cuisine": "am\u00e9ricaine \u00e9quip\u00e9e",
-        "Pi\u00e8ces": "3",
-        "Etage": "1",
-        "Reference": "488187",
-        "Chambres": "2",
-        "Chauffage": "\u00e9lectrique radiateur",
-        "Terrain": "95 m\u00b2",
-        "Surface": "60 m\u00b2",
-        "Terrasse": "1",
-        "Ann\u00e9e de construction": "1870",
-        "Salle \u00c0 Manger": "",
-        "Salle de s\u00e9jour": "22 m\u00b2"
-    }
-}
[deleted binary image, 24 KiB]
@@ -1,44 +0,0 @@
-{
-    "id": "13783671@explorimmo",
-    "url": "http://www.explorimmo.com/annonce-13783671.html",
-    "title": "Vente appartement 3 pi\u00e8ces 65 m2",
-    "area": 65,
-    "cost": 145275,
-    "price_per_meter": 2235,
-    "currency": "EUR",
-    "utilities": "H.C.",
-    "date": "2017-11-10T02:04:00",
-    "location": "225 RUE DE FOUGERES Rennes 35700",
-    "station": null,
-    "text": "Rennes en exclusivit\u00e9 rue de Foug\u00e8res - Grand Appartement 3 pi\u00e8ces avec Balcon\ndans une copropri\u00e9t\u00e9 avec ascenseur - Travaux \u00e0 pr\u00e9voir - 2 chambres - Cave et\ngarage\n\n",
-    "phone": null,
-    "photos": [{
-        "id": "cb10f556708c4e858c1a45ec1dfda623.jpg",
-        "url": "http://thbr.figarocms.net/images/AXuL6XMCphsRrTYttb7yR2W3CCg=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/cb10f556708c4e858c1a45ec1dfda623.jpg",
-        "data": null
-    }, {
-        "id": "e2696eacce2d487e99e88c2b945cee34.jpg",
-        "url": "http://thbr.figarocms.net/images/0Va3M6bf1eFkJJzPXC--QIc6WTo=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/e2696eacce2d487e99e88c2b945cee34.jpg",
-        "data": null
-    }],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "available": true,
-        "heatingType": "",
-        "agency": "NEXITY LAMY, 6 avenue Jean Janvier, 35000, Rennes",
-        "bathrooms": 0,
-        "exposure": "Non pr\u00e9cis\u00e9",
-        "floor": "1",
-        "energy": "E",
-        "bedrooms": 2,
-        "greenhouseGasEmission": null,
-        "isFurnished": false,
-        "rooms": 3,
-        "fees": 0.0,
-        "creationDate": 1507712100000,
-        "agencyFees": 0.0,
-        "availabilityDate": null,
-        "guarantee": 0.0
-    }
-}
[deleted binary image, 40 KiB]
[deleted binary image, 36 KiB]
[deleted binary image, 25 KiB]
@@ -1,45 +0,0 @@
-{
-    "id": "14428129@explorimmo",
-    "url": "http://www.explorimmo.com/annonce-14428129.html",
-    "title": "Vente appartement 3 pièces 67 m2",
-    "area": 67,
-    "cost": 155700,
-    "price_per_meter": 2323.8805970149256,
-    "currency": "EUR",
-    "utilities": "H.C.",
-    "date": "2017-12-05T07:40:00",
-    "location": "17 PLACE MARECHAL JUIN Rennes 35000",
-    "station": null,
-    "text": "Exclusivité Nexity Dans un immeuble de standing, en étage élevé avec\nascenseur, Appartement Type 3 de 67 m² exposé Sud / Ouest, un séjour avec\nbalcon et double exposition vue dégagée. Deux chambres dont une avec balcon,\nsalle de douches, WC séparé, cave et parking en sous-sol.\n\n",
-    "phone": null,
-    "photos": [
-        {
-            "id": "f9b2da6dfa184759aa0c349edb1cd037.jpg",
-            "url": "flatisfy/test_files/14428129@explorimmo.jpg",
-            "data": null
-        }
-    ],
-    "rooms": 3,
-    "bedrooms": 2,
-    "details": {
-        "available": true,
-        "heatingType": "",
-        "agency": "NEXITY LAMY, 6 avenue Jean Janvier, 35000, Rennes",
-        "bathrooms": 0,
-        "exposure": "Non précisé",
-        "floor": "15",
-        "energy": "C",
-        "bedrooms": 2,
-        "greenhouseGasEmission": null,
-        "isFurnished": false,
-        "rooms": 3,
-        "fees": 0,
-        "creationDate": 1512455998000,
-        "agencyFees": 0,
-        "availabilityDate": null,
-        "guarantee": 0
-    },
-    "flatisfy": {
-        "postal_code": "35000"
-    }
-}
@ -1,72 +0,0 @@
{
    "id": "14818297@explorimmo",
    "url": "http://www.explorimmo.com/annonce-14818297.html",
    "title": "Vente maison 3 pi\u00e8ces 60 m2",
    "area": 60,
    "cost": 179888,
    "price_per_meter": 2998.133333333333333333333333,
    "currency": "EUR",
    "utilities": "H.C.",
    "date": "2018-01-13T04:37:00",
    "location": " Rennes 35000",
    "station": null,
    "text": "I@D France - Sarah LECLERC (06 01 43 20 02) vous propose : Pour les Amoureux\nde la Pierre, Maison de ville enti\u00e8rement r\u00e9nov\u00e9e avec go\u00fbt et modernit\u00e9,\nPoutres apparentes dans les 2 chambres, Cuisine am\u00e9nag\u00e9e ouverte sur le salon-\nsalle \u00e0 manger de 30 M 2 , Salle de douche, JARDINET et TERRASSE de 95 M 2\n(possibilit\u00e9 jardin japonais)... situ\u00e9e AU COEUR DE LA VILLE, \u00e0 proximit\u00e9 des\nEcoles, des Commerces et du march\u00e9, tout peut se faire \u00e0 pied... Ligne de bus\n\u00e0 proximit\u00e9 (ligne 61) ...AUX PORTES DE RENNES (5mn)... Peut se vivre comme un\nappartement sans les charges de copropri\u00e9t\u00e9 !BEAUCOUP DE CHARME POUR CE BIEN\nRARE SUR LE MARCHE !!!Honoraires d'agence \u00e0 la charge du vendeur.Information\nd'affichage \u00e9nerg\u00e9tique sur ce bien : DPE VI indice 0 et GES VI indice 0. La\npr\u00e9sente annonce immobili\u00e8re a \u00e9t\u00e9 r\u00e9dig\u00e9e sous la responsabilit\u00e9 \u00e9ditoriale\nde Mme Sarah LECLERC (ID 27387), Agent Commercial mandataire en immobilier\nimmatricul\u00e9 au Registre Sp\u00e9cial des Agents Commerciaux (RSAC) du Tribunal de\nCommerce de rennes sous le num\u00e9ro 521558007. Retrouvez tous nos biens sur\nnotre site internet. www.iadfrance.com\n\n",
    "phone": null,
    "photos": [{
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-1.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/ydkyhrlKomMs9N1Jjums21g1Yac=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-1.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-2.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/tSmULhY2QwgR-ssclatZ1p0fxIY=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-2.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-3.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/2KG56A1y_EvvCCpzb-ButCIB9Gc=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-3.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-4.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/aZC1B1yyb70R_YUw3yuMDep9Jjs=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-4.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-5.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/eTTgRXM9s61HPshBL8vaCKzCoHE=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-5.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-6.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/0PLHLenqeoN12WySQzcHfp4J81g=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-6.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-7.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/isxp6GKSDn-ZTCstKe8All5i-uk=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-7.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-8.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/M6-Tv19WAG4EnwvTzHIzylqV66I=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-8.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }, {
        "id": "http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-9.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "url": "http://thbr.figarocms.net/external/vYv6ie0s_lXwighWdgrNJVHDROI=/560x420/filters:fill(f6f6f6):quality(80):strip_icc()/http%3A%2F%2Fpasserelle.static.iadfrance.com%2Fphotos%2Frealestate%2F2018-01%2Fproduct-477682-9.jpg%3Fbridge%3Dexplorimmo%26ts%3D201801200001",
        "data": null
    }],
    "rooms": 3,
    "bedrooms": 2,
    "details": {
        "available": true,
        "heatingType": "chauffage_electrique",
        "agency": "I@D FRANCE, Immeuble carr\u00e9 Haussmann IIIall\u00e9e de la ferme de Varatre, 77127, Lieusaint",
        "bathrooms": 0,
        "exposure": "Non pr\u00e9cis\u00e9",
        "floor": "",
        "energy": "",
        "bedrooms": 2,
        "greenhouseGasEmission": null,
        "isFurnished": false,
        "rooms": 3,
        "fees": 0.0,
        "creationDate": 1515718604000,
        "agencyFees": 0.0,
        "availabilityDate": null,
        "guarantee": 0.0
    }
}
[deleted image: 34 KiB]
[deleted image: 105 KiB]
@ -1,457 +0,0 @@
# coding: utf-8
"""
This module contains unit testing functions.
"""
import copy
import json
import logging
import os
import random
import sys
import unittest
import tempfile

from io import BytesIO

import PIL
import requests
import requests_mock

from flatisfy import tools
from flatisfy.filters import duplicates
from flatisfy.filters.cache import ImageCache
from flatisfy.constants import BACKENDS_BY_PRECEDENCE

LOGGER = logging.getLogger(__name__)
TESTS_DATA_DIR = os.path.dirname(os.path.realpath(__file__)) + "/test_files/"


class LocalImageCache(ImageCache):
    """
    A local cache for images, stored in memory.
    """

    @staticmethod
    def on_miss(path):
        """
        Helper to actually retrieve photos if not already cached.
        """
        url = "mock://flatisfy" + path
        with requests_mock.Mocker() as mock:
            with open(path, "rb") as fh:
                mock.get(url, content=fh.read())
            return PIL.Image.open(BytesIO(requests.get(url).content))


class TestTexts(unittest.TestCase):
    """
    Checks string normalizations.
    """

    def test_roman_numbers(self):
        """
        Checks roman numbers replacement.
        """
        self.assertEqual("XIV", tools.convert_arabic_to_roman("14"))

        self.assertEqual("XXXIX", tools.convert_arabic_to_roman("39"))

        self.assertEqual("40", tools.convert_arabic_to_roman("40"))

        self.assertEqual("1987", tools.convert_arabic_to_roman("1987"))

        self.assertEqual(
            "Dans le XVe arrondissement",
            tools.convert_arabic_to_roman_in_text("Dans le 15e arrondissement"),
        )

        self.assertEqual("XXeme arr.", tools.convert_arabic_to_roman_in_text("20eme arr."))

        self.assertEqual(
            "A AIX EN PROVENCE",
            tools.convert_arabic_to_roman_in_text("A AIX EN PROVENCE"),
        )

        self.assertEqual(
            "Montigny Le Bretonneux",
            tools.convert_arabic_to_roman_in_text("Montigny Le Bretonneux"),
        )

    def test_roman_numbers_in_text(self):
        """
        Checks conversion of arabic numbers to roman ones in string
        normalization.
        """
        self.assertEqual(
            "dans le XVe arrondissement",
            tools.normalize_string("Dans le 15e arrondissement"),
        )

        self.assertEqual("paris XVe, 75005", tools.normalize_string("Paris 15e, 75005"))

        self.assertEqual("paris xve, 75005", tools.normalize_string("Paris XVe, 75005"))

    def test_multiple_whitespaces(self):
        """
        Checks whitespaces are collapsed.
        """
        self.assertEqual("avec ascenseur", tools.normalize_string("avec   ascenseur"))

    def test_whitespace_trim(self):
        """
        Checks that trailing and beginning whitespaces are trimmed.
        """
        self.assertEqual("rennes 35000", tools.normalize_string("  Rennes 35000  "))

    def test_accents(self):
        """
        Checks accents are replaced.
        """
        self.assertEqual("eeeaui", tools.normalize_string(u"éèêàüï"))


class TestPhoneNumbers(unittest.TestCase):
    """
    Checks phone numbers normalizations.
    """

    def test_prefix(self):
        """
        Checks phone numbers with international prefixes.
        """
        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("+33605040302"))

    def test_dots_separators(self):
        """
        Checks phone numbers with dots.
        """
        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06.05.04.03.02"))

    def test_spaces_separators(self):
        """
        Checks phone numbers with spaces.
        """
        self.assertEqual("0605040302", duplicates.homogeneize_phone_number("06 05 04 03 02"))


class TestPhotos(unittest.TestCase):
    HASH_THRESHOLD = 10  # pylint: disable=invalid-name

    def __init__(self, *args, **kwargs):
        self.IMAGE_CACHE = LocalImageCache(  # pylint: disable=invalid-name
            storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
        )
        super(TestPhotos, self).__init__(*args, **kwargs)

    def test_same_photo_twice(self):
        """
        Compares a photo against itself.
        """
        photo = {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"}

        self.assertTrue(duplicates.compare_photos(photo, photo, self.IMAGE_CACHE, self.HASH_THRESHOLD))

    def test_different_photos(self):
        """
        Compares two different photos.
        """
        self.assertFalse(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

        self.assertFalse(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

    def test_matching_photos(self):
        """
        Compares two matching photos with different size and source.
        """
        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "14428129@explorimmo.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739-2@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "14428129-2@explorimmo.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739-3@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "14428129-3@explorimmo.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "127028739@seloger.jpg"},
                {"url": TESTS_DATA_DIR + "127028739-watermark@seloger.jpg"},
                self.IMAGE_CACHE,
                self.HASH_THRESHOLD,
            )
        )

    def test_matching_cropped_photos(self):
        """
        Compares two matching photos with one being cropped.
        """
        # Fixme: the image hash threshold should be 10 ideally
        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "vertical.jpg"},
                {"url": TESTS_DATA_DIR + "vertical-cropped.jpg"},
                self.IMAGE_CACHE,
                20,
            )
        )

        # Fixme: the image hash threshold should be 10 ideally
        self.assertTrue(
            duplicates.compare_photos(
                {"url": TESTS_DATA_DIR + "13783671@explorimmo.jpg"},
                {"url": TESTS_DATA_DIR + "124910113@seloger.jpg"},
                self.IMAGE_CACHE,
                20,
            )
        )


class TestImageCache(unittest.TestCase):
    """
    Checks image cache is working as expected.
    """

    def __init__(self, *args, **kwargs):
        self.IMAGE_CACHE = ImageCache(storage_dir=tempfile.mkdtemp(prefix="flatisfy-"))  # pylint: disable=invalid-name
        super(TestImageCache, self).__init__(*args, **kwargs)

    def test_invalid_url(self):
        """
        Check that it returns nothing on an invalid URL.
        """
        # See https://framagit.org/phyks/Flatisfy/issues/116.
        self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/404"))
        self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/status/500"))

    def test_invalid_data(self):
        """
        Check that it returns nothing on invalid data.
        """
        # See https://framagit.org/phyks/Flatisfy/issues/116.
        self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/"))


class TestDuplicates(unittest.TestCase):
    """
    Checks duplicates detection.
    """

    DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS = 8  # pylint: disable=invalid-name
    DUPLICATES_MIN_SCORE_WITH_PHOTOS = 15  # pylint: disable=invalid-name
    HASH_THRESHOLD = 10  # pylint: disable=invalid-name

    def __init__(self, *args, **kwargs):
        self.IMAGE_CACHE = LocalImageCache(  # pylint: disable=invalid-name
            storage_dir=tempfile.mkdtemp(prefix="flatisfy-")
        )
        super(TestDuplicates, self).__init__(*args, **kwargs)

    @staticmethod
    def generate_fake_flat():
        """
        Generates a fake flat post.
        """
        backend = BACKENDS_BY_PRECEDENCE[random.randint(0, len(BACKENDS_BY_PRECEDENCE) - 1)]
        return {
            "id": str(random.randint(100000, 199999)) + "@" + backend,
            "phone": "0607080910",
            "rooms": random.randint(1, 4),
            "utilities": "",
            "area": random.randint(200, 1500) / 10,
            "cost": random.randint(100000, 300000),
            "bedrooms": random.randint(1, 4),
        }

    @staticmethod
    def load_files(file1, file2):
        """
        Load two files

        :return: A list with two flats
        """
        with open(TESTS_DATA_DIR + file1 + ".json", "r") as flat_file:
            flat1 = json.loads(flat_file.read())

        with open(TESTS_DATA_DIR + file2 + ".json", "r") as flat_file:
            flat2 = json.loads(flat_file.read())

        return [flat1, flat2]

    def test_duplicates(self):
        """
        Two identical flats should be detected as duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_different_prices(self):
        """
        Two flats with different prices should not be detected as duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        flat2["cost"] += 1000

        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_different_rooms(self):
        """
        Two flats with different rooms quantity should not be detected as
        duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        flat2["rooms"] += 1

        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_different_areas(self):
        """
        Two flats with different areas should not be detected as duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        flat2["area"] += 10

        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_different_areas_decimals(self):
        """
        Two flats whose area integers are equal but whose decimals are present
        and different should not be detected as duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        flat1["area"] = 50.65
        flat2["area"] = 50.37

        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_different_phones(self):
        """
        Two flats with different phone numbers should not be detected as
        duplicates.
        """
        flat1 = self.generate_fake_flat()
        flat2 = copy.deepcopy(flat1)
        flat2["phone"] = "0708091011"

        score = duplicates.get_duplicate_score(flat1, flat2, self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITHOUT_PHOTOS)

    def test_real_duplicates(self):
        """
        Two flats with same price, area and rooms quantity should be detected
        as duplicates.
        """
        flats = self.load_files("127028739@seloger", "14428129@explorimmo")

        score = duplicates.get_duplicate_score(flats[0], flats[1], self.IMAGE_CACHE, self.HASH_THRESHOLD)
        self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)

        # TODO: fixme, find new testing examples
        # flats = self.load_files(
        #     "128358415@seloger",
        #     "14818297@explorimmo"
        # )

        # score = duplicates.get_duplicate_score(
        #     flats[0], flats[1],
        #     self.IMAGE_CACHE, 20
        # )
        # self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)

        # # Different number of photos, and some are cropped
        # flats = self.load_files(
        #     "124910113@seloger",
        #     "13783671@explorimmo"
        # )

        # score = duplicates.get_duplicate_score(
        #     flats[0], flats[1],
        #     self.IMAGE_CACHE, 20
        # )
        # self.assertGreaterEqual(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)

        # # Same flat, different agencies, texts and photos
        # flats = self.load_files(
        #     "122509451@seloger",
        #     "127963747@seloger"
        # )

        # score = duplicates.get_duplicate_score(
        #     flats[0], flats[1],
        #     self.IMAGE_CACHE, self.HASH_THRESHOLD
        # )
        # # Fix me: should be TestDuplicates.DUPLICATES_MIN_SCORE_WITH_PHOTOS
        # self.assertGreaterEqual(score, 4)

        # # Really similar flats, but different
        # flats = self.load_files(
        #     "123312807@seloger",
        #     "123314207@seloger"
        # )

        # score = duplicates.get_duplicate_score(
        #     flats[0], flats[1],
        #     self.IMAGE_CACHE, self.HASH_THRESHOLD
        # )
        # self.assertLess(score, self.DUPLICATES_MIN_SCORE_WITH_PHOTOS)


def run():
    """
    Run all the tests
    """
    LOGGER.info("Running tests…")
    try:
        for testsuite in [
            TestTexts,
            TestPhoneNumbers,
            TestImageCache,
            TestDuplicates,
            TestPhotos,
        ]:
            suite = unittest.TestLoader().loadTestsFromTestCase(testsuite)
            result = unittest.TextTestRunner(verbosity=2).run(suite)
            assert result.wasSuccessful()
    except AssertionError:
        sys.exit(1)
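For orientation, here is a minimal sketch of how the duplicate scorer exercised by the tests above can be driven on its own. The flat dicts, cache directory, and the printed threshold check are hypothetical; only the positional call signature and the 8/15 thresholds come from the test module itself, so treat this as illustrative rather than part of the diff.

from flatisfy.filters import duplicates
from flatisfy.filters.cache import ImageCache

# Two hypothetical flats sharing phone, rooms, area and cost.
flat1 = {"id": "100001@seloger", "phone": "0607080910", "rooms": 3,
         "utilities": "", "area": 65, "cost": 145275, "bedrooms": 2}
flat2 = dict(flat1, id="100002@explorimmo")

cache = ImageCache(storage_dir="/tmp/flatisfy-cache")
score = duplicates.get_duplicate_score(flat1, flat2, cache, 10)
# 8 and 15 are the "without photos" / "with photos" minimum scores above.
print(score >= 8)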
@ -3,7 +3,9 @@
 This module contains basic utility functions, such as pretty printing of JSON
 output, checking that a value is within a given interval etc.
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals
+)
 
 import datetime
 import itertools
@ -11,15 +13,10 @@ import json
 import logging
 import math
 import re
-import time
 
-import imagehash
-import mapbox
 import requests
 import unidecode
 
-from flatisfy.constants import TimeToModes
-
 
 LOGGER = logging.getLogger(__name__)
 
@ -27,115 +24,14 @@ LOGGER = logging.getLogger(__name__)
 NAVITIA_ENDPOINT = "https://api.navitia.io/v1/coverage/fr-idf/journeys"
 
 
-def next_weekday(d, weekday):
-    """
-    Find datetime object for next given weekday.
-
-    From
-    https://stackoverflow.com/questions/6558535/find-the-date-for-the-first-monday-after-a-given-a-date.
-
-    :param d: Datetime to search from.
-    :param weekday: Weekday (0 for Monday, etc)
-    :returns: The datetime object for the next given weekday.
-    """
-    days_ahead = weekday - d.weekday()
-    if days_ahead <= 0:  # Target day already happened this week
-        days_ahead += 7
-    return d + datetime.timedelta(days_ahead)
-
-
-def convert_arabic_to_roman(arabic):
-    """
-    Convert an arabic literal to a roman one. Limits to 39, which is a rough
-    estimate for a maximum for using roman notations in daily life.
-
-    .. note::
-        Based on https://gist.github.com/riverrun/ac91218bb1678b857c12.
-
-    :param arabic: An arabic number, as string.
-    :returns: The corresponding roman one, as string.
-    """
-    if int(arabic) > 39:
-        return arabic
-
-    to_roman = {
-        1: "I",
-        2: "II",
-        3: "III",
-        4: "IV",
-        5: "V",
-        6: "VI",
-        7: "VII",
-        8: "VIII",
-        9: "IX",
-        10: "X",
-        20: "XX",
-        30: "XXX",
-    }
-    roman_chars_list = []
-    count = 1
-    for digit in arabic[::-1]:
-        digit = int(digit)
-        if digit != 0:
-            roman_chars_list.append(to_roman[digit * count])
-        count *= 10
-    return "".join(roman_chars_list[::-1])
-
-
-def convert_arabic_to_roman_in_text(text):
-    """
-    Convert arabic literals to roman ones in a text.
-
-    :param text: Some text to convert arabic literals from.
-    :returns: The corresponding text with arabic literals converted to
-        roman.
-    """
-    return re.sub(r"(\d+)", lambda matchobj: convert_arabic_to_roman(matchobj.group(0)), text)
-
-
-def hash_dict(func):
-    """
-    Decorator to use on functions accepting dict parameters, to transform them
-    into immutable dicts and be able to use lru_cache.
-
-    From https://stackoverflow.com/a/44776960.
-    """
-
-    class HDict(dict):
-        """
-        Transform a mutable dictionary into an immutable one. Useful to be
-        compatible with lru_cache.
-        """
-
-        def __hash__(self):
-            return hash(json.dumps(self))
-
-    def wrapped(*args, **kwargs):
-        """
-        The wrapped function
-        """
-        args = tuple([HDict(arg) if isinstance(arg, dict) else arg for arg in args])
-        kwargs = {k: HDict(v) if isinstance(v, dict) else v for k, v in kwargs.items()}
-        return func(*args, **kwargs)
-
-    return wrapped
-
-
 class DateAwareJSONEncoder(json.JSONEncoder):
     """
     Extend the default JSON encoder to serialize datetimes to iso strings.
     """
 
     def default(self, o):  # pylint: disable=locally-disabled,E0202
         if isinstance(o, (datetime.date, datetime.datetime)):
             return o.isoformat()
-        try:
-            return json.JSONEncoder.default(self, o)
-        except TypeError:
-            # Discard image hashes
-            if isinstance(o, imagehash.ImageHash):
-                return None
-            raise
+        return json.JSONEncoder.default(self, o)
 
 
 def pretty_json(data):
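As a quick sanity check of the removed helpers, here is the behaviour implied by their docstrings and by the unit tests earlier in this diff (a standalone sketch, valid on the master side only; not part of the diff):

from flatisfy.tools import convert_arabic_to_roman, convert_arabic_to_roman_in_text

# Numbers above 39 pass through unchanged; smaller ones are built
# digit by digit from the to_roman lookup table.
assert convert_arabic_to_roman("14") == "XIV"
assert convert_arabic_to_roman("40") == "40"
# In free text, every run of digits is converted independently.
assert convert_arabic_to_roman_in_text("Dans le 15e arrondissement") == "Dans le XVe arrondissement"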
@ -153,7 +49,9 @@ def pretty_json(data):
         "toto": "ok"
     }
     """
-    return json.dumps(data, cls=DateAwareJSONEncoder, indent=4, separators=(",", ": "), sort_keys=True)
+    return json.dumps(data, cls=DateAwareJSONEncoder,
+                      indent=4, separators=(',', ': '),
+                      sort_keys=True)
 
 
 def batch(iterable, size):
@ -167,10 +65,7 @@ def batch(iterable, size):
     sourceiter = iter(iterable)
     while True:
         batchiter = itertools.islice(sourceiter, size)
-        try:
-            yield itertools.chain([next(batchiter)], batchiter)
-        except StopIteration:
-            return
+        yield itertools.chain([batchiter.next()], batchiter)
 
 
 def is_within_interval(value, min_value=None, max_value=None):
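To make the behaviour concrete (a sketch, not part of the diff): both versions lazily group an iterable into size-`size` chunks. The master-side try/except exists because, under PEP 479, the `next()` call raising StopIteration on exhaustion would otherwise bubble out of the generator as a RuntimeError on Python 3.7+; the responsive side's `batchiter.next()` is Python 2 only.

for chunk in batch(range(7), 3):
    print(list(chunk))  # [0, 1, 2] then [3, 4, 5] then [6]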
@ -185,11 +80,9 @@ def is_within_interval(value, min_value=None, max_value=None):
     :return: ``True`` if the value is ``None``. ``True`` or ``False`` whether
     the value is within the given interval or not.
 
-    .. note::
-
-        A value is always within a ``None`` bound.
+    .. note:: A value is always within a ``None`` bound.
 
-    Example::
+    :Example:
 
     >>> is_within_interval(None)
     True
@ -216,24 +109,16 @@ def is_within_interval(value, min_value=None, max_value=None):
     return all(checks)
 
 
-def normalize_string(string, lowercase=True, convert_arabic_numerals=True):
+def normalize_string(string):
     """
     Normalize the given string for matching.
 
-    Example::
+    .. todo :: Convert roman numerals to decimal
+
+    :Example:
 
     >>> normalize_string("tétéà 14ème-XIV, foobar")
-    'tetea XIVeme xiv, foobar'
-
-    >>> normalize_string("tétéà 14ème-XIV, foobar", False)
     'tetea 14eme xiv, foobar'
 
-    :param string: The string to normalize.
-    :param lowercase: Whether to convert string to lowercase or not. Defaults
-        to ``True``.
-    :param convert_arabic_numerals: Whether to convert arabic numerals to roman
-        ones. Defaults to ``True``.
-    :return: The normalized string.
     """
     # ASCIIfy the string
     string = unidecode.unidecode(string)
@ -243,19 +128,11 @@ def normalize_string(string, lowercase=True, convert_arabic_numerals=True):
     string = re.sub(r"[^a-zA-Z0-9,;:]", " ", string)
 
     # Convert to lowercase
-    if lowercase:
-        string = string.lower()
+    string = string.lower()
 
-    # Convert arabic numbers to roman numbers
-    if convert_arabic_numerals:
-        string = convert_arabic_to_roman_in_text(string)
-
     # Collapse multiple spaces, replace tabulations and newlines by space
     string = re.sub(r"\s+", " ", string)
 
-    # Trim whitespaces
-    string = string.strip()
-
     return string
 
 
@ -292,7 +169,10 @@ def distance(gps1, gps2):
     long2 = math.radians(gps2[1])
 
     # pylint: disable=locally-disabled,invalid-name
-    a = math.sin((lat2 - lat1) / 2.0) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
+    a = (
+        math.sin((lat2 - lat1) / 2.0)**2 +
+        math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0)**2
+    )
     c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
     earth_radius = 6371000
 
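For a worked check of the haversine expression above, here is a self-contained sketch using the same formula (the coordinates are illustrative, not part of the diff):

import math

def haversine(gps1, gps2):
    # Same formula as distance() above: degrees in, metres out.
    lat1, long1 = map(math.radians, gps1)
    lat2, long2 = map(math.radians, gps2)
    a = (
        math.sin((lat2 - lat1) / 2.0) ** 2
        + math.cos(lat1) * math.cos(lat2) * math.sin((long2 - long1) / 2.0) ** 2
    )
    c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return 6371000 * c

# Notre-Dame to Sacré-Cœur in Paris: roughly 3.8 km.
print(haversine((48.8530, 2.3499), (48.8867, 2.3431)))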
@ -347,7 +227,7 @@ def merge_dicts(*args):
     return merge_dicts(merged_flat, *args[2:])
 
 
-def get_travel_time_between(latlng_from, latlng_to, mode, config):
+def get_travel_time_between(latlng_from, latlng_to, config):
     """
     Query the Navitia API to get the travel time between two points identified
     by their latitude and longitude.
@ -355,139 +235,68 @@ def get_travel_time_between(latlng_from, latlng_to, mode, config):
     :param latlng_from: A tuple of (latitude, longitude) for the starting
     point.
     :param latlng_to: A tuple of (latitude, longitude) for the destination.
-    :param mode: A TimeToMode enum value for the mode of transportation to use.
     :return: A dict of the travel time in seconds and sections of the journey
     with GeoJSON paths. Returns ``None`` if it could not fetch it.
 
-    .. note ::
-
-        Uses the Navitia API. Requires a ``navitia_api_key`` field to be
+    .. note :: Uses the Navitia API. Requires a ``navitia_api_key`` field to be
         filled-in in the ``config``.
     """
-    sections = []
-    travel_time = None
-
-    if mode == TimeToModes.PUBLIC_TRANSPORT:
-        # Check that Navitia API key is available
-        if config["navitia_api_key"]:
-            # Search route for next Monday at 8am to avoid looking for a route
-            # in the middle of the night if the fetch is done by night.
-            date_from = next_weekday(datetime.datetime.now(), 0).replace(
-                hour=8,
-                minute=0,
-            )
-            payload = {
-                "from": "%s;%s" % (latlng_from[1], latlng_from[0]),
-                "to": "%s;%s" % (latlng_to[1], latlng_to[0]),
-                "datetime": date_from.isoformat(),
-                "count": 1,
-            }
-            try:
-                # Do the query to Navitia API
-                req = requests.get(
-                    NAVITIA_ENDPOINT,
-                    params=payload,
-                    auth=(config["navitia_api_key"], ""),
-                )
-                req.raise_for_status()
-
-                journeys = req.json()["journeys"][0]
-                travel_time = journeys["durations"]["total"]
-                for section in journeys["sections"]:
-                    if section["type"] == "public_transport":
-                        # Public transport
-                        sections.append(
-                            {
-                                "geojson": section["geojson"],
-                                "color": (section["display_informations"].get("color", None)),
-                            }
-                        )
-                    elif section["type"] == "street_network":
-                        # Walking
-                        sections.append({"geojson": section["geojson"], "color": None})
-                    else:
-                        # Skip anything else
-                        continue
-            except (
-                requests.exceptions.RequestException,
-                ValueError,
-                IndexError,
-                KeyError,
-            ) as exc:
-                # Ignore any possible exception
-                LOGGER.warning(
-                    "An exception occurred during travel time lookup on Navitia: %s.",
-                    str(exc),
-                )
-        else:
-            LOGGER.warning(
-                "No API key available for travel time lookup. Please provide "
-                "a Navitia API key. Skipping travel time lookup."
-            )
-    elif mode in [TimeToModes.WALK, TimeToModes.BIKE, TimeToModes.CAR]:
-        MAPBOX_MODES = {
-            TimeToModes.WALK: "mapbox/walking",
-            TimeToModes.BIKE: "mapbox/cycling",
-            TimeToModes.CAR: "mapbox/driving",
-        }
-        # Check that Mapbox API key is available
-        if config["mapbox_api_key"]:
-            try:
-                service = mapbox.Directions(access_token=config["mapbox_api_key"])
-                origin = {
-                    "type": "Feature",
-                    "properties": {"name": "Start"},
-                    "geometry": {
-                        "type": "Point",
-                        "coordinates": [latlng_from[1], latlng_from[0]],
-                    },
-                }
-                destination = {
-                    "type": "Feature",
-                    "properties": {"name": "End"},
-                    "geometry": {
-                        "type": "Point",
-                        "coordinates": [latlng_to[1], latlng_to[0]],
-                    },
-                }
-                response = service.directions([origin, destination], MAPBOX_MODES[mode])
-                response.raise_for_status()
-                route = response.geojson()["features"][0]
-                # Fix longitude/latitude inversion in geojson output
-                geometry = route["geometry"]
-                geometry["coordinates"] = [(x[1], x[0]) for x in geometry["coordinates"]]
-                sections = [{"geojson": geometry, "color": "000"}]
-                travel_time = route["properties"]["duration"]
-            except (requests.exceptions.RequestException, IndexError, KeyError) as exc:
-                # Ignore any possible exception
-                LOGGER.warning(
-                    "An exception occurred during travel time lookup on Mapbox: %s.",
-                    str(exc),
-                )
-        else:
-            LOGGER.warning(
-                "No API key available for travel time lookup. Please provide "
-                "a Mapbox API key. Skipping travel time lookup."
-            )
-
-    if travel_time:
-        return {"time": travel_time, "sections": sections}
+    time = None
+
+    # Check that Navitia API key is available
+    if config["navitia_api_key"]:
+        payload = {
+            "from": "%s;%s" % (latlng_from[1], latlng_from[0]),
+            "to": "%s;%s" % (latlng_to[1], latlng_to[0]),
+            "datetime": datetime.datetime.now().isoformat(),
+            "count": 1
+        }
+        try:
+            # Do the query to Navitia API
+            req = requests.get(
+                NAVITIA_ENDPOINT, params=payload,
+                auth=(config["navitia_api_key"], "")
+            )
+            req.raise_for_status()
+
+            journeys = req.json()["journeys"][0]
+            time = journeys["durations"]["total"]
+            sections = []
+            for section in journeys["sections"]:
+                if section["type"] == "public_transport":
+                    # Public transport
+                    sections.append({
+                        "geojson": section["geojson"],
+                        "color": (
+                            section["display_informations"].get("color", None)
+                        )
+                    })
+                elif section["type"] == "street_network":
+                    # Walking
+                    sections.append({
+                        "geojson": section["geojson"],
+                        "color": None
+                    })
+                else:
+                    # Skip anything else
+                    continue
+        except (requests.exceptions.RequestException,
+                ValueError, IndexError, KeyError) as exc:
+            # Ignore any possible exception
+            LOGGER.warning(
+                "An exception occurred during travel time lookup on "
+                "Navitia: %s.",
+                str(exc)
+            )
+    else:
+        LOGGER.warning(
+            "No API key available for travel time lookup. Please provide "
+            "a Navitia API key. Skipping travel time lookup."
+        )
+
+    if time:
+        return {
+            "time": time,
+            "sections": sections
+        }
     return None
-
-
-def timeit(func):
-    """
-    A decorator that logs how much time was spent in the function.
-    """
-
-    def wrapped(*args, **kwargs):
-        """
-        The wrapped function
-        """
-        before = time.time()
-        res = func(*args, **kwargs)
-        runtime = time.time() - before
-        LOGGER.info("%s -- Execution took %s seconds.", func.__name__, runtime)
-        return res
-
-    return wrapped
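A minimal usage sketch contrasting the two signatures above (the coordinates and config values are placeholders; not part of the diff):

from flatisfy.constants import TimeToModes
from flatisfy.tools import get_travel_time_between

config = {"navitia_api_key": "...", "mapbox_api_key": "..."}  # placeholder keys

# Master side: the transport mode is an explicit TimeToModes argument.
trip = get_travel_time_between((48.85, 2.35), (48.86, 2.36),
                               TimeToModes.PUBLIC_TRANSPORT, config)
# Responsive side: Navitia public transport is the only supported mode.
trip = get_travel_time_between((48.85, 2.35), (48.86, 2.36), config)
if trip:
    print(trip["time"], "seconds over", len(trip["sections"]), "sections")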
@ -2,10 +2,13 @@
 """
 This module contains the definition of the Bottle web app.
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals
+)
 
 import functools
 import json
+import logging
 import os
 
 import bottle
@ -23,14 +26,9 @@ class QuietWSGIRefServer(bottle.WSGIRefServer):
     Quiet implementation of Bottle built-in WSGIRefServer, as `Canister` is
     handling the logging through standard Python logging.
     """
 
     # pylint: disable=locally-disabled,too-few-public-methods
     quiet = True
 
-    def run(self, app):
-        app.log.info("Server is now up and ready! Listening on %s:%s." % (self.host, self.port))
-        super(QuietWSGIRefServer, self).run(app)
-
 
 def _serve_static_file(filename):
     """
@ -38,7 +36,10 @@ def _serve_static_file(filename):
     """
     return bottle.static_file(
         filename,
-        root=os.path.join(os.path.dirname(os.path.realpath(__file__)), "static"),
+        root=os.path.join(
+            os.path.dirname(os.path.realpath(__file__)),
+            "static"
+        )
     )
 
 
@ -50,78 +51,53 @@ def get_app(config):
     """
     get_session = database.init_db(config["database"], config["search_index"])
 
-    app = bottle.Bottle()
+    app = bottle.default_app()
     app.install(DatabasePlugin(get_session))
     app.install(ConfigPlugin(config))
-    app.config.setdefault("canister.log_level", "DISABLED")
-    app.config.setdefault("canister.log_path", False)
+    app.config.setdefault("canister.log_level", logging.root.level)
+    app.config.setdefault("canister.log_path", None)
     app.config.setdefault("canister.debug", False)
     app.install(canister.Canister())
     # Use DateAwareJSONEncoder to dump JSON strings
     # From http://stackoverflow.com/questions/21282040/bottle-framework-how-to-return-datetime-in-json-response#comment55718456_21282666. pylint: disable=locally-disabled,line-too-long
-    app.install(bottle.JSONPlugin(json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)))
-
-    # Enable CORS
-    @app.hook("after_request")
-    def enable_cors():
-        """
-        Add CORS headers at each request.
-        """
-        # The str() call is required as we import unicode_literal and WSGI
-        # headers list should have plain str type.
-        bottle.response.headers[str("Access-Control-Allow-Origin")] = str("*")
-        bottle.response.headers[str("Access-Control-Allow-Methods")] = str("PUT, GET, POST, DELETE, OPTIONS, PATCH")
-        bottle.response.headers[str("Access-Control-Allow-Headers")] = str(
-            "Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token"
-        )
+    bottle.install(
+        bottle.JSONPlugin(
+            json_dumps=functools.partial(json.dumps, cls=DateAwareJSONEncoder)
+        )
+    )
 
     # API v1 routes
-    app.route("/api/v1", ["GET", "OPTIONS"], api_routes.index_v1)
-
-    app.route("/api/v1/time_to_places", ["GET", "OPTIONS"], api_routes.time_to_places_v1)
-
-    app.route("/api/v1/flats", ["GET", "OPTIONS"], api_routes.flats_v1)
-    app.route("/api/v1/flats/:flat_id", ["GET", "OPTIONS"], api_routes.flat_v1)
-    app.route("/api/v1/flats/:flat_id", ["PATCH", "OPTIONS"], api_routes.update_flat_v1)
-
-    app.route("/api/v1/ics/visits.ics", ["GET", "OPTIONS"], api_routes.ics_feed_v1)
-
-    app.route("/api/v1/search", ["POST", "OPTIONS"], api_routes.search_v1)
-
-    app.route("/api/v1/opendata", ["GET", "OPTIONS"], api_routes.opendata_index_v1)
-    app.route(
-        "/api/v1/opendata/postal_codes",
-        ["GET", "OPTIONS"],
-        api_routes.opendata_postal_codes_v1,
-    )
-
-    app.route("/api/v1/metadata", ["GET", "OPTIONS"], api_routes.metadata_v1)
-    app.route("/api/v1/import", ["GET", "OPTIONS"], api_routes.import_v1)
+    app.route("/api/v1/", "GET", api_routes.index_v1)
+
+    app.route("/api/v1/time_to/places", "GET", api_routes.time_to_places_v1)
+
+    app.route("/api/v1/flats", "GET", api_routes.flats_v1)
+    app.route("/api/v1/flats/status/:status", "GET",
+              api_routes.flats_by_status_v1)
+
+    app.route("/api/v1/flat/:flat_id", "GET", api_routes.flat_v1)
+    app.route("/api/v1/flat/:flat_id/status", "POST",
+              api_routes.update_flat_status_v1)
+    app.route("/api/v1/flat/:flat_id/notes", "POST",
+              api_routes.update_flat_notes_v1)
+    app.route("/api/v1/flat/:flat_id/notation", "POST",
+              api_routes.update_flat_notation_v1)
+
+    app.route("/api/v1/search", "POST", api_routes.search_v1)
 
     # Index
     app.route("/", "GET", lambda: _serve_static_file("index.html"))
 
     # Static files
-    app.route("/favicon.ico", "GET", lambda: _serve_static_file("favicon.ico"))
+    app.route("/favicon.ico", "GET",
+              lambda: _serve_static_file("favicon.ico"))
     app.route(
-        "/assets/<filename:path>",
-        "GET",
-        lambda filename: _serve_static_file("/assets/{}".format(filename)),
+        "/assets/<filename:path>", "GET",
+        lambda filename: _serve_static_file("/assets/{}".format(filename))
     )
     app.route(
-        "/img/<filename:path>",
-        "GET",
-        lambda filename: _serve_static_file("/img/{}".format(filename)),
-    )
-    app.route(
-        "/.well-known/<filename:path>",
-        "GET",
-        lambda filename: _serve_static_file("/.well-known/{}".format(filename)),
-    )
-    app.route(
-        "/data/img/<filename:path>",
-        "GET",
-        lambda filename: bottle.static_file(filename, root=os.path.join(config["data_directory"], "images")),
+        "/img/<filename:path>", "GET",
+        lambda filename: _serve_static_file("/img/{}".format(filename))
     )
 
     return app
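To make the route reorganisation concrete, here is how a client would hit each API (a sketch assuming a local instance; the port and the flat id are hypothetical, the paths and verbs come from the hunk above):

import requests

BASE = "http://localhost:8080"  # assumed local deployment

# Master: one flat resource, partial updates via PATCH.
requests.patch(BASE + "/api/v1/flats/12345@seloger",
               json={"status": "followed"})

# Responsive branch: one POST endpoint per updatable field.
requests.post(BASE + "/api/v1/flat/12345@seloger/status",
              json={"status": "followed"})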
@ -7,7 +7,9 @@ This module is heavily based on code from
 [Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
 licensed under MIT license.
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals
+)
 
 import functools
 import inspect
@ -20,8 +22,7 @@ class ConfigPlugin(object):
     A Bottle plugin to automatically pass the config object to the routes
     specifying they need it.
     """
-
-    name = "config"
+    name = 'config'
     api = 2
     KEYWORD = "config"
 
@ -40,7 +41,9 @@ class ConfigPlugin(object):
         if not isinstance(other, ConfigPlugin):
             continue
         else:
-            raise bottle.PluginError("Found another conflicting Config plugin.")
+            raise bottle.PluginError(
+                "Found another conflicting Config plugin."
+            )
 
     def apply(self, callback, route):
         """
@ -7,7 +7,9 @@ This module is heavily based on code from
 [Bottle-SQLAlchemy](https://github.com/iurisilvio/bottle-sqlalchemy) which is
 licensed under MIT license.
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals
+)
 
 import inspect
 
@ -19,8 +21,7 @@ class DatabasePlugin(object):
     A Bottle plugin to automatically pass an SQLAlchemy database session object
     to the routes specifying they need it.
     """
-
-    name = "database"
+    name = 'database'
     api = 2
     KEYWORD = "db"
 
@ -40,7 +41,9 @@ class DatabasePlugin(object):
         if not isinstance(other, DatabasePlugin):
             continue
         else:
-            raise bottle.PluginError("Found another conflicting Database plugin.")
+            raise bottle.PluginError(
+                "Found another conflicting Database plugin."
+            )
 
     def apply(self, callback, route):
         """
@ -61,7 +64,6 @@ class DatabasePlugin(object):
         if self.KEYWORD not in callback_args:
             # If no need for a db session, call the route callback
            return callback
-
         def wrapper(*args, **kwargs):
             """
             Wrap the callback in a call to get_session.
@ -70,7 +72,6 @@ class DatabasePlugin(object):
             # Get a db session and pass it to the callback
             kwargs[self.KEYWORD] = session
             return callback(*args, **kwargs)
-
         return wrapper
 
 
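The plugin contract shown in these hunks, as a sketch of intended use (it assumes the `get_session` factory returned by `database.init_db` in webapp.py; the route and return value are hypothetical): a route callback receives a database session simply by naming an argument after `DatabasePlugin.KEYWORD`.

import bottle

app = bottle.Bottle()
app.install(DatabasePlugin(get_session))

@app.route("/api/v1/ping")
def ping(db):  # "db" matches DatabasePlugin.KEYWORD, so a session is injected
    return {"alive": bool(db)}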
@ -6,10 +6,7 @@ require('isomorphic-fetch')
 const postProcessAPIResults = function (flat) {
     /* eslint-disable camelcase */
     if (flat.date) {
-        flat.date = moment.utc(flat.date)
-    }
-    if (flat.visit_date) {
-        flat.visit_date = moment.utc(flat.visit_date)
+        flat.date = moment(flat.date)
     }
     if (flat.flatisfy_time_to) {
         const momentifiedTimeTo = {}
@ -23,10 +20,6 @@ const postProcessAPIResults = function (flat) {
         })
         flat.flatisfy_time_to = momentifiedTimeTo
     }
 
-    // Fill cost per square meter.
-    flat.sqCost = Math.round(flat.cost * 100 / flat.area) / 100 | 0
-
     /* eslint-enable camelcase */
     return flat
 }
@ -46,7 +39,7 @@ export const getFlats = function (callback) {
 
 export const getFlat = function (flatId, callback) {
     fetch(
-        '/api/v1/flats/' + encodeURIComponent(flatId),
+        '/api/v1/flat/' + encodeURIComponent(flatId),
         { credentials: 'same-origin' }
     )
     .then(function (response) {
@ -61,10 +54,10 @@ export const getFlat = function (flatId, callback) {
 
 export const updateFlatStatus = function (flatId, newStatus, callback) {
     fetch(
-        '/api/v1/flats/' + encodeURIComponent(flatId),
+        '/api/v1/flat/' + encodeURIComponent(flatId) + '/status',
         {
             credentials: 'same-origin',
-            method: 'PATCH',
+            method: 'POST',
             headers: {
                 'Content-Type': 'application/json'
             },
@ -79,10 +72,10 @@ export const updateFlatStatus = function (flatId, newStatus, callback) {
 
 export const updateFlatNotes = function (flatId, newNotes, callback) {
     fetch(
-        '/api/v1/flats/' + encodeURIComponent(flatId),
+        '/api/v1/flat/' + encodeURIComponent(flatId) + '/notes',
         {
             credentials: 'same-origin',
-            method: 'PATCH',
+            method: 'POST',
             headers: {
                 'Content-Type': 'application/json'
             },
@ -97,10 +90,10 @@ export const updateFlatNotes = function (flatId, newNotes, callback) {

 export const updateFlatNotation = function (flatId, newNotation, callback) {
     fetch(
-        '/api/v1/flats/' + encodeURIComponent(flatId),
+        '/api/v1/flat/' + encodeURIComponent(flatId) + '/notation',
         {
             credentials: 'same-origin',
-            method: 'PATCH',
+            method: 'POST',
             headers: {
                 'Content-Type': 'application/json'
             },
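The three helpers above differ only in the URL suffix and payload: the master side always hits the same resource URL with PATCH and a partial JSON body, while the responsive side posts to one endpoint per field. A minimal sketch of the master-side pattern, factored into one hypothetical `patchFlat` helper (endpoint and body shape taken from the calls above):

const patchFlat = function (flatId, fields, callback) {
    // `fields` is whichever subset of the flat to change,
    // e.g. { status: 'followed' } or { notes: '...' }.
    fetch(
        '/api/v1/flats/' + encodeURIComponent(flatId),
        {
            credentials: 'same-origin',
            method: 'PATCH',
            headers: {
                'Content-Type': 'application/json'
            },
            body: JSON.stringify(fields)
        }
    ).then(callback).catch(function (ex) {
        console.error('Unable to update flat: ' + ex)
    })
}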
@ -113,26 +106,8 @@ export const updateFlatNotation = function (flatId, newNotation, callback) {
     })
 }

-export const updateFlatVisitDate = function (flatId, newVisitDate, callback) {
-    fetch(
-        '/api/v1/flats/' + encodeURIComponent(flatId),
-        {
-            credentials: 'same-origin',
-            method: 'PATCH',
-            headers: {
-                'Content-Type': 'application/json'
-            },
-            body: JSON.stringify({
-                visit_date: newVisitDate // eslint-disable-line camelcase
-            })
-        }
-    ).then(callback).catch(function (ex) {
-        console.error('Unable to update flat date of visit: ' + ex)
-    })
-}
-
 export const getTimeToPlaces = function (callback) {
-    fetch('/api/v1/time_to_places', { credentials: 'same-origin' })
+    fetch('/api/v1/time_to/places', { credentials: 'same-origin' })
     .then(function (response) {
         return response.json()
     }).then(function (json) {
@ -161,10 +136,3 @@ export const doSearch = function (query, callback) {
         console.error('Unable to perform search: ' + ex)
     })
 }
-
-export const getMetadata = function (callback) {
-    fetch('/api/v1/metadata', { credentials: 'same-origin' })
-    .then(response => response.json())
-    .then(json => callback(json.data))
-    .catch(ex => console.error('Unable to fetch application metadata: ' + ex))
-}
@ -16,12 +16,29 @@
     <style>
         body {
             margin: 0 auto;
-            /* max-width: 75em; */
+            max-width: 75em;
             font-family: "Helvetica", "Arial", sans-serif;
             line-height: 1.5;
             padding: 4em 1em;
             padding-top: 1em;
             color: #555;
+            font-size: 14px;
+        }
+
+        .mobile-only {
+            display: none;
+        }
+
+        @media screen and (max-width: 767px) {
+            body {
+                max-width: auto;
+                width: 100%;
+                padding: 1em 0.1em;
+            }
+
+            .mobile-only {
+                display: initial;
+            }
         }

         h1 {
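The `.mobile-only` class and the 767px breakpoint added here are handled purely in CSS. Should a component ever need the same breakpoint from script, `window.matchMedia` can mirror it; a hypothetical sketch (nothing in this diff does this):

// Mirror the stylesheet's 767px breakpoint in JavaScript.
const mobileQuery = window.matchMedia('(max-width: 767px)')

const onBreakpointChange = function (query) {
    console.log(query.matches ? 'mobile layout' : 'desktop layout')
}

onBreakpointChange(mobileQuery)
mobileQuery.addListener(onBreakpointChange)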
@ -1,398 +0,0 @@
-<template>
-    <div>
-        <template v-if="isLoading">
-            <p>{{ $t("common.loading") }}</p>
-        </template>
-        <div class="grid" v-else-if="flat && timeToPlaces">
-            <div class="left-panel">
-                <h2>
-                    (<!--
-                    --><router-link :to="{ name: 'status', params: { status: flat.status }}"><!--
-                    -->{{ flat.status ? capitalize($t("status." + flat.status)) : '' }}<!--
-                    --></router-link><!--
-                    -->) {{ flat.title }} [{{ flat.id.split("@")[1] }}]
-                    <span class="expired">{{ flat.is_expired ? '[' + $t('common.expired') + ']' : '' }}</span>
-                </h2>
-                <div class="grid">
-                    <div class="left-panel">
-                        <span>
-                            {{ flat.cost | cost(flat.currency) }}
-                            <template v-if="flat.utilities === 'included'">
-                                {{ $t("flatsDetails.utilities_included") }}
-                            </template>
-                            <template v-else-if="flat.utilities === 'excluded'">
-                                {{ $t("flatsDetails.utilities_excluded") }}
-                            </template>
-                        </span>
-                        <span v-if="flat.flatisfy_postal_code.postal_code">
-                            à {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
-                        </span>
-                    </div>
-                    <span class="right-panel right">
-                        <template v-if="flat.area"><span>{{flat.area}} m<sup>2</sup></span></template><template v-if="flat.rooms"><span>, {{flat.rooms}} {{ $tc("flatsDetails.rooms", flat.rooms) }}</span></template><template v-if="flat.bedrooms"><span>/ {{flat.bedrooms}} {{ $tc("flatsDetails.bedrooms", flat.bedrooms) }}</span></template>
-                    </span>
-                </div>
-                <div>
-                    <template v-if="flat.photos && flat.photos.length > 0">
-                        <Slider :photos="flat.photos"></Slider>
-                    </template>
-                </div>
-                <div>
-                    <h3>{{ $t("flatsDetails.Description") }}</h3>
-                    <p>{{ flat.text }}</p>
-                    <p class="right">{{ flat.location }}</p>
-                    <p>{{ $t("flatsDetails.First_posted") }} {{ flat.date ? flat.date.fromNow() : '?' }}.</p>
-                </div>
-                <div>
-                    <h3>{{ $t("flatsDetails.Details") }}</h3>
-                    <table>
-                        <tr v-for="(value, key) in flat.details">
-                            <th>{{ key }}</th>
-                            <td>{{ value }}</td>
-                        </tr>
-                    </table>
-                </div>
-                <div>
-                    <h3>{{ $t("flatsDetails.Metadata") }}</h3>
-                    <table>
-                        <tr>
-                            <th>
-                                {{ $t("flatsDetails.postal_code") }}
-                            </th>
-                            <td>
-                                <template v-if="flat.flatisfy_postal_code.postal_code">
-                                    {{ flat.flatisfy_postal_code.name }} ({{ flat.flatisfy_postal_code.postal_code }})
-                                </template>
-                                <template v-else>
-                                    ?
-                                </template>
-                            </td>
-                        </tr>
-
-                        <tr v-if="displayedStations">
-                            <th>
-                                {{ $t("flatsDetails.nearby_stations") }}
-                            </th>
-                            <td>
-                                {{ displayedStations }}
-                            </td>
-                        </tr>
-                        <tr v-if="Object.keys(flat.flatisfy_time_to).length">
-                            <th>
-                                {{ $t("flatsDetails.Times_to") }}
-                            </th>
-                            <td>
-                                <ul class="time_to_list">
-                                    <li v-for="(time_to, place) in flat.flatisfy_time_to" :key="place">
-                                        {{ place }}: {{ humanizeTimeTo(time_to["time"]) }}
-                                    </li>
-                                </ul>
-                            </td>
-                        </tr>
-                        <tr>
-                            <th>
-                                {{ $t("flatsDetails.SquareMeterCost") }}
-                            </th>
-                            <td>
-                                {{ flat.sqCost }} {{ flat.currency }}
-                            </td>
-                        </tr>
-                    </table>
-                </div>
-                <div>
-                    <h3>{{ $t("flatsDetails.Location") }}</h3>
-
-                    <FlatsMap :flats="flatMarker" :places="timeToPlaces" :journeys="journeys"></FlatsMap>
-                </div>
-                <div>
-                    <h3>{{ $t("flatsDetails.Notes") }}</h3>
-
-                    <form v-on:submit="updateFlatNotes">
-                        <textarea ref="notesTextarea" rows="10" :v-model="flat.notes"></textarea>
-                        <p class="right"><input type="submit" :value="$t('flatsDetails.Save')"/></p>
-                    </form>
-                </div>
-            </div>
-
-            <div class="right-panel">
-                <h3>{{ $t("flatsDetails.Contact") }}</h3>
-                <div class="contact">
-                    <template v-if="flat.phone">
-                        <p v-for="phoneNumber in flat.phone.split(',')">
-                            <a :href="'tel:+33' + normalizePhoneNumber(phoneNumber)">{{ phoneNumber }}</a>
-                        </p>
-                    </template>
-                    <template v-if="flat.urls.length == 1">
-                        <a :href="flat.urls[0]" target="_blank">
-                            {{ $tc("common.Original_post", 1) }}
-                            <i class="fa fa-external-link" aria-hidden="true"></i>
-                        </a>
-                    </template>
-                    <template v-else-if="flat.urls.length > 1">
-                        <p>{{ $tc("common.Original_post", flat.urls.length) }}
-                            <ul>
-                                <li v-for="(url, index) in flat.urls">
-                                    <a :href="url" target="_blank">
-                                        {{ $tc("common.Original_post", 1) }} {{ index + 1 }}
-                                        <i class="fa fa-external-link" aria-hidden="true"></i>
-                                    </a>
-                                </li>
-                            </ul>
-                        </p>
-                    </template>
-                </div>
-
-                <h3>{{ $t("flatsDetails.Visit") }}</h3>
-                <div class="visit">
-                    <flat-pickr
-                        :value="flatpickrValue"
-                        :config="flatpickrConfig"
-                        :placeholder="$t('flatsDetails.setDateOfVisit')"
-                    />
-                </div>
-
-                <h3>{{ $t("common.Actions") }}</h3>
-
-                <nav>
-                    <ul>
-                        <template v-if="flat.status !== 'user_deleted'">
-                            <Notation :flat="flat"></Notation>
-                            <li>
-                                <button v-on:click="updateFlatStatus('user_deleted')" class="fullButton">
-                                    <i class="fa fa-trash" aria-hidden="true"></i>
-                                    {{ $t("common.Remove") }}
-                                </button>
-                            </li>
-                        </template>
-                        <template v-else>
-                            <li>
-                                <button v-on:click="updateFlatStatus('new')" class="fullButton">
-                                    <i class="fa fa-undo" aria-hidden="true"></i>
-                                    {{ $t("common.Restore") }}
-                                </button>
-                            </li>
-                        </template>
-                    </ul>
-                </nav>
-            </div>
-        </div>
-    </div>
-</template>
-
-<script>
-import flatPickr from 'vue-flatpickr-component'
-import moment from 'moment'
-import 'font-awesome-webpack'
-import 'flatpickr/dist/flatpickr.css'
-
-import FlatsMap from '../components/flatsmap.vue'
-import Slider from '../components/slider.vue'
-import Notation from '../components/notation.vue'
-
-import { capitalize } from '../tools'
-
-export default {
-    components: {
-        FlatsMap,
-        Slider,
-        flatPickr,
-        Notation
-    },
-
-    created () {
-        this.fetchData()
-    },
-
-    data () {
-        return {
-            // TODO: Flatpickr locale
-            'overloadNotation': null,
-            'flatpickrConfig': {
-                static: true,
-                altFormat: 'h:i K, M j, Y',
-                altInput: true,
-                enableTime: true,
-                onChange: selectedDates => this.updateFlatVisitDate(selectedDates.length > 0 ? selectedDates[0] : null)
-            }
-        }
-    },
-
-    props: ['flat'],
-
-    computed: {
-        isLoading () {
-            return this.$store.getters.isLoading
-        },
-        flatMarker () {
-            return this.$store.getters.flatsMarkers(this.$router, flat => flat.id === this.flat.id)
-        },
-        'flatpickrValue' () {
-            if (this.flat && this.flat.visit_date) {
-                return this.flat.visit_date.local().format()
-            }
-            return null
-        },
-        timeToPlaces () {
-            return this.$store.getters.timeToPlaces(this.flat.flatisfy_constraint)
-        },
-        notation () {
-            if (this.overloadNotation) {
-                return this.overloadNotation
-            }
-            return this.flat.notation
-        },
-        journeys () {
-            if (Object.keys(this.flat.flatisfy_time_to).length > 0) {
-                const journeys = []
-                for (const place in this.flat.flatisfy_time_to) {
-                    this.flat.flatisfy_time_to[place].sections.forEach(
-                        section => journeys.push({
-                            geojson: section.geojson,
-                            options: {
-                                color: section.color ? ('#' + section.color) : '#2196f3',
-                                dashArray: section.color ? 'none' : '2, 10'
-                            }
-                        })
-                    )
-                }
-                return journeys
-            }
-            return []
-        },
-        displayedStations () {
-            if (this.flat.flatisfy_stations.length > 0) {
-                const stationsNames = this.flat.flatisfy_stations.map(station => station.name)
-                return stationsNames.join(', ')
-            } else {
-                return null
-            }
-        }
-    },
-
-    watch: {
-        flat: 'fetchData'
-    },
-
-    methods: {
-        fetchData () {
-            this.$store.dispatch('getAllTimeToPlaces')
-        },
-
-        updateFlatStatus (status) {
-            this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: status })
-        },
-
-        updateFlatNotes () {
-            const notes = this.$refs.notesTextarea.value
-            this.$store.dispatch(
-                'updateFlatNotes',
-                { flatId: this.flat.id, newNotes: notes }
-            )
-        },
-
-        updateFlatVisitDate (date) {
-            if (date) {
-                date = moment(date).utc().format()
-            }
-            this.$store.dispatch(
-                'updateFlatVisitDate',
-                { flatId: this.flat.id, newVisitDate: date }
-            )
-        },
-
-        humanizeTimeTo (time) {
-            const minutes = Math.floor(time.as('minutes'))
-            return minutes + ' ' + this.$tc('common.mins', minutes)
-        },
-
-        normalizePhoneNumber (phoneNumber) {
-            phoneNumber = phoneNumber.replace(/ /g, '')
-            phoneNumber = phoneNumber.replace(/\./g, '')
-            return phoneNumber
-        },
-
-        capitalize: capitalize
-    }
-}
-</script>
-
-<style scoped>
-.expired {
-    font-weight: bold;
-    text-transform: uppercase;
-}
-
-@media screen and (min-width: 768px) {
-    .grid {
-        display: grid;
-        grid-gap: 50px;
-        grid-template-columns: 75fr 25fr;
-    }
-
-    .left-panel {
-        grid-column: 1;
-        grid-row: 1;
-    }
-
-    .right-panel {
-        grid-column: 2;
-        grid-row: 1;
-    }
-}
-
-.left-panel textarea {
-    width: 100%;
-}
-
-.right {
-    text-align: right;
-}
-
-nav ul {
-    list-style-type: none;
-    padding-left: 1em;
-}
-
-.contact {
-    padding-left: 1em;
-}
-
-.right-panel li {
-    margin-bottom: 1em;
-    margin-top: 1em;
-}
-
-button {
-    cursor: pointer;
-    width: 75%;
-    padding: 0.3em;
-    font-size: 0.9em;
-}
-
-table {
-    table-layout: fixed;
-}
-
-td {
-    word-wrap: break-word;
-    word-break: break-all;
-    white-space: normal;
-}
-
-.time_to_list {
-    margin: 0;
-    padding-left: 0;
-    list-style-position: outside;
-    list-style-type: none;
-}
-
-@media screen and (max-width: 767px) {
-    .right-panel nav {
-        text-align: center;
-    }
-
-    .fullButton {
-        width: 100%;
-    }
-}
-</style>
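Among the logic removed with this file is the `journeys` computed property, which flattens each place's route sections into map layers: solid lines in the transit line's own color where one is present, dashed fallback blue for the rest (walking legs). The same mapping as a standalone sketch, with a hypothetical `buildJourneyLayers` name:

// Build one GeoJSON layer description per route section.
const buildJourneyLayers = function (flatisfyTimeTo) {
    const journeys = []
    for (const place in flatisfyTimeTo) {
        flatisfyTimeTo[place].sections.forEach(
            section => journeys.push({
                geojson: section.geojson,
                options: {
                    // Transit legs carry a line color; walking legs do not
                    // and are drawn dashed in the fallback blue.
                    color: section.color ? ('#' + section.color) : '#2196f3',
                    dashArray: section.color ? 'none' : '2, 10'
                }
            })
        )
    }
    return journeys
}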
@ -1,26 +1,21 @@
 <template lang="html">
     <div class="full">
-        <v-map v-if="bounds" :zoom="zoom.defaultZoom" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom" v-on:click="$emit('select-flat', null)" @update:bounds="bounds = $event">
+        <v-map :zoom="zoom.defaultZoom" :center="center" :bounds="bounds" :min-zoom="zoom.minZoom" :max-zoom="zoom.maxZoom">
             <v-tilelayer :url="tiles.url" :attribution="tiles.attribution"></v-tilelayer>
-            <v-marker-cluster>
             <template v-for="marker in flats">
-                <v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat" v-on:click="$emit('select-flat', marker.flatId)">
-                    <!-- <v-popup :content="marker.content"></v-popup> -->
+                <v-marker :lat-lng="{ lat: marker.gps[0], lng: marker.gps[1] }" :icon="icons.flat">
+                    <v-popup :content="marker.content"></v-popup>
                 </v-marker>
             </template>
-            </v-marker-cluster>
-            <v-marker-cluster>
             <template v-for="(place_gps, place_name) in places">
                 <v-marker :lat-lng="{ lat: place_gps[0], lng: place_gps[1] }" :icon="icons.place">
                     <v-tooltip :content="place_name"></v-tooltip>
                 </v-marker>
             </template>
-            </v-marker-cluster>
             <template v-for="journey in journeys">
                 <v-geojson-layer :geojson="journey.geojson" :options="Object.assign({}, defaultGeoJSONOptions, journey.options)"></v-geojson-layer>
             </template>
         </v-map>
-        <div v-else>Nothing to display yet</div>
     </div>
 </template>
@ -36,13 +31,10 @@ L.Icon.Default.mergeOptions({
 })

 import 'leaflet/dist/leaflet.css'
-import 'leaflet.markercluster/dist/MarkerCluster.css'
-import 'leaflet.markercluster/dist/MarkerCluster.Default.css'

 require('leaflet.icon.glyph')

-import { LMap, LTileLayer, LMarker, LTooltip, LPopup, LGeoJson } from 'vue2-leaflet'
-import Vue2LeafletMarkerCluster from 'vue2-leaflet-markercluster'
+import Vue2Leaflet from 'vue2-leaflet'

 export default {
     data () {
@ -54,11 +46,11 @@ export default {
             fillColor: '#e4ce7f',
             fillOpacity: 1
         },
-        bounds: [[40.91351257612758, -7.580566406250001], [51.65892664880053, 12.0849609375]],
+        center: null,
         zoom: {
-            defaultZoom: 6,
-            minZoom: 5,
-            maxZoom: 20
+            defaultZoom: 13,
+            minZoom: 11,
+            maxZoom: 17
         },
         tiles: {
             url: 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
@ -75,27 +67,25 @@ export default {
     },

     components: {
-        'v-map': LMap,
-        'v-tilelayer': LTileLayer,
-        'v-marker': LMarker,
-        'v-marker-cluster': Vue2LeafletMarkerCluster,
-        'v-tooltip': LTooltip,
-        'v-popup': LPopup,
-        'v-geojson-layer': LGeoJson
+        'v-map': Vue2Leaflet.Map,
+        'v-tilelayer': Vue2Leaflet.TileLayer,
+        'v-marker': Vue2Leaflet.Marker,
+        'v-tooltip': Vue2Leaflet.Tooltip,
+        'v-popup': Vue2Leaflet.Popup,
+        'v-geojson-layer': Vue2Leaflet.GeoJSON
     },

-    watch: {
-        flats: 'computeBounds',
-        places: 'computeBounds'
-    },
-
-    methods: {
-        computeBounds (newData, oldData) {
-            if (this.flats.length && JSON.stringify(newData) !== JSON.stringify(oldData)) {
-                const allBounds = []
-                this.flats.forEach(flat => allBounds.push(flat.gps))
-                Object.keys(this.places).forEach(place => allBounds.push(this.places[place]))
-                this.bounds = allBounds.length ? L.latLngBounds(allBounds) : undefined
+    computed: {
+        bounds () {
+            let bounds = []
+            this.flats.forEach(flat => bounds.push(flat.gps))
+            Object.keys(this.places).forEach(place => bounds.push(this.places[place]))
+
+            if (bounds.length > 0) {
+                bounds = L.latLngBounds(bounds)
+                return bounds
+            } else {
+                return null
             }
         }
     },
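The master side keeps `bounds` in `data` and refreshes it from a watcher on `flats` and `places` (with a JSON.stringify guard against no-op updates), where the responsive side uses a computed property. The core computation is the same either way; as a standalone sketch, assuming Leaflet's `L` is in scope as it is in the component:

// Enclose every flat and place GPS pair in one Leaflet bounds box.
const computeBounds = function (flats, places) {
    const allBounds = []
    flats.forEach(flat => allBounds.push(flat.gps))
    Object.keys(places).forEach(place => allBounds.push(places[place]))
    return allBounds.length ? L.latLngBounds(allBounds) : undefined
}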
@ -37,34 +37,73 @@
                     <span class="sr-only">{{ $t("common.sort" + capitalize(sortOrder)) }}</span>
                 </span>
             </th>
-            <th class="pointer" v-on:click="updateSortBy('sqCost')">
-                {{ $t("flatsDetails.SquareMeterCost") }}
-                <span v-if="sortBy === 'sqCost'">
-                    <i class="fa" :class="'fa-angle-' + sortOrder" aria-hidden="true"></i>
-                    <span class="sr-only">{{ $t("common.sort" + capitalize(sortOrder)) }}</span>
-                </span>
-            </th>
             <th>{{ $t("common.Actions") }}</th>
         </tr>
     </thead>
     <tbody>
-        <FlatsTableLine :flat="flat" :showNotationColumn="showNotationColumn" :showNotes="showNotes" v-for="flat in sortedFlats" :key="flat.id"></FlatsTableLine>
+        <tr v-for="flat in sortedFlats" :key="flat.id" v-on:click="event => showMore(event, flat.id)" class="pointer">
+            <td v-if="showNotationColumn">
+                <template v-for="n in range(flat.notation)">
+                    <i class="fa fa-star" aria-hidden="true" :title="capitalize($t('status.followed'))"></i>
+                </template>
+            </td>
+            <td>
+                <template v-if="!showNotationColumn" v-for="n in range(flat.notation)">
+                    <i class="fa fa-star" aria-hidden="true" :title="capitalize($t('status.followed'))"></i>
+                </template>
+
+                [{{ flat.id.split("@")[1] }}] {{ flat.title }}
+
+                <template v-if="flat.photos && flat.photos.length > 0">
+                    <br/>
+                    <img :src="flat.photos[0].url"/>
+                </template>
+
+                <template v-if="showNotes">
+                    <br/>
+                    <pre>{{ flat.notes }}</pre>
+                </template>
+            </td>
+            <td>{{ flat.area }} m²</td>
+            <td>
+                {{ flat.rooms ? flat.rooms : '?'}}
+                <span class="mobile-only">{{ $t("flatsDetails.RM") }}</span>
+            </td>
+            <td>
+                {{ flat.cost }} {{ flat.currency }}
+                <template v-if="flat.utilities == 'included'">
+                    {{ $t("flatsDetails.utilities_included") }}
+                </template>
+                <template v-else-if="flat.utilities == 'excluded'">
+                    {{ $t("flatsDetails.utilities_excluded") }}
+                </template>
+            </td>
+            <td>
+                <router-link :to="{name: 'details', params: {id: flat.id}}" :aria-label="$t('common.More_about') + ' ' + flat.id" :title="$t('common.More_about') + ' ' + flat.id">
+                    <i class="fa fa-plus" aria-hidden="true"></i>
+                </router-link>
+                <a :href="flat.urls[0]" :aria-label="$t('common.Original_post_for') + ' ' + flat.id" :title="$t('common.Original_post_for') + ' ' + flat.id" target="_blank">
+                    <i class="fa fa-external-link" aria-hidden="true"></i>
+                </a>
+                <button v-if="flat.status !== 'user_deleted'" v-on:click="updateFlatStatus(flat.id, 'user_deleted')" :aria-label="$t('common.Remove') + ' ' + flat.id" :title="$t('common.Remove') + ' ' + flat.id">
+                    <i class="fa fa-trash" aria-hidden="true"></i>
+                </button>
+                <button v-else v-on:click="updateFlatStatus(flat.id, 'new')" :aria-label="$t('common.Restore') + ' ' + flat.id" :title="$t('common.Restore') + ' ' + flat.id">
+                    <i class="fa fa-undo" aria-hidden="true"></i>
+                </button>
+            </td>
+        </tr>
     </tbody>
 </table>
 </template>

 <script>
+// TODO: Table is too wide on mobile device, and button trash is not aligned with links
 import 'font-awesome-webpack'

-import FlatsTableLine from './flatstableline.vue'
-
-import { capitalize } from '../tools'
+import { capitalize, range } from '../tools'

 export default {
-    components: {
-        FlatsTableLine
-    },
-
     data () {
         return {
             sortBy: this.initialSortBy,
@ -118,6 +157,9 @@ export default {
     },

     methods: {
+        updateFlatStatus (id, status) {
+            this.$store.dispatch('updateFlatStatus', { flatId: id, newStatus: status })
+        },
         updateSortBy (field) {
             if (this.sortBy === field) {
                 if (this.sortOrder === 'up') {
@ -129,7 +171,13 @@ export default {
                 this.sortBy = field
             }
         },
-        capitalize: capitalize
+        showMore (event, flatId) {
+            if (event.target.tagName === 'TD') {
+                this.$router.push({ name: 'details', params: { id: flatId }})
+            }
+        },
+        capitalize: capitalize,
+        range: range
     }
 }
 </script>
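`showMore` checks `event.target.tagName` so that only clicks landing on the cell itself navigate to the detail view; clicks on the buttons and links inside the row bubble up too, but keep their own behavior. The guard in isolation, under hypothetical names:

// Row-level click handler: navigate only for plain cell clicks.
const handleRowClick = function (event, navigate) {
    if (event.target.tagName === 'TD') {
        navigate()
    }
}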
@ -172,13 +220,27 @@ pre {
     word-break: break-all;
 }

-.no-padding {
-    padding: 0;
-}
-
-.fill {
-    display: block;
-    padding: 2em;
-    text-decoration: none;
+@media screen and (max-width: 767px) {
+    table {
+        margin: 0;
+        width: 100%;
+    }
+
+    thead {
+        display: none;
+    }
+
+    th, td {
+        padding: 0.25em;
+    }
+
+    td a, td button {
+        display: block;
+        width: 1em;
+    }
+
+    td {
+        vertical-align: top;
+    }
 }
 </style>
@ -1,100 +0,0 @@
-<template>
-    <tr>
-        <td v-if="showNotationColumn">
-            <Notation :flat="flat" :title="capitalizedStatus"></Notation>
-        </td>
-        <td class="no-padding">
-            <Notation v-if="!showNotationColumn" :flat="flat" :title="capitalizedStatus"></Notation>
-            <router-link class="fill" :to="{name: 'details', params: {id: flat.id}}">
-                [{{ flat.id.split("@")[1] }}]
-                <span class="expired">{{ flat.is_expired ? "[" + $t("common.expired") + "]" : null }}</span>
-                {{ flat.title }}
-
-                <template v-if="photo">
-                    <br/>
-                    <img :src="photo" height="200" style="max-width: 25vw" />
-                </template>
-
-                <template v-if="showNotes">
-                    <br/>
-                    <pre>{{ flat.notes }}</pre>
-                </template>
-            </router-link>
-        </td>
-        <td>{{ flat.area }} m²</td>
-        <td>
-            {{ flat.rooms ? flat.rooms : '?'}}
-        </td>
-        <td>
-            {{ flat.cost | cost(flat.currency) }}
-            <template v-if="flat.utilities == 'included'">
-                {{ $t("flatsDetails.utilities_included") }}
-            </template>
-            <template v-else-if="flat.utilities == 'excluded'">
-                {{ $t("flatsDetails.utilities_excluded") }}
-            </template>
-        </td>
-        <td>
-            {{ flat.sqCost }} {{ flat.currency }}
-        </td>
-        <td>
-            <router-link :to="{name: 'details', params: {id: flat.id}}" :aria-label="$t('common.More_about') + ' ' + flat.id" :title="$t('common.More_about') + ' ' + flat.id">
-                <i class="fa fa-eye" aria-hidden="true"></i>
-            </router-link>
-            <a :href="flat.urls[0]" :aria-label="$t('common.Original_post_for') + ' ' + flat.id" :title="$t('common.Original_post_for') + ' ' + flat.id" target="_blank">
-                <i class="fa fa-external-link" aria-hidden="true"></i>
-            </a>
-            <button v-if="flat.status !== 'user_deleted'" v-on:click="updateFlatStatus(flat.id, 'user_deleted')" :aria-label="$t('common.Remove') + ' ' + flat.id" :title="$t('common.Remove') + ' ' + flat.id">
-                <i class="fa fa-trash" aria-hidden="true"></i>
-            </button>
-            <button v-else v-on:click="updateFlatStatus(flat.id, 'new')" :aria-label="$t('common.Restore') + ' ' + flat.id" :title="$t('common.Restore') + ' ' + flat.id">
-                <i class="fa fa-undo" aria-hidden="true"></i>
-            </button>
-        </td>
-    </tr>
-</template>
-
-<script>
-import { capitalize } from '../tools'
-import Notation from '../components/notation.vue'
-
-export default {
-    props: {
-        flat: Object,
-        showNotationColumn: Boolean,
-        showNotes: Boolean
-    },
-
-    components: {
-        Notation
-    },
-
-    computed: {
-        capitalizedStatus () {
-            return capitalize(this.$t('status.followed'))
-        },
-        photo () {
-            if (this.flat.photos && this.flat.photos.length > 0) {
-                if (this.flat.photos[0].local) {
-                    return `/data/img/${this.flat.photos[0].local}`
-                }
-                return this.flat.photos[0].url
-            }
-            return null
-        }
-    },
-
-    methods: {
-        updateFlatStatus (id, status) {
-            this.$store.dispatch('updateFlatStatus', { flatId: id, newStatus: status })
-        }
-    }
-}
-</script>
-
-<style scoped>
-.expired {
-    font-weight: bold;
-    text-transform: uppercase;
-}
-</style>
@ -1,68 +0,0 @@
-<template>
-    <div>
-        <template v-for="n in range(5)">
-            <button v-bind:key="n" v-on:mouseover="handleHover(n)" v-on:mouseout="handleOut()" v-on:click="updateNotation(n)">
-                <i class="fa" v-bind:class="{'fa-star': n < notation, 'fa-star-o': n >= notation}" aria-hidden="true"></i>
-            </button>
-        </template>
-    </div>
-</template>
-
-<script>
-
-import { range } from '../tools'
-import 'flatpickr/dist/flatpickr.css'
-
-export default {
-    data () {
-        return {
-            'overloadNotation': null
-        }
-    },
-
-    props: ['flat'],
-
-    computed: {
-        notation () {
-            if (this.overloadNotation) {
-                return this.overloadNotation
-            }
-            return this.flat.notation
-        }
-    },
-
-    methods: {
-        updateNotation (notation) {
-            notation = notation + 1
-
-            if (notation === this.flat.notation) {
-                this.flat.notation = 0
-                this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: 0 })
-                this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'new' })
-            } else {
-                this.flat.notation = notation
-                this.$store.dispatch('updateFlatNotation', { flatId: this.flat.id, newNotation: notation })
-                this.$store.dispatch('updateFlatStatus', { flatId: this.flat.id, newStatus: 'followed' })
-            }
-        },
-
-        handleHover (n) {
-            this.overloadNotation = n + 1
-        },
-
-        handleOut () {
-            this.overloadNotation = null
-        },
-
-        range: range
-    }
-}
-</script>
-
-<style scoped>
-button {
-    border: none;
-    width: auto;
-    background-color: transparent;
-}
-</style>
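The deleted notation component encodes a small state machine: clicking the star that matches the current rating clears the rating and drops the flat back to `new`, while clicking any other star sets that rating and marks the flat `followed`. The transition as a pure function, with hypothetical names:

// Stars are 0-indexed in the template, hence the +1.
const nextNotationState = function (currentNotation, clickedIndex) {
    const clicked = clickedIndex + 1
    if (clicked === currentNotation) {
        return { notation: 0, status: 'new' }
    }
    return { notation: clicked, status: 'followed' }
}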
@ -1,15 +1,15 @@
 <template>
     <div @keydown="closeModal">
         <isotope ref="cpt" :options="isotopeOptions" v-images-loaded:on.progress="layout" :list="photos">
-            <div v-for="(photo, index) in photosURLOrLocal" :key="photo">
-                <img :src="photo" v-on:click="openModal(index)"/>
+            <div v-for="(photo, index) in photos" :key="photo.url">
+                <img :src="photo.url" v-on:click="openModal(index)"/>
             </div>
         </isotope>

         <div class="modal" ref="modal" :aria-label="$t('slider.Fullscreen_photo')" role="dialog">
             <span class="close"><button v-on:click="closeModal" :title="$t('common.Close')" :aria-label="$t('common.Close')">×</button></span>

-            <img class="modal-content" :src="photosURLOrLocal[modalImgIndex]">
+            <img class="modal-content" :src="photos[modalImgIndex].url">
         </div>
     </div>
 </template>
@ -27,17 +27,6 @@ export default {
         isotope
     },

-    computed: {
-        photosURLOrLocal () {
-            return this.photos.map(photo => {
-                if (photo.local) {
-                    return `/data/img/${photo.local}`
-                }
-                return photo.url
-            })
-        }
-    },
-
     created () {
         window.addEventListener('keydown', event => {
             if (!this.isModalOpen) {
@ -125,8 +114,8 @@ export default {
 .modal-content {
     margin: auto;
     display: block;
-    max-height: 80%;
-    max-width: 100%;
+    height: 80%;
+    max-width: 700px;
 }

 .close {
@ -13,14 +13,10 @@ export default {
         'Close': 'Close',
         'sortUp': 'Sort in ascending order',
         'sortDown': 'Sort in descending order',
-        'mins': 'min | mins',
-        'Unknown': 'Unknown',
-        'expired': 'expired'
+        'mins': 'min | mins'
     },
     home: {
-        'new_available_flats': 'New available flats',
-        'Last_update': 'Last update:',
-        'show_expired_flats': 'Show expired flats'
+        'new_available_flats': 'New available flats'
     },
     flatListing: {
         'no_available_flats': 'No available flats.',
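Strings such as 'min | mins' and 'room | rooms' use vue-i18n's pipe-separated plural forms, picked at call sites like `$tc('common.mins', minutes)`. A minimal self-contained sketch of the behavior:

import Vue from 'vue'
import VueI18n from 'vue-i18n'

Vue.use(VueI18n)

// vue-i18n splits the message on '|' and tc() picks a variant by count.
const i18n = new VueI18n({
    locale: 'en',
    messages: { en: { common: { mins: 'min | mins' } } }
})

console.log(i18n.tc('common.mins', 1)) // 'min'
console.log(i18n.tc('common.mins', 5)) // 'mins'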
@ -38,25 +34,20 @@ export default {
         'Area': 'Area',
         'Rooms': 'Rooms',
         'Cost': 'Cost',
-        'SquareMeterCost': 'Cost / m²',
         'utilities_included': '(utilities included)',
         'utilities_excluded': '(utilities excluded)',
         'Description': 'Description',
-        'First_posted': 'First posted',
         'Details': 'Details',
         'Metadata': 'Metadata',
         'postal_code': 'Postal code',
         'nearby_stations': 'Nearby stations',
         'Times_to': 'Times to',
         'Location': 'Location',
-        'Notes': 'Notes',
-        'Save': 'Save',
         'Contact': 'Contact',
-        'Visit': 'Visit',
-        'setDateOfVisit': 'Set date of visit',
         'no_phone_found': 'No phone found',
         'rooms': 'room | rooms',
-        'bedrooms': 'bedroom | bedrooms'
+        'bedrooms': 'bedroom | bedrooms',
+        'RM': 'RM'
     },
     status: {
         'new': 'new',
@ -1,75 +0,0 @@
-export default {
-    common: {
-        'flats': 'appartement | appartements',
-        'loading': 'Chargement…',
-        'Actions': 'Actions',
-        'More_about': 'Plus sur',
-        'Remove': 'Enlever',
-        'Restore': 'Remettre',
-        'Original_post': 'Annonce originale | Annonces originales',
-        'Original_post_for': 'Annonce originale pour',
-        'Follow': 'Suivre',
-        'Unfollow': 'Arrêter de suivre',
-        'Close': 'Fermer',
-        'sortUp': 'Trier par ordre croissant',
-        'sortDown': 'Trier par ordre décroissant',
-        'mins': 'min | mins',
-        'Unknown': 'Inconnu',
-        'expired': 'expiré'
-    },
-    home: {
-        'new_available_flats': 'Nouveaux appartements disponibles',
-        'Last_update': 'Dernière mise à jour :',
-        'show_expired_flats': 'Montrer les annonces expirées'
-    },
-    flatListing: {
-        'no_available_flats': 'Pas d\'appartement disponible.',
-        'no_matching_flats': 'Pas d\'appartement correspondant.'
-    },
-    menu: {
-        'available_flats': 'Appartements disponibles',
-        'followed_flats': 'Appartements suivis',
-        'by_status': 'Appartements par statut',
-        'search': 'Rechercher'
-    },
-    flatsDetails: {
-        'Notation': 'Note',
-        'Title': 'Titre',
-        'Area': 'Surface',
-        'Rooms': 'Pièces',
-        'Cost': 'Coût',
-        'SquareMeterCost': 'Coût / m²',
-        'utilities_included': '(charges comprises)',
-        'utilities_excluded': '(charges non comprises)',
-        'Description': 'Description',
-        'First_posted': 'Posté pour la première fois',
-        'Details': 'Détails',
-        'Metadata': 'Metadonnées',
-        'postal_code': 'Code postal',
-        'nearby_stations': 'Stations proches',
-        'Times_to': 'Temps jusqu\'à',
-        'Location': 'Localisation',
-        'Notes': 'Notes',
-        'Save': 'Sauvegarder',
-        'Contact': 'Contact',
-        'Visit': 'Visite',
-        'setDateOfVisit': 'Entrer une date de visite',
-        'no_phone_found': 'Pas de numéro de téléphone trouvé',
-        'rooms': 'pièce | pièces',
-        'bedrooms': 'chambre | chambres'
-    },
-    status: {
-        'new': 'nouveau',
-        'followed': 'suivi',
-        'ignored': 'ignoré',
-        'user_deleted': 'effacé',
-        'duplicate': 'en double'
-    },
-    slider: {
-        'Fullscreen_photo': 'Photo en plein écran'
-    },
-    search: {
-        'input_placeholder': 'Tapez n\'importe quoi à rechercher…',
-        'Search': 'Chercher !'
-    }
-}
@ -1,10 +1,8 @@
 import Vue from 'vue'
 import VueI18n from 'vue-i18n'
-import moment from 'moment'

 // Import translations
 import en from './en'
-import fr from './fr'

 Vue.use(VueI18n)

@ -34,8 +32,7 @@ export function getBrowserLocales () {
 }

 const messages = {
-    'en': en,
-    'fr': fr
+    'en': en
 }

 const locales = getBrowserLocales()
@ -49,9 +46,6 @@ for (var i = 0; i < locales.length; ++i) {
     }
 }

-// Set the locale for Moment.js
-moment.locale(locale)
-
 export default new VueI18n({
     locale: locale,
     messages
@ -3,12 +3,9 @@ import Vue from 'vue'
 import i18n from './i18n'
 import router from './router'
 import store from './store'
-import { costFilter } from './tools'

 import App from './components/app.vue'

-Vue.filter('cost', costFilter)
-
 new Vue({
     i18n,
     router,
@ -39,22 +39,10 @@ export default {
             commit(types.UPDATE_FLAT_NOTES, { flatId, newNotes })
         })
     },
-    updateFlatVisitDate ({ commit }, { flatId, newVisitDate }) {
-        commit(types.IS_LOADING)
-        api.updateFlatVisitDate(flatId, newVisitDate, response => {
-            commit(types.UPDATE_FLAT_VISIT_DATE, { flatId, newVisitDate })
-        })
-    },
     doSearch ({ commit }, { query }) {
         commit(types.IS_LOADING)
         api.doSearch(query, flats => {
            commit(types.REPLACE_FLATS, { flats })
         })
-    },
-    getMetadata ({ commit }) {
-        commit(types.IS_LOADING)
-        api.getMetadata(metadata => {
-            commit(types.RECEIVE_METADATA, { metadata })
-        })
     }
 }
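Each action in this file follows the same shape: commit `IS_LOADING`, delegate to the API helper, and commit the result from its callback. Written once as a hypothetical factory (the real file spells each action out; `types` and `api` are the imports already present there):

// `apiCall` and `mutationType` stand in for any of the pairs above,
// e.g. (api.doSearch, types.REPLACE_FLATS).
const makeAction = (apiCall, mutationType) => ({ commit }, payload) => {
    commit(types.IS_LOADING)
    apiCall(payload, result => {
        commit(mutationType, result)
    })
}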
@ -1,88 +1,58 @@
-import { findFlatGPS, costFilter } from '../tools'
+import { findFlatGPS } from '../tools'

 export default {
-    allFlats: (state) => state.flats,
+    allFlats: state => state.flats,

-    flat: (state, getters) => (id) =>
-        state.flats.find((flat) => flat.id === id),
+    flat: (state, getters) => id => state.flats.find(flat => flat.id === id),

-    isLoading: (state) => state.loading > 0,
+    isLoading: state => state.loading > 0,

-    inseeCodesFlatsBuckets: (state, getters) => (filter) => {
-        const buckets = {}
+    postalCodesFlatsBuckets: (state, getters) => filter => {
+        const postalCodeBuckets = {}

-        state.flats.forEach((flat) => {
+        state.flats.forEach(flat => {
             if (!filter || filter(flat)) {
-                const insee = flat.flatisfy_postal_code.insee_code
-                if (!buckets[insee]) {
-                    buckets[insee] = {
-                        name: flat.flatisfy_postal_code.name,
-                        flats: []
+                const postalCode = flat.flatisfy_postal_code.postal_code
+                if (!postalCodeBuckets[postalCode]) {
+                    postalCodeBuckets[postalCode] = {
+                        'name': flat.flatisfy_postal_code.name,
+                        'flats': []
                     }
                 }
-                buckets[insee].flats.push(flat)
+                postalCodeBuckets[postalCode].flats.push(flat)
             }
         })

-        return buckets
+        return postalCodeBuckets
     },

     flatsMarkers: (state, getters) => (router, filter) => {
         const markers = []
-        state.flats.forEach((flat) => {
+        state.flats.forEach(flat => {
             if (filter && filter(flat)) {
                 const gps = findFlatGPS(flat)

                 if (gps) {
-                    const previousMarker = markers.find(
-                        (marker) =>
-                            marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
+                    const previousMarkerIndex = markers.findIndex(
+                        marker => marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
                     )
-                    if (previousMarker) {
-                        // randomize position a bit
-                        // gps[0] += (Math.random() - 0.5) / 500
-                        // gps[1] += (Math.random() - 0.5) / 500
-                    }
-                    const href = router.resolve({
-                        name: 'details',
-                        params: { id: flat.id }
-                    }).href
-                    const cost = flat.cost
-                        ? costFilter(flat.cost, flat.currency)
-                        : ''
-                    markers.push({
-                        title: '',
-                        content:
-                            '<a href="' +
-                            href +
-                            '">' +
-                            flat.title +
-                            '</a>' +
-                            cost,
-                        gps: gps,
-                        flatId: flat.id
-                    })
+                    const href = router.resolve({ name: 'details', params: { id: flat.id }}).href
+                    if (previousMarkerIndex !== -1) {
+                        markers[previousMarkerIndex].content += '<br/><a href="' + href + '">' + flat.title + '</a>'
+                    } else {
+                        markers.push({
+                            'title': '',
+                            'content': '<a href="' + href + '">' + flat.title + '</a>',
+                            'gps': gps
+                        })
+                    }
                 }
             }
         })

         return markers
     },

-    allTimeToPlaces: (state) => {
-        const places = {}
-        Object.keys(state.timeToPlaces).forEach((constraint) => {
-            const constraintTimeToPlaces = state.timeToPlaces[constraint]
-            Object.keys(constraintTimeToPlaces).forEach((name) => {
-                places[name] = constraintTimeToPlaces[name]
-            })
-        })
-        return places
-    },
-
-    timeToPlaces: (state, getters) => (constraintName) => {
-        return state.timeToPlaces[constraintName]
-    },
-
-    metadata: (state) => state.metadata
+    allTimeToPlaces: state => state.timeToPlaces
 }
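On the responsive side, flats that geocode to the exact same point share a single marker: `findIndex` locates an existing marker at those coordinates and the new flat's link is appended to its popup content. The master side instead keeps one marker per flat and leaves grouping to the marker-cluster layer. The dedup step in isolation, under a hypothetical name:

// Merge a flat into an existing marker at the same GPS pair, or add a new one.
const addOrMergeMarker = function (markers, gps, href, title) {
    const link = '<a href="' + href + '">' + title + '</a>'
    const index = markers.findIndex(
        marker => marker.gps[0] === gps[0] && marker.gps[1] === gps[1]
    )
    if (index !== -1) {
        markers[index].content += '<br/>' + link
    } else {
        markers.push({ 'title': '', 'content': link, 'gps': gps })
    }
}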