# coding: utf-8
"""
This module handles the configuration management for Flatisfy.

It loads the default configuration, then overloads it with the provided
config file, and finally with any command-line options.
"""
from __future__ import absolute_import, print_function, unicode_literals

from builtins import str

import json
import logging
import os
import sys
import traceback

import appdirs

from woob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES

from flatisfy import data
from flatisfy import tools
from flatisfy.constants import TimeToModes
from flatisfy.models.postal_code import PostalCode


DIRPATH = os.path.dirname(os.path.realpath(__file__))


# Default configuration
DEFAULT_CONFIG = {
    # Constraints to match
    "constraints": {
        "default": {
            "type": None,  # RENT, SALE, SHARING
            "house_types": [],  # List of house types, must be in APART, HOUSE,
            # PARKING, LAND, OTHER or UNKNOWN
            "postal_codes": [],  # List of postal codes
            "insees": [],  # List of INSEE codes
            "area": (None, None),  # (min, max) in m^2
            "cost": (None, None),  # (min, max) in currency unit
            "rooms": (None, None),  # (min, max)
            "bedrooms": (None, None),  # (min, max)
            "minimum_nb_photos": None,  # Minimum number of photos
            "description_should_contain": [],  # List of terms (str) or lists
            # of terms (a nested list acting as an "or" on its terms)
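            # For instance (hypothetical terms), ["balcon", ["parking",
            # "garage"]] would keep only descriptions containing "balcon"
            # and at least one of "parking" or "garage".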
            "description_should_not_contain": [
                "vendu",
                "Vendu",
                "VENDU",
                "recherche",
            ],
            "time_to": {}  # Dict mapping names to {"gps": [lat, lng],
            # "time": (min, max),
            # "mode": Valid mode }
            # Time is in seconds
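            # A sketch of such an entry (hypothetical values; the mode name
            # assumes the TimeToModes enum exposes PUBLIC_TRANSPORT):
            #     "time_to": {
            #         "work": {
            #             "gps": [48.8566, 2.3522],
            #             "time": (None, 1800),
            #             "mode": "PUBLIC_TRANSPORT",
            #         }
            #     }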
        }
    },
    # Whether or not to store personal data from housing posts (phone number,
    # etc.)
    "store_personal_data": False,
    # Max distance between a housing and a found station, to avoid
    # false positives
    "max_distance_housing_station": 1500,
    # Score above which two flats are considered duplicates
    "duplicate_threshold": 15,
    # Score above which two images are considered duplicates through hash
    # comparison
    "duplicate_image_hash_threshold": 10,
    # Whether images should be downloaded and served locally
    "serve_images_locally": True,
    # Navitia API key
    "navitia_api_key": None,
    # Mapbox API key
    "mapbox_api_key": None,
    # Number of filtering passes to run
    "passes": 3,
    # Maximum number of entries to fetch
    "max_entries": None,
    # Directory in which data will be put. ``None`` is the XDG default
    # location.
    "data_directory": None,
    # Path to the modules directory containing all Woob modules.
    "modules_path": os.path.join(DIRPATH, "..", "modules"),
    # SQLAlchemy URI of the database to use
    "database": None,
    # Path to the Whoosh search index file. Use ``None`` to put it in
    # ``data_directory``.
    "search_index": None,
    # Web app port
    "port": 8080,
    # Web app host to listen on
    "host": "127.0.0.1",
    # Web server to use to serve the webapp (see the Bottle deployment doc)
    "webserver": None,
    # List of Woob backends to use (defaults to any available backend)
    "backends": None,
    # Should email notifications be sent?
    "send_email": False,
    "smtp_server": "localhost",
    "smtp_port": 25,
    "smtp_username": None,
    "smtp_password": None,
    "smtp_from": "noreply@flatisfy.org",
    "smtp_to": [],
    "notification_lang": "en",
    # The web site url, to be used in email notifications (it doesn't matter
    # whether the trailing slash is present or not)
    "website_url": "http://127.0.0.1:8080",
    "ignore_station": False,
}
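
# A user-provided JSON config file (see ``load_config`` below) overrides
# these defaults. Note that ``dict.update`` is shallow: a file that sets
# "constraints" replaces the whole default constraints block, and must then
# spell out every key checked by ``validate_config``. A minimal sketch with
# purely illustrative values:
#
#     {
#         "navitia_api_key": "<your key>",
#         "port": 8081,
#         "send_email": true
#     }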


LOGGER = logging.getLogger(__name__)


def validate_config(config, check_with_data):
    """
    Check that the config passed as argument is a valid configuration.

    :param config: A config dictionary to validate.
    :param check_with_data: Whether we should use the available OpenData to
        check the config values.
    :return: ``True`` if the configuration is valid, or the source line of
        the first failed check otherwise.
    """

    def _check_constraints_bounds(bounds):
        """
        Check the bounds for numeric constraints.
        """
        assert isinstance(bounds, list)
        assert len(bounds) == 2
        assert all(x is None or (isinstance(x, (float, int)) and x >= 0) for x in bounds)
        if bounds[0] is not None and bounds[1] is not None:
            assert bounds[1] > bounds[0]
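    # For instance, ``[None, 1500]`` (no minimum) and ``[20, 50]`` are valid
    # bounds, while ``[50, 20]`` or ``[-1, 10]`` would be rejected.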

    try:
        # Note: The traceback fetching code only handles single-line asserts.
        # Hence, we disable the line-too-long pylint check and the E501 flake8
        # check, and use long lines whenever needed, in order to have the full
        # assert message in the log output.
        # pylint: disable=locally-disabled,line-too-long

        assert config["passes"] in [0, 1, 2, 3]
        assert config["max_entries"] is None or (
            isinstance(config["max_entries"], int) and config["max_entries"] > 0
        )  # noqa: E501

        assert config["data_directory"] is None or isinstance(config["data_directory"], str)  # noqa: E501
        assert os.path.isdir(config["data_directory"])
        assert isinstance(config["search_index"], str)
        assert config["modules_path"] is None or isinstance(config["modules_path"], str)  # noqa: E501

        assert config["database"] is None or isinstance(config["database"], str)  # noqa: E501

        assert isinstance(config["port"], int)
        assert isinstance(config["host"], str)
        assert config["webserver"] is None or isinstance(config["webserver"], str)  # noqa: E501
        assert config["backends"] is None or isinstance(config["backends"], list)  # noqa: E501

        assert isinstance(config["send_email"], bool)
        assert config["smtp_server"] is None or isinstance(config["smtp_server"], str)  # noqa: E501
        assert config["smtp_port"] is None or isinstance(config["smtp_port"], int)  # noqa: E501
        assert config["smtp_username"] is None or isinstance(config["smtp_username"], str)  # noqa: E501
        assert config["smtp_password"] is None or isinstance(config["smtp_password"], str)  # noqa: E501
        assert config["smtp_to"] is None or isinstance(config["smtp_to"], list)
        assert config["notification_lang"] is None or isinstance(config["notification_lang"], str)

        assert isinstance(config["store_personal_data"], bool)
        assert isinstance(config["max_distance_housing_station"], (int, float))
        assert isinstance(config["duplicate_threshold"], int)
        assert isinstance(config["duplicate_image_hash_threshold"], int)

        # API keys
        assert config["navitia_api_key"] is None or isinstance(config["navitia_api_key"], str)  # noqa: E501
        assert config["mapbox_api_key"] is None or isinstance(config["mapbox_api_key"], str)  # noqa: E501

        assert config["ignore_station"] is None or isinstance(config["ignore_station"], bool)  # noqa: E501

        # Ensure constraints are ok
        assert config["constraints"]
        for constraint in config["constraints"].values():
            assert "type" in constraint
            assert isinstance(constraint["type"], str)
            assert constraint["type"].upper() in POSTS_TYPES.__members__

            assert "minimum_nb_photos" in constraint
            if constraint["minimum_nb_photos"]:
                assert isinstance(constraint["minimum_nb_photos"], int)
                assert constraint["minimum_nb_photos"] >= 0

            assert "description_should_contain" in constraint
            assert isinstance(constraint["description_should_contain"], list)
            if constraint["description_should_contain"]:
                for term in constraint["description_should_contain"]:
                    try:
                        assert isinstance(term, str)
                    except AssertionError:
                        # A nested list of terms acts as an "or" on its terms
                        assert isinstance(term, list)
                        assert all(isinstance(x, str) for x in term)

            assert "description_should_not_contain" in constraint
            assert isinstance(constraint["description_should_not_contain"], list)
            if constraint["description_should_not_contain"]:
                for term in constraint["description_should_not_contain"]:
                    assert isinstance(term, str)

            assert "house_types" in constraint
            assert constraint["house_types"]
            for house_type in constraint["house_types"]:
                assert house_type.upper() in HOUSE_TYPES.__members__

            assert "postal_codes" in constraint
            assert constraint["postal_codes"]
            assert all(isinstance(x, str) for x in constraint["postal_codes"])
            if "insee_codes" in constraint:
                assert constraint["insee_codes"]
                assert all(isinstance(x, str) for x in constraint["insee_codes"])

            if check_with_data:
                # Ensure data is built into db
                data.preprocess_data(config, force=False)
                # Check postal codes
                opendata = data.load_data(PostalCode, constraint, config)
                opendata_postal_codes = [x.postal_code for x in opendata]
                opendata_insee_codes = [x.insee_code for x in opendata]
                for postal_code in constraint["postal_codes"]:
                    assert postal_code in opendata_postal_codes  # noqa: E501
                if "insee_codes" in constraint:
                    for insee in constraint["insee_codes"]:
                        assert insee in opendata_insee_codes  # noqa: E501

            assert "area" in constraint
            _check_constraints_bounds(constraint["area"])

            assert "cost" in constraint
            _check_constraints_bounds(constraint["cost"])

            assert "rooms" in constraint
            _check_constraints_bounds(constraint["rooms"])

            assert "bedrooms" in constraint
            _check_constraints_bounds(constraint["bedrooms"])

            assert "time_to" in constraint
            assert isinstance(constraint["time_to"], dict)
            for name, item in constraint["time_to"].items():
                assert isinstance(name, str)
                assert "gps" in item
                assert isinstance(item["gps"], list)
                assert len(item["gps"]) == 2
                assert "time" in item
                _check_constraints_bounds(item["time"])
                if "mode" in item:
                    # Raises a KeyError, caught below, on an invalid mode
                    TimeToModes[item["mode"]]

        return True
    except (AssertionError, KeyError):
        _, _, exc_traceback = sys.exc_info()
        return traceback.extract_tb(exc_traceback)[-1][-1]
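
# ``validate_config`` either returns ``True`` or the offending source line,
# which callers can log. A sketch of the calling pattern used below:
#
#     result = validate_config(config, check_with_data=False)
#     if result is not True:
#         LOGGER.error("Error in configuration: %s.", result)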


def load_config(args=None, check_with_data=True):
    """
    Load the configuration from file.

    :param args: An argparse args structure.
    :param check_with_data: Whether we should use the available OpenData to
        check the config values. Defaults to ``True``.
    :return: The loaded config dict, or ``None`` if the config is invalid.
    """
    LOGGER.info("Initializing configuration...")
    # Default configuration
    config_data = DEFAULT_CONFIG.copy()

    # Load config from specified JSON
    if args and getattr(args, "config", None):
        LOGGER.debug("Loading configuration from %s.", args.config)
        try:
            with open(args.config, "r") as fh:
                config_data.update(json.load(fh))
        except (IOError, ValueError) as exc:
            LOGGER.error(
                "Unable to load configuration from file, using default configuration: %s.",
                exc,
            )

    # Overload config with arguments
    if args and getattr(args, "passes", None) is not None:
        LOGGER.debug("Overloading number of passes from CLI arguments: %d.", args.passes)
        config_data["passes"] = args.passes
    if args and getattr(args, "max_entries", None) is not None:
        LOGGER.debug(
            "Overloading maximum number of entries from CLI arguments: %d.",
            args.max_entries,
        )
        config_data["max_entries"] = args.max_entries
    if args and getattr(args, "port", None) is not None:
        LOGGER.debug("Overloading web app port: %d.", args.port)
        config_data["port"] = args.port
    if args and getattr(args, "host", None) is not None:
        LOGGER.debug("Overloading web app host: %s.", args.host)
        config_data["host"] = str(args.host)

    # Handle data_directory option
    if args and getattr(args, "data_dir", None) is not None:
        LOGGER.debug("Overloading data directory from CLI arguments.")
        config_data["data_directory"] = args.data_dir
    elif config_data["data_directory"] is None:
        config_data["data_directory"] = appdirs.user_data_dir("flatisfy", "flatisfy")
        LOGGER.debug("Using default XDG data directory: %s.", config_data["data_directory"])

    if not os.path.isdir(config_data["data_directory"]):
        LOGGER.info(
            "Creating data directory according to config: %s",
            config_data["data_directory"],
        )
        os.makedirs(config_data["data_directory"])
        os.makedirs(os.path.join(config_data["data_directory"], "images"))

    if config_data["database"] is None:
        config_data["database"] = "sqlite:///" + os.path.join(config_data["data_directory"], "flatisfy.db")

    if config_data["search_index"] is None:
        config_data["search_index"] = os.path.join(config_data["data_directory"], "search_index")

    # Handle constraints filtering
    if args and getattr(args, "constraints", None) is not None:
        LOGGER.info(
            (
                "Filtering constraints from config according to CLI argument. "
                "Using only the following constraints: %s."
            ),
            args.constraints.replace(",", ", "),
        )
        constraints_filter = args.constraints.split(",")
        config_data["constraints"] = {k: v for k, v in config_data["constraints"].items() if k in constraints_filter}

    # Sanitize website url
    if config_data["website_url"] is not None:
        if config_data["website_url"][-1] != "/":
            config_data["website_url"] += "/"

    config_validation = validate_config(config_data, check_with_data)
    if config_validation is True:
        LOGGER.info("Config has been fully initialized.")
        return config_data
    LOGGER.error("Error in configuration: %s.", config_validation)
    return None
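
# Typical usage (a sketch; ``args`` would come from flatisfy's argparse CLI):
#
#     config = load_config(args)
#     if config is None:
#         sys.exit(1)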


def init_config(output=None):
    """
    Initialize an empty configuration file.

    :param output: File to output content to. Defaults to ``stdout``.
    """
    config_data = DEFAULT_CONFIG.copy()

    if output and output != "-":
        with open(output, "w") as fh:
            fh.write(tools.pretty_json(config_data))
    else:
        print(tools.pretty_json(config_data))