Compare commits
2 Commits
Author | SHA1 | Date | |
---|---|---|---|
d6b75cacdd | |||
691e752081 |
30
LICENSE
30
LICENSE
@ -1,9 +1,21 @@
|
||||
* --------------------------------------------------------------------------------
|
||||
* "THE NO-ALCOHOL BEER-WARE LICENSE" (Revision 42):
|
||||
* Phyks (webmaster@phyks.me) wrote this file. As long as you retain this notice you
|
||||
* can do whatever you want with this stuff (and you can also do whatever you want
|
||||
* with this stuff without retaining it, but that's not cool...). If we meet some
|
||||
* day, and you think this stuff is worth it, you can buy me a <del>beer</del> soda
|
||||
* in return.
|
||||
* Phyks
|
||||
* ---------------------------------------------------------------------------------
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Phyks (Lucas Verney)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
275
backend/commands.py
Normal file
275
backend/commands.py
Normal file
@ -0,0 +1,275 @@
|
||||
import libbmc
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from backend import config
|
||||
from backend import tools
|
||||
from libbmc import bibtex
|
||||
from libbmc import fetcher
|
||||
from libbmc.repositories import arxiv
|
||||
from libbmc.papers import identifiers
|
||||
from libbmc.papers import tearpages
|
||||
|
||||
|
||||
def get_entry_from_index(item, file_or_id=None):
    """
    Fetch an entry from the global index.

    :param item: An identifier or filename.
    :param file_or_id: Whether it is a file or an entry identifier. If \
            ``None``, will try to match both.
    :returns: TODO.
    """
    index = config.get("index")
    # Caller explicitly asked for an identifier-based lookup
    if file_or_id == "id":
        return bibtex.get_entry(index, item)
    # Caller explicitly asked for a filename-based lookup
    if file_or_id == "file":
        return bibtex.get_entry_by_filter(index,
                                          lambda x: x.file == item)  # TODO
    # No hint given: match against either the identifier or the filename
    return bibtex.get_entry_by_filter(index,
                                      lambda x: (x.id == item or
                                                 x.file == item))  # TODO
|
||||
|
||||
|
||||
def download(url, manual, autoconfirm, tag):
    """
    Download a given URL and add it to the library.

    :param url: URL to download.
    :param manual: Whether BibTeX should be fetched automatically.
    :param autoconfirm: Whether import should be made silent or not.
    :param tag: A tag for this file.
    :returns: The name of the downloaded file once imported, \
            or ``None`` in case of error.
    """
    # Download the paper
    print("Downloading %s…" % (url,))
    dl, contenttype = fetcher.download(url)

    if dl is None:
        tools.warning("Could not fetch %s." % (url,))
        return None

    print("Download finished.")

    # Store it to a temporary file. A ``with`` block guarantees cleanup even
    # if the import below raises, and avoids the original bug where
    # ``finally: tmp.close()`` raised NameError when NamedTemporaryFile
    # itself failed (``tmp`` was then unbound).
    with tempfile.NamedTemporaryFile(suffix='.%s' % (contenttype,)) as tmp:
        with open(tmp.name, 'wb+') as fh:
            fh.write(dl)

        # And add it as a normal paper from now on. ``import_file`` already
        # returns ``None`` on error, so its result is forwarded directly.
        return import_file(tmp.name, manual, autoconfirm, tag)
|
||||
|
||||
|
||||
def import_file(src, manual=False, autoconfirm=False,
                tag='', rename=True):
    """
    Add a file to the library.

    :param src: The path of the file to import.
    :param manual: Whether BibTeX should be fetched automatically. \
            Default to ``False``.
    :param autoconfirm: Whether import should be made silent or not. \
            Default to ``False``.
    :param tag: A tag for this file. \
            Default to no tag.
    :param rename: Whether or not the file should be renamed according to the \
            mask in the config.
    :returns: The name of the imported file, or ``None`` in case of error.
    """
    # Keep ``type``/``identifier`` defined even when ``manual`` is True:
    # the original code left them unbound in that case and raised NameError
    # at the ``identifier is not None`` check below.
    type, identifier = (None, None)

    if not manual:
        type, identifier = identifiers.find_identifiers(src)

        if type is None:
            tools.warning("Could not find an identifier for %s. \
Switching to manual entry." % (src))
            # Fetch available identifiers types from libbmc
            # Append "manual" for manual entry of BibTeX and "skip" to skip the
            # file.
            available_types_list = (libbmc.__valid_identifiers__ +
                                    ["manual", "skip"])
            # Fix: the original called ``" / ".joint(...)``, which raises
            # AttributeError; ``str.join`` is the correct method.
            available_types = " / ".join(available_types_list)
            # Query for the type to use
            while type not in available_types_list:
                type = input("%s? " % (available_types)).lower()

            if type == "skip":
                # If "skip" is chosen, skip this file
                return None
            elif type == "manual":
                identifier = None
            else:
                # Query for the identifier if required
                identifier = input("Value? ")
        else:
            print("%s found for %s: %s." % (type, src, identifier))

    # Fetch BibTeX automatically if we have an identifier
    bibtex = None
    if identifier is not None:
        # If an identifier was provided, try to automatically fetch the bibtex
        bibtex = identifiers.get_bibtex((type, identifier))

    # TODO: Check bibtex

    # Handle tag
    if not autoconfirm:
        # If autoconfirm is not enabled, query for a tag
        user_tag = input("Tag for this paper [%s]? " % tag)
        if user_tag != "":
            tag = user_tag
    # NOTE(review): ``bibtex`` may still be ``None`` here (manual BibTeX
    # entry is not implemented yet, see TODOs); this subscript would then
    # raise TypeError — confirm once manual entry is implemented.
    bibtex["tag"] = tag

    # TODO: Handle renaming
    new_name = src
    if rename:
        pass

    bibtex['file'] = os.path.abspath(new_name)

    # Tear some pages if needed
    should_tear_pages = True
    if not autoconfirm:
        # Ask for confirmation
        pages_to_tear = tearpages.tearpage_needed(bibtex)
        user_tear_pages = input("Found some pages to tear: %s. \
Confirm? [Y/n]" % (pages_to_tear)).lower()
        if user_tear_pages == "n":
            should_tear_pages = False
    if should_tear_pages:
        tearpages.tearpage(new_name, bibtex=bibtex)

    # TODO: Append to global bibtex index

    return new_name
|
||||
|
||||
|
||||
def delete(item, keep=False, file_or_id=None):
    """
    Delete an entry in the main BibTeX file, and the associated documents.

    :param item: An entry or filename to delete from the database.
    :param keep: Whether or not the document should be kept on the disk. \
            If True, will simply delete the entry in the main BibTeX index.
    :param file_or_id: Whether it is a file or an entry identifier. If \
            ``None``, will try to match both.
    :returns: Nothing.
    """
    # Resolve the item to an index entry first
    matched_entry = get_entry_from_index(item, file_or_id)

    # Drop the entry from the BibTeX index
    bibtex.delete(config.get("index"), matched_entry.id)  # TODO

    # Unless asked to keep it, remove the associated document as well
    if not keep:
        os.unlink(matched_entry.file)  # TODO
|
||||
|
||||
|
||||
def edit(item, file_or_id):
    """
    Edit an entry in the main BibTeX file.

    :param item: An entry or filename to edit in the database.
    :param file_or_id: Whether it is a file or an entry identifier. If \
            ``None``, will try to match both.
    :returns: Nothing.
    """
    # Not implemented yet.
    # TODO
    pass
|
||||
|
||||
|
||||
def list_entries():
    """
    List all the available entries and their associated files.

    :returns: A dict with entry identifiers as keys and associated files as \
            values.
    """
    # Build the id -> file mapping straight from the BibTeX index
    return {item.id: item.file
            for item in bibtex.get(config.get("index"))}  # TODO
|
||||
|
||||
|
||||
def open(id):
    """
    Open the file associated with the provided entry identifier.

    :param id: An entry identifier in the main BibTeX file.
    :returns: ``False`` if an error occurred. ``True`` otherwise.
    """
    # Look the entry up in the BibTeX index
    entry = bibtex.get_entry(config.get("index"), id)
    if entry is None:
        # Unknown identifier: report failure
        return False
    # Spawn xdg-open on the associated file to open it
    subprocess.Popen(['xdg-open', entry.filename])  # TODO
    return True
|
||||
|
||||
|
||||
def export(item, file_or_id=None):
    """
    Export the BibTeX entries associated to some items.

    :param item: An entry or filename to export as BibTeX.
    :param file_or_id: Whether it is a file or an entry identifier. If \
            ``None``, will try to match both.
    :returns: TODO.
    """
    # Look the item up in the BibTeX index
    entry = get_entry_from_index(item, file_or_id)
    if entry is None:
        # Nothing matched: fall through with None, as the original did
        # implicitly
        return None
    return bibtex.dict2BibTeX(entry)  # TODO
|
||||
|
||||
|
||||
def resync():
    """
    Compute the diff between the main BibTeX index and the files on the disk,
    and try to resync them.

    :returns: Nothing.
    """
    # Not implemented yet.
    # TODO
    pass
|
||||
|
||||
|
||||
def update(item, file_or_id=None):
    """
    Update an entry, trying to fetch a more recent version (on arXiv for \
            instance.)

    :param item: An entry or filename to fetch update from.
    :param file_or_id: Whether it is a file or an entry identifier. If \
            ``None``, will try to match both.
    :returns: TODO.
    """
    entry = get_entry_from_index(item, file_or_id)
    # Fetch latest version
    latest_version = arxiv.get_latest_version(entry.eprint)  # TODO
    if latest_version == entry.eprint:  # TODO
        # Already up to date: nothing more to do
        return

    print("New version found for %s: %s" % (entry, latest_version))
    confirm = input("Download it? [Y/n] ")
    if confirm.lower() == 'n':
        return

    # Download the updated version
    # TODO

    # Delete previous version if needed
    # TODO
|
@ -1,12 +1,3 @@
|
||||
from __future__ import unicode_literals
|
||||
import os
|
||||
import errno
|
||||
import imp
|
||||
import inspect
|
||||
import json
|
||||
import sys
|
||||
import libbmc.tools as tools
|
||||
|
||||
# List of available options (in ~/.config/bmc/bmc.json file):
|
||||
# * folder : folder in which papers are stored
|
||||
# * proxies : list of proxies to use, e.g. ['', "socks5://localhost:4711"]
|
||||
@ -23,9 +14,17 @@ import libbmc.tools as tools
|
||||
# %v = arXiv version (e.g. '-v1') or nothing if not an arXiv paper
|
||||
|
||||
# You can add your custom masks to rename files by adding functions in
|
||||
# ~/.config/masks.py.
|
||||
# ~/.config/bmc/masks.py.
|
||||
# E.g. : def f(x): x.replace('test', 'some_expr')
|
||||
|
||||
import os
|
||||
import errno
|
||||
import imp
|
||||
import inspect
|
||||
import json
|
||||
import sys
|
||||
|
||||
from backend import tools
|
||||
|
||||
def make_sure_path_exists(path):
|
||||
try:
|
||||
@ -56,6 +55,7 @@ class Config():
|
||||
|
||||
def initialize(self):
|
||||
self.set("folder", os.path.expanduser("~/Papers/"))
|
||||
self.set("index", os.path.expanduser("~/Papers/index.bib"))
|
||||
self.set("proxies", [''])
|
||||
self.set("format_articles", "%f_%l-%j-%Y%v")
|
||||
self.set("format_books", "%a-%t")
|
||||
@ -86,7 +86,7 @@ class Config():
|
||||
folder_exists = make_sure_path_exists(self.get("folder"))
|
||||
except OSError:
|
||||
tools.warning("Unable to create paper storage folder.")
|
||||
sys.exit(1)
|
||||
raise
|
||||
self.load_masks()
|
||||
|
||||
def save(self):
|
||||
@ -98,7 +98,7 @@ class Config():
|
||||
separators=(',', ': ')))
|
||||
except IOError:
|
||||
tools.warning("Could not write config file.")
|
||||
sys.exit(1)
|
||||
raise
|
||||
|
||||
def load_masks(self):
|
||||
if os.path.isfile(self.config_path + "masks.py"):
|
8
backend/tools.py
Normal file
8
backend/tools.py
Normal file
@ -0,0 +1,8 @@
|
||||
import sys
|
||||
|
||||
|
||||
def warning(*objs):
    """
    Write warnings to stderr.

    Reproduces ``print``'s behaviour: every argument is stringified, the
    pieces are joined with single spaces after the "WARNING: " prefix, and
    the line is newline-terminated.
    """
    pieces = ("WARNING: ",) + objs
    sys.stderr.write(" ".join(str(piece) for piece in pieces) + "\n")
|
772
bmc.py
772
bmc.py
@ -1,507 +1,76 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import bibtexparser
|
||||
from codecs import open
|
||||
from libbmc.config import Config
|
||||
from libbmc import backend
|
||||
from libbmc import fetcher
|
||||
from libbmc import tearpages
|
||||
from libbmc import tools
|
||||
|
||||
from backend import commands
|
||||
from backend import tools
|
||||
from backend.config import Config
|
||||
|
||||
# TODO: Handle config
|
||||
|
||||
|
||||
config = Config()
|
||||
EDITOR = os.environ.get('EDITOR') if os.environ.get('EDITOR') else 'vim'
|
||||
# Load EDITOR variable
|
||||
EDITOR = os.environ.get("EDITOR")
|
||||
|
||||
|
||||
def checkBibtex(filename, bibtex_string):
|
||||
print("The bibtex entry found for "+filename+" is:")
|
||||
|
||||
bibtex = bibtexparser.loads(bibtex_string)
|
||||
bibtex = bibtex.entries_dict
|
||||
try:
|
||||
bibtex = bibtex[list(bibtex.keys())[0]]
|
||||
# Check entries are correct
|
||||
if "title" not in bibtex:
|
||||
raise AssertionError
|
||||
if "authors" not in bibtex and "author" not in bibtex:
|
||||
raise AssertionError
|
||||
if "year" not in bibtex:
|
||||
raise AssertionError
|
||||
# Print the bibtex and confirm
|
||||
print(tools.parsed2Bibtex(bibtex))
|
||||
check = tools.rawInput("Is it correct? [Y/n] ")
|
||||
except KeyboardInterrupt:
|
||||
sys.exit()
|
||||
except (IndexError, KeyError, AssertionError):
|
||||
print("Missing author, year or title in bibtex.")
|
||||
check = 'n'
|
||||
|
||||
try:
|
||||
old_filename = bibtex['file']
|
||||
except KeyError:
|
||||
old_filename = False
|
||||
|
||||
while check.lower() == 'n':
|
||||
with tempfile.NamedTemporaryFile(suffix=".tmp") as tmpfile:
|
||||
tmpfile.write(bibtex_string.encode('utf-8'))
|
||||
tmpfile.flush()
|
||||
subprocess.call([EDITOR, tmpfile.name])
|
||||
tmpfile.seek(0)
|
||||
bibtex = bibtexparser.loads(tmpfile.read().decode('utf-8')+"\n")
|
||||
|
||||
bibtex = bibtex.entries_dict
|
||||
try:
|
||||
bibtex = bibtex[list(bibtex.keys())[0]]
|
||||
except (IndexError, KeyError):
|
||||
tools.warning("Invalid bibtex entry")
|
||||
bibtex_string = ''
|
||||
tools.rawInput("Press Enter to go back to editor.")
|
||||
continue
|
||||
if('authors' not in bibtex and 'title' not in bibtex and 'year' not in
|
||||
bibtex):
|
||||
tools.warning("Invalid bibtex entry")
|
||||
bibtex_string = ''
|
||||
tools.rawInput("Press Enter to go back to editor.")
|
||||
continue
|
||||
|
||||
if old_filename is not False and 'file' not in bibtex:
|
||||
tools.warning("Invalid bibtex entry. No filename given.")
|
||||
tools.rawInput("Press Enter to go back to editor.")
|
||||
check = 'n'
|
||||
else:
|
||||
bibtex_string = tools.parsed2Bibtex(bibtex)
|
||||
print("\nThe bibtex entry for "+filename+" is:")
|
||||
print(bibtex_string)
|
||||
check = tools.rawInput("Is it correct? [Y/n] ")
|
||||
if old_filename is not False and old_filename != bibtex['file']:
|
||||
try:
|
||||
print("Moving file to new location…")
|
||||
shutil.move(old_filename, bibtex['file'])
|
||||
except shutil.Error:
|
||||
tools.warning("Unable to move file "+old_filename+" to " +
|
||||
bibtex['file']+". You should check it manually.")
|
||||
|
||||
return bibtex
|
||||
|
||||
|
||||
def addFile(src, filetype, manual, autoconfirm, tag, rename=True):
|
||||
def file_or_id_from_args(args):
|
||||
"""
|
||||
Add a file to the library
|
||||
Helper function to parse provided args to check if the argument is a \
|
||||
file or an identifier.
|
||||
"""
|
||||
doi = False
|
||||
arxiv = False
|
||||
isbn = False
|
||||
|
||||
if not manual:
|
||||
try:
|
||||
if filetype == 'article' or filetype is None:
|
||||
id_type, article_id = fetcher.findArticleID(src)
|
||||
if id_type == "DOI":
|
||||
doi = article_id
|
||||
elif id_type == "arXiv":
|
||||
arxiv = article_id
|
||||
|
||||
if filetype == 'book' or (doi is False and arxiv is False and
|
||||
filetype is None):
|
||||
isbn = fetcher.findISBN(src)
|
||||
except KeyboardInterrupt:
|
||||
doi = False
|
||||
arxiv = False
|
||||
isbn = False
|
||||
|
||||
if doi is False and isbn is False and arxiv is False:
|
||||
if filetype is None:
|
||||
tools.warning("Could not determine the DOI nor the arXiv id nor " +
|
||||
"the ISBN for "+src+". Switching to manual entry.")
|
||||
doi_arxiv_isbn = ''
|
||||
while(doi_arxiv_isbn not in
|
||||
['doi', 'arxiv', 'isbn', 'manual', 'skip']):
|
||||
doi_arxiv_isbn = (tools.rawInput("DOI / arXiv " +
|
||||
"/ ISBN / manual / skip? ").
|
||||
lower())
|
||||
if doi_arxiv_isbn == 'doi':
|
||||
doi = tools.rawInput('DOI? ')
|
||||
elif doi_arxiv_isbn == 'arxiv':
|
||||
arxiv = tools.rawInput('arXiv id? ')
|
||||
elif doi_arxiv_isbn == 'isbn':
|
||||
isbn = tools.rawInput('ISBN? ')
|
||||
elif doi_arxiv_isbn == 'skip':
|
||||
return False
|
||||
elif filetype == 'article':
|
||||
tools.warning("Could not determine the DOI nor the arXiv id for " +
|
||||
src+", switching to manual entry.")
|
||||
doi_arxiv = ''
|
||||
while doi_arxiv not in ['doi', 'arxiv', 'manual', 'skip']:
|
||||
doi_arxiv = (tools.rawInput("DOI / arXiv / manual / skip? ").
|
||||
lower())
|
||||
if doi_arxiv == 'doi':
|
||||
doi = tools.rawInput('DOI? ')
|
||||
elif doi_arxiv == 'arxiv':
|
||||
arxiv = tools.rawInput('arXiv id? ')
|
||||
elif doi_arxiv == 'skip':
|
||||
return False
|
||||
elif filetype == 'book':
|
||||
isbn_manual = ''
|
||||
while isbn_manual not in ['isbn', 'manual', 'skip']:
|
||||
isbn_manual = tools.rawInput("ISBN / manual / skip? ").lower()
|
||||
if isbn_manual == 'isbn':
|
||||
isbn = (tools.rawInput('ISBN? ').
|
||||
replace(' ', '').
|
||||
replace('-', ''))
|
||||
elif isbn_manual == 'skip':
|
||||
return False
|
||||
elif doi is not False:
|
||||
print("DOI for "+src+" is "+doi+".")
|
||||
elif arxiv is not False:
|
||||
print("ArXiv id for "+src+" is "+arxiv+".")
|
||||
elif isbn is not False:
|
||||
print("ISBN for "+src+" is "+isbn+".")
|
||||
|
||||
if doi is not False and doi != '':
|
||||
# Add extra \n for bibtexparser
|
||||
bibtex = fetcher.doi2Bib(doi).strip().replace(',', ",\n")+"\n"
|
||||
elif arxiv is not False and arxiv != '':
|
||||
bibtex = fetcher.arXiv2Bib(arxiv).strip().replace(',', ",\n")+"\n"
|
||||
elif isbn is not False and isbn != '':
|
||||
# Idem
|
||||
bibtex = fetcher.isbn2Bib(isbn).strip()+"\n"
|
||||
else:
|
||||
bibtex = ''
|
||||
|
||||
bibtex = bibtexparser.loads(bibtex)
|
||||
bibtex = bibtex.entries_dict
|
||||
if len(bibtex) > 0:
|
||||
bibtex_name = list(bibtex.keys())[0]
|
||||
bibtex = bibtex[bibtex_name]
|
||||
bibtex_string = tools.parsed2Bibtex(bibtex)
|
||||
else:
|
||||
bibtex_string = ''
|
||||
|
||||
if not autoconfirm:
|
||||
bibtex = checkBibtex(src, bibtex_string)
|
||||
|
||||
if not autoconfirm:
|
||||
tag = tools.rawInput("Tag for this paper (leave empty for default) ? ")
|
||||
else:
|
||||
tag = args.tag
|
||||
bibtex['tag'] = tag
|
||||
|
||||
if rename:
|
||||
new_name = backend.getNewName(src, bibtex, tag)
|
||||
|
||||
while os.path.exists(new_name):
|
||||
tools.warning("file "+new_name+" already exists.")
|
||||
default_rename = new_name.replace(tools.getExtension(new_name),
|
||||
" (2)" +
|
||||
tools.getExtension(new_name))
|
||||
rename = tools.rawInput("New name ["+default_rename+"]? ")
|
||||
if rename == '':
|
||||
new_name = default_rename
|
||||
else:
|
||||
new_name = rename
|
||||
try:
|
||||
shutil.copy2(src, new_name)
|
||||
except shutil.Error:
|
||||
new_name = False
|
||||
sys.exit("Unable to move file to library dir " +
|
||||
config.get("folder")+".")
|
||||
else:
|
||||
new_name = src
|
||||
bibtex['file'] = os.path.abspath(new_name)
|
||||
|
||||
# Remove first page of IOP papers
|
||||
try:
|
||||
if 'IOP' in bibtex['publisher'] and bibtex['ENTRYTYPE'] == 'article':
|
||||
tearpages.tearpage(new_name)
|
||||
except (KeyError, shutil.Error, IOError):
|
||||
pass
|
||||
|
||||
backend.bibtexAppend(bibtex)
|
||||
return new_name
|
||||
return "id" if args.id else "file" if args.file else None
|
||||
|
||||
|
||||
def editEntry(entry, file_id='both'):
|
||||
bibtex = backend.getBibtex(entry, file_id)
|
||||
if bibtex is False:
|
||||
tools.warning("Entry "+entry+" does not exist.")
|
||||
return False
|
||||
def parse_args():
|
||||
"""
|
||||
Build a parser and parse arguments of command line.
|
||||
|
||||
if file_id == 'file':
|
||||
filename = entry
|
||||
else:
|
||||
filename = bibtex['file']
|
||||
new_bibtex = checkBibtex(filename, tools.parsed2Bibtex(bibtex))
|
||||
|
||||
# Tag update
|
||||
if new_bibtex['tag'] != bibtex['tag']:
|
||||
print("Editing tag, moving file.")
|
||||
new_name = backend.getNewName(new_bibtex['file'],
|
||||
new_bibtex,
|
||||
new_bibtex['tag'])
|
||||
|
||||
while os.path.exists(new_name):
|
||||
tools.warning("file "+new_name+" already exists.")
|
||||
default_rename = new_name.replace(tools.getExtension(new_name),
|
||||
" (2)" +
|
||||
tools.getExtension(new_name))
|
||||
rename = tools.rawInput("New name ["+default_rename+"]? ")
|
||||
if rename == '':
|
||||
new_name = default_rename
|
||||
else:
|
||||
new_name = rename
|
||||
new_bibtex['file'] = new_name
|
||||
|
||||
try:
|
||||
shutil.move(bibtex['file'], new_bibtex['file'])
|
||||
except shutil.Error:
|
||||
tools.warning('Unable to move file '+bibtex['file']+' to ' +
|
||||
new_bibtex['file'] + ' according to tag edit.')
|
||||
|
||||
try:
|
||||
if not os.listdir(os.path.dirname(bibtex['file'])):
|
||||
os.rmdir(os.path.dirname(bibtex['file']))
|
||||
except OSError:
|
||||
tools.warning("Unable to delete empty tag dir " +
|
||||
os.path.dirname(bibtex['file']))
|
||||
|
||||
try:
|
||||
with open(config.get("folder")+'index.bib', 'r', encoding='utf-8') \
|
||||
as fh:
|
||||
index = bibtexparser.load(fh)
|
||||
index = index.entries_dict
|
||||
except (TypeError, IOError):
|
||||
tools.warning("Unable to open index file.")
|
||||
return False
|
||||
|
||||
index[new_bibtex['ID']] = new_bibtex
|
||||
backend.bibtexRewrite(index)
|
||||
return True
|
||||
|
||||
|
||||
def downloadFile(url, filetype, manual, autoconfirm, tag):
|
||||
print('Downloading '+url)
|
||||
dl, contenttype = fetcher.download(url)
|
||||
|
||||
if dl is not False:
|
||||
print('Download finished')
|
||||
tmp = tempfile.NamedTemporaryFile(suffix='.'+contenttype)
|
||||
|
||||
with open(tmp.name, 'wb+') as fh:
|
||||
fh.write(dl)
|
||||
new_name = addFile(tmp.name, filetype, manual, autoconfirm, tag)
|
||||
if new_name is False:
|
||||
return False
|
||||
tmp.close()
|
||||
return new_name
|
||||
else:
|
||||
tools.warning("Could not fetch "+url)
|
||||
return False
|
||||
|
||||
|
||||
def openFile(ident):
|
||||
try:
|
||||
with open(config.get("folder")+'index.bib', 'r', encoding='utf-8') \
|
||||
as fh:
|
||||
bibtex = bibtexparser.load(fh)
|
||||
bibtex = bibtex.entries_dict
|
||||
except (TypeError, IOError):
|
||||
tools.warning("Unable to open index file.")
|
||||
return False
|
||||
|
||||
if ident not in list(bibtex.keys()):
|
||||
return False
|
||||
else:
|
||||
subprocess.Popen(['xdg-open', bibtex[ident]['file']])
|
||||
return True
|
||||
|
||||
|
||||
def resync():
|
||||
diff = backend.diffFilesIndex()
|
||||
|
||||
if diff is False:
|
||||
return False
|
||||
|
||||
for key in diff:
|
||||
entry = diff[key]
|
||||
if entry['file'] == '':
|
||||
print("\nFound entry in index without associated file: " +
|
||||
entry['ID'])
|
||||
print("Title:\t"+entry['title'])
|
||||
loop = True
|
||||
while confirm:
|
||||
filename = tools.rawInput("File to import for this entry " +
|
||||
"(leave empty to delete the " +
|
||||
"entry)? ")
|
||||
if filename == '':
|
||||
break
|
||||
else:
|
||||
if 'doi' in list(entry.keys()):
|
||||
doi = fetcher.findArticleID(filename, only=["DOI"])
|
||||
if doi is not False and doi != entry['doi']:
|
||||
loop = tools.rawInput("Found DOI does not " +
|
||||
"match bibtex entry " +
|
||||
"DOI, continue anyway " +
|
||||
"? [y/N]")
|
||||
loop = (loop.lower() != 'y')
|
||||
if 'Eprint' in list(entry.keys()):
|
||||
arxiv = fetcher.findArticleID(filename, only=["arXiv"])
|
||||
if arxiv is not False and arxiv != entry['Eprint']:
|
||||
loop = tools.rawInput("Found arXiv id does " +
|
||||
"not match bibtex " +
|
||||
"entry arxiv id, " +
|
||||
"continue anyway ? [y/N]")
|
||||
loop = (loop.lower() != 'y')
|
||||
if 'isbn' in list(entry.keys()):
|
||||
isbn = fetcher.findISBN(filename)
|
||||
if isbn is not False and isbn != entry['isbn']:
|
||||
loop = tools.rawInput("Found ISBN does not " +
|
||||
"match bibtex entry " +
|
||||
"ISBN, continue anyway " +
|
||||
"? [y/N]")
|
||||
loop = (loop.lower() != 'y')
|
||||
continue
|
||||
if filename == '':
|
||||
backend.deleteId(entry['ID'])
|
||||
print("Deleted entry \""+entry['ID']+"\".")
|
||||
else:
|
||||
new_name = backend.getNewName(filename, entry)
|
||||
try:
|
||||
shutil.copy2(filename, new_name)
|
||||
print("Imported new file "+filename+" for entry " +
|
||||
entry['ID']+".")
|
||||
except shutil.Error:
|
||||
new_name = False
|
||||
sys.exit("Unable to move file to library dir " +
|
||||
config.get("folder")+".")
|
||||
backend.bibtexEdit(entry['ID'], {'file': filename})
|
||||
else:
|
||||
print("Found file without any associated entry in index:")
|
||||
print(entry['file'])
|
||||
action = ''
|
||||
while action.lower() not in ['import', 'delete']:
|
||||
action = tools.rawInput("What to do? [import / delete] ")
|
||||
action = action.lower()
|
||||
if action == 'import':
|
||||
tmp = tempfile.NamedTemporaryFile()
|
||||
shutil.copy(entry['file'], tmp.name)
|
||||
filetype = tools.getExtension(entry['file'])
|
||||
try:
|
||||
os.remove(entry['file'])
|
||||
except OSError:
|
||||
tools.warning("Unable to delete file "+entry['file'])
|
||||
if not addFile(tmp.name, filetype):
|
||||
tools.warning("Unable to reimport file "+entry['file'])
|
||||
tmp.close()
|
||||
else:
|
||||
backend.deleteFile(entry['file'])
|
||||
print(entry['file'] + " removed from disk and " +
|
||||
"index.")
|
||||
# Check for empty tag dirs
|
||||
for i in os.listdir(config.get("folder")):
|
||||
if os.path.isdir(i) and not os.listdir(config.get("folder") + i):
|
||||
try:
|
||||
os.rmdir(config.get("folder") + i)
|
||||
except OSError:
|
||||
tools.warning("Found empty tag dir "+config.get("folder") + i +
|
||||
" but could not delete it.")
|
||||
|
||||
|
||||
def update(entry):
|
||||
update = backend.updateArXiv(entry)
|
||||
if update is not False:
|
||||
print("New version found for "+entry)
|
||||
print("\t Title: "+update['title'])
|
||||
confirm = tools.rawInput("Download it ? [Y/n] ")
|
||||
if confirm.lower() == 'n':
|
||||
return
|
||||
new_name = downloadFile('http://arxiv.org/pdf/'+update['eprint'],
|
||||
'article', False)
|
||||
if new_name is not False:
|
||||
print(update['eprint']+" successfully imported as "+new_name)
|
||||
else:
|
||||
tools.warning("An error occurred while downloading "+url)
|
||||
confirm = tools.rawInput("Delete previous version ? [y/N] ")
|
||||
if confirm.lower() == 'y':
|
||||
if not backend.deleteId(entry):
|
||||
if not backend.deleteFile(entry):
|
||||
tools.warning("Unable to remove previous version.")
|
||||
return
|
||||
print("Previous version successfully deleted.")
|
||||
|
||||
|
||||
def commandline_arg(bytestring):
|
||||
# UTF-8 encoding for python2
|
||||
if sys.version_info >= (3, 0):
|
||||
unicode_string = bytestring
|
||||
else:
|
||||
unicode_string = bytestring.decode(sys.getfilesystemencoding())
|
||||
return unicode_string
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description="A bibliography " +
|
||||
"management tool.")
|
||||
subparsers = parser.add_subparsers(help="sub-command help", dest='parser')
|
||||
:returns: Parsed arguments from the parser.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="A bibliography management tool.")
|
||||
parser.add_argument("-c", "--config", default=None,
|
||||
help="path to a custom config dir.")
|
||||
subparsers = parser.add_subparsers(help="sub-command help", dest='command')
|
||||
subparsers.required = True # Fix for Python 3.3.5
|
||||
|
||||
parser_download = subparsers.add_parser('download', help="download help")
|
||||
parser_download.add_argument('-t', '--type', default=None,
|
||||
choices=['article', 'book'],
|
||||
help="type of the file to download",
|
||||
type=commandline_arg)
|
||||
parser_download.add_argument('-m', '--manual', default=False,
|
||||
action='store_true',
|
||||
help="disable auto-download of bibtex")
|
||||
parser_download.add_argument('-y', default=False,
|
||||
help="Confirm all")
|
||||
parser_download.add_argument('--tag', default='',
|
||||
help="Tag", type=commandline_arg)
|
||||
help="Tag")
|
||||
parser_download.add_argument('--keep', default=False,
|
||||
help="Do not remove the file")
|
||||
parser_download.add_argument('url', nargs='+',
|
||||
help="url of the file to import",
|
||||
type=commandline_arg)
|
||||
help="url of the file to import")
|
||||
parser_download.set_defaults(func='download')
|
||||
|
||||
parser_import = subparsers.add_parser('import', help="import help")
|
||||
parser_import.add_argument('-t', '--type', default=None,
|
||||
choices=['article', 'book'],
|
||||
help="type of the file to import",
|
||||
type=commandline_arg)
|
||||
parser_import.add_argument('-m', '--manual', default=False,
|
||||
action='store_true',
|
||||
help="disable auto-download of bibtex")
|
||||
parser_import.add_argument('-y', default=False,
|
||||
help="Confirm all")
|
||||
parser_import.add_argument('--tag', default='', help="Tag",
|
||||
type=commandline_arg)
|
||||
parser_import.add_argument('--tag', default='', help="Tag")
|
||||
parser_import.add_argument('--in-place', default=False,
|
||||
dest="inplace", action='store_true',
|
||||
help="Leave the imported file in place",)
|
||||
parser_import.add_argument('file', nargs='+',
|
||||
help="path to the file to import",
|
||||
type=commandline_arg)
|
||||
help="path to the file to import")
|
||||
parser_import.add_argument('--skip', nargs='+',
|
||||
help="path to files to skip", default=[],
|
||||
type=commandline_arg)
|
||||
help="path to files to skip", default=[])
|
||||
parser_import.set_defaults(func='import')
|
||||
|
||||
parser_delete = subparsers.add_parser('delete', help="delete help")
|
||||
parser_delete.add_argument('entries', metavar='entry', nargs='+',
|
||||
help="a filename or an identifier",
|
||||
type=commandline_arg)
|
||||
help="a filename or an identifier")
|
||||
parser_delete.add_argument('--skip', nargs='+',
|
||||
help="path to files to skip", default=[],
|
||||
type=commandline_arg)
|
||||
help="path to files to skip", default=[])
|
||||
group = parser_delete.add_mutually_exclusive_group()
|
||||
group.add_argument('--id', action="store_true", default=False,
|
||||
help="id based deletion")
|
||||
@ -514,11 +83,9 @@ if __name__ == '__main__':
|
||||
|
||||
parser_edit = subparsers.add_parser('edit', help="edit help")
|
||||
parser_edit.add_argument('entries', metavar='entry', nargs='+',
|
||||
help="a filename or an identifier",
|
||||
type=commandline_arg)
|
||||
help="a filename or an identifier")
|
||||
parser_edit.add_argument('--skip', nargs='+',
|
||||
help="path to files to skip", default=[],
|
||||
type=commandline_arg)
|
||||
help="path to files to skip", default=[])
|
||||
group = parser_edit.add_mutually_exclusive_group()
|
||||
group.add_argument('--id', action="store_true", default=False,
|
||||
help="id based deletion")
|
||||
@ -529,151 +96,174 @@ if __name__ == '__main__':
|
||||
parser_list = subparsers.add_parser('list', help="list help")
|
||||
parser_list.set_defaults(func='list')
|
||||
|
||||
parser_search = subparsers.add_parser('search', help="search help")
|
||||
parser_search.set_defaults(func='search')
|
||||
|
||||
parser_open = subparsers.add_parser('open', help="open help")
|
||||
parser_open.add_argument('ids', metavar='id', nargs='+',
|
||||
help="an identifier",
|
||||
type=commandline_arg)
|
||||
help="an identifier")
|
||||
parser_open.set_defaults(func='open')
|
||||
|
||||
parser_export = subparsers.add_parser('export', help="export help")
|
||||
parser_export.add_argument('ids', metavar='id', nargs='+',
|
||||
help="an identifier",
|
||||
type=commandline_arg)
|
||||
parser_export.add_argument('entries', metavar='entry', nargs='+',
|
||||
help="a filename or an identifier")
|
||||
parser_export.add_argument('--skip', nargs='+',
|
||||
help="path to files to skip", default=[])
|
||||
group = parser_export.add_mutually_exclusive_group()
|
||||
group.add_argument('--id', action="store_true", default=False,
|
||||
help="id based deletion")
|
||||
group.add_argument('--file', action="store_true", default=False,
|
||||
help="file based deletion")
|
||||
parser_export.set_defaults(func='export')
|
||||
|
||||
parser_resync = subparsers.add_parser('resync', help="resync help")
|
||||
parser_resync.set_defaults(func='resync')
|
||||
|
||||
parser_update = subparsers.add_parser('update', help="update help")
|
||||
parser_update.add_argument('--entries', metavar='entry', nargs='+',
|
||||
help="a filename or an identifier",
|
||||
type=commandline_arg)
|
||||
parser_update.add_argument('entries', metavar='entry', nargs='+',
|
||||
help="a filename or an identifier")
|
||||
parser_update.add_argument('--skip', nargs='+',
|
||||
help="path to files to skip", default=[])
|
||||
group = parser_update.add_mutually_exclusive_group()
|
||||
group.add_argument('--id', action="store_true", default=False,
|
||||
help="id based deletion")
|
||||
group.add_argument('--file', action="store_true", default=False,
|
||||
help="file based deletion")
|
||||
parser_update.set_defaults(func='update')
|
||||
|
||||
parser_search = subparsers.add_parser('search', help="search help")
|
||||
parser_search.add_argument('query', metavar='entry', nargs='+',
|
||||
help="your query, see README for more info.",
|
||||
type=commandline_arg)
|
||||
parser_search.set_defaults(func='search')
|
||||
return parser.parse_args()
|
||||
|
||||
args = parser.parse_args()
|
||||
try:
|
||||
if args.func == 'download':
|
||||
skipped = []
|
||||
for url in args.url:
|
||||
new_name = downloadFile(url, args.type, args.manual, args.y,
|
||||
args.tag)
|
||||
if new_name is not False:
|
||||
print(url+" successfully imported as "+new_name)
|
||||
else:
|
||||
tools.warning("An error occurred while downloading "+url)
|
||||
skipped.append(url)
|
||||
if len(skipped) > 0:
|
||||
print("\nSkipped files:")
|
||||
for i in skipped:
|
||||
print(i)
|
||||
sys.exit()
|
||||
|
||||
if args.func == 'import':
|
||||
skipped = []
|
||||
for filename in list(set(args.file) - set(args.skip)):
|
||||
new_name = addFile(filename, args.type, args.manual, args.y,
|
||||
args.tag, not args.inplace)
|
||||
if new_name is not False:
|
||||
print(filename+" successfully imported as " +
|
||||
new_name+".")
|
||||
else:
|
||||
tools.warning("An error occurred while importing " +
|
||||
filename)
|
||||
skipped.append(filename)
|
||||
if len(skipped) > 0:
|
||||
print("\nSkipped files:")
|
||||
for i in skipped:
|
||||
print(i)
|
||||
sys.exit()
|
||||
def main():
|
||||
"""
|
||||
Main function.
|
||||
"""
|
||||
global config
|
||||
|
||||
elif args.func == 'delete':
|
||||
skipped = []
|
||||
for filename in list(set(args.entries) - set(args.skip)):
|
||||
if not args.force:
|
||||
confirm = tools.rawInput("Are you sure you want to " +
|
||||
"delete "+filename+" ? [y/N] ")
|
||||
else:
|
||||
confirm = 'y'
|
||||
# Parse arguments
|
||||
args = parse_args()
|
||||
|
||||
if confirm.lower() == 'y':
|
||||
if args.file or not backend.deleteId(filename, args.keep):
|
||||
if(args.id or
|
||||
not backend.deleteFile(filename, args.keep)):
|
||||
tools.warning("Unable to delete "+filename)
|
||||
sys.exit(1)
|
||||
# Load the custom config if needed
|
||||
if args.config is not None:
|
||||
config = Config(base_config_path=args.config)
|
||||
|
||||
print(filename+" successfully deleted.")
|
||||
else:
|
||||
skipped.append(filename)
|
||||
|
||||
if len(skipped) > 0:
|
||||
print("\nSkipped files:")
|
||||
for i in skipped:
|
||||
print(i)
|
||||
sys.exit()
|
||||
|
||||
elif args.func == 'edit':
|
||||
for filename in list(set(args.entries) - set(args.skip)):
|
||||
if args.file:
|
||||
file_id = 'file'
|
||||
elif args.id:
|
||||
file_id = 'id'
|
||||
else:
|
||||
file_id = 'both'
|
||||
editEntry(filename, file_id)
|
||||
sys.exit()
|
||||
|
||||
elif args.func == 'list':
|
||||
listPapers = backend.getEntries(full=True)
|
||||
if not listPapers:
|
||||
sys.exit()
|
||||
listPapers = [v["file"] for k, v in listPapers.items()]
|
||||
listPapers.sort()
|
||||
for paper in listPapers:
|
||||
print(paper)
|
||||
sys.exit()
|
||||
|
||||
elif args.func == 'search':
|
||||
raise Exception('TODO')
|
||||
|
||||
elif args.func == 'open':
|
||||
for filename in args.ids:
|
||||
if not openFile(filename):
|
||||
sys.exit("Unable to open file associated " +
|
||||
"to ident "+filename)
|
||||
sys.exit()
|
||||
|
||||
elif args.func == 'export':
|
||||
bibtex = ''
|
||||
for id in args.ids:
|
||||
bibtex += tools.parsed2Bibtex(backend.getBibtex(id,
|
||||
clean=True))
|
||||
print(bibtex.strip())
|
||||
sys.exit
|
||||
|
||||
elif args.func == 'resync':
|
||||
confirm = tools.rawInput("Resync files and bibtex index? [y/N] ")
|
||||
if confirm.lower() == 'y':
|
||||
resync()
|
||||
sys.exit()
|
||||
|
||||
elif args.func == 'update':
|
||||
if args.entries is None:
|
||||
entries = backend.getEntries()
|
||||
# Download command
|
||||
if args.func == 'download':
|
||||
skipped = []
|
||||
for url in args.url:
|
||||
# Try to download the URL
|
||||
new_name = commands.download(url, args.manual, args.y,
|
||||
args.tag)
|
||||
if new_name is not None:
|
||||
print("%s successfully imported as %s." % (url, new_name))
|
||||
else:
|
||||
entries = args.entries
|
||||
for entry in entries:
|
||||
update(entry)
|
||||
sys.exit()
|
||||
tools.warning("An error occurred while downloading %s." %
|
||||
(url,))
|
||||
skipped.append(url)
|
||||
# Output URLs with errors
|
||||
if len(skipped) > 0:
|
||||
tools.warning("Skipped URLs:")
|
||||
for i in skipped:
|
||||
tools.warning(i)
|
||||
|
||||
# Import command
|
||||
elif args.func == 'import':
|
||||
skipped = []
|
||||
# Handle exclusions
|
||||
files_to_process = list(set(args.file) - set(args.skip))
|
||||
for filename in files_to_process:
|
||||
# Try to import the file
|
||||
new_name = commands.import_file(filename,
|
||||
args.manual, args.y,
|
||||
args.tag, not args.inplace)
|
||||
if new_name is not None:
|
||||
print("%s successfully imported as %s." % (filename, new_name))
|
||||
else:
|
||||
tools.warning("An error occurred while importing %s." %
|
||||
(filename,))
|
||||
skipped.append(filename)
|
||||
# Output files with errors
|
||||
if len(skipped) > 0:
|
||||
tools.warning("Skipped files:")
|
||||
for i in skipped:
|
||||
tools.warning(i)
|
||||
|
||||
# Delete command
|
||||
elif args.func == 'delete':
|
||||
skipped = []
|
||||
# Handle exclusions
|
||||
items_to_process = list(set(args.entries) - set(args.skip))
|
||||
for item in items_to_process:
|
||||
# Confirm before deletion
|
||||
if not args.force:
|
||||
confirm = input("Are you sure you want to delete %s? [y/N] " %
|
||||
(item,))
|
||||
else:
|
||||
confirm = 'y'
|
||||
|
||||
# Try to delete the item
|
||||
if confirm.lower() == 'y':
|
||||
file_or_id = file_or_id_from_args(args)
|
||||
commands.delete(item, args.keep, file_or_id)
|
||||
print("%s successfully deleted." % (item,))
|
||||
else:
|
||||
skipped.append(item)
|
||||
# Output items with errors
|
||||
if len(skipped) > 0:
|
||||
tools.warning("Skipped items:")
|
||||
for i in skipped:
|
||||
tools.warning(i)
|
||||
|
||||
# Edit command
|
||||
elif args.func == 'edit':
|
||||
# Handle exclusions
|
||||
items_to_process = list(set(args.entries) - set(args.skip))
|
||||
for item in items_to_process:
|
||||
file_or_id = file_or_id_from_args(args)
|
||||
commands.edit(item, file_or_id)
|
||||
|
||||
# List command
|
||||
elif args.func == 'list':
|
||||
# List all available items
|
||||
for id, file in commands.list_entries().items():
|
||||
# And print them as "identifier: file"
|
||||
print("%s: %s" % (id, file))
|
||||
|
||||
# Open command
|
||||
elif args.func == 'open':
|
||||
# Open each entry
|
||||
for id in args.ids:
|
||||
if not commands.open(id):
|
||||
# And warn the user about missing files or errors
|
||||
tools.warning("Unable to open file associated with ident %s." %
|
||||
(id,))
|
||||
|
||||
# Export command
|
||||
elif args.func == 'export':
|
||||
# Handle exclusions
|
||||
items_to_process = list(set(args.entries) - set(args.skip))
|
||||
for item in items_to_process:
|
||||
file_or_id = file_or_id_from_args(args)
|
||||
print(commands.export(item, file_or_id))
|
||||
|
||||
# Resync command
|
||||
elif args.func == 'resync':
|
||||
confirm = input("Resync files and bibtex index? [y/N] ")
|
||||
if confirm.lower() == 'y':
|
||||
commands.resync()
|
||||
|
||||
# Update command
|
||||
elif args.func == 'update':
|
||||
# Handle exclusions
|
||||
items_to_process = list(set(args.entries) - set(args.skip))
|
||||
for item in items_to_process:
|
||||
file_or_id = file_or_id_from_args(args)
|
||||
updates = commands.update(args.entries)
|
||||
# TODO \/
|
||||
print("%d new versions of papers were found:" % (len(updates)))
|
||||
for item in updates:
|
||||
print(item)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
sys.exit()
|
||||
|
@ -1,2 +0,0 @@
|
||||
#!/usr/bin/env python2
|
||||
# -*- coding: utf-8 -*-
|
@ -1,336 +0,0 @@
|
||||
# -*- coding: utf8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# "THE NO-ALCOHOL BEER-WARE LICENSE" (Revision 42):
|
||||
# Phyks (webmaster@phyks.me) wrote this file. As long as you retain this notice
|
||||
# you can do whatever you want with this stuff (and you can also do whatever
|
||||
# you want with this stuff without retaining it, but that's not cool...). If we
|
||||
# meet some day, and you think this stuff is worth it, you can buy me a
|
||||
# <del>beer</del> soda in return.
|
||||
# Phyks
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import os
|
||||
import re
|
||||
import libbmc.tools as tools
|
||||
import libbmc.fetcher as fetcher
|
||||
import bibtexparser
|
||||
from libbmc.config import Config
|
||||
from codecs import open
|
||||
|
||||
|
||||
config = Config()
|
||||
|
||||
|
||||
def getNewName(src, bibtex, tag='', override_format=None):
|
||||
"""
|
||||
Return the formatted name according to config for the given
|
||||
bibtex entry
|
||||
"""
|
||||
authors = re.split(' and ', bibtex['author'])
|
||||
|
||||
if bibtex['ENTRYTYPE'] == 'article':
|
||||
if override_format is None:
|
||||
new_name = config.get("format_articles")
|
||||
else:
|
||||
new_name = override_format
|
||||
try:
|
||||
new_name = new_name.replace("%j", bibtex['journal'])
|
||||
except KeyError:
|
||||
pass
|
||||
elif bibtex['ENTRYTYPE'] == 'book':
|
||||
if override_format is None:
|
||||
new_name = config.get("format_books")
|
||||
else:
|
||||
new_name = override_format
|
||||
|
||||
new_name = new_name.replace("%t", bibtex['title'])
|
||||
try:
|
||||
new_name = new_name.replace("%Y", bibtex['year'])
|
||||
except KeyError:
|
||||
pass
|
||||
new_name = new_name.replace("%f", authors[0].split(',')[0].strip())
|
||||
new_name = new_name.replace("%l", authors[-1].split(',')[0].strip())
|
||||
new_name = new_name.replace("%a", ', '.join([i.split(',')[0].strip()
|
||||
for i in authors]))
|
||||
if('archiveprefix' in bibtex and
|
||||
'arXiv' in bibtex['archiveprefix']):
|
||||
new_name = new_name.replace("%v",
|
||||
'-' +
|
||||
bibtex['eprint'][bibtex['eprint'].
|
||||
rfind('v'):])
|
||||
else:
|
||||
new_name = new_name.replace("%v", '')
|
||||
|
||||
for custom in config.get("format_custom"):
|
||||
new_name = custom(new_name)
|
||||
|
||||
if tag == '':
|
||||
new_name = (config.get("folder") + tools.slugify(new_name) +
|
||||
tools.getExtension(src))
|
||||
else:
|
||||
if not os.path.isdir(config.get("folder") + tag):
|
||||
try:
|
||||
os.mkdir(config.get("folder") + tag)
|
||||
except OSError:
|
||||
tools.warning("Unable to create tag dir " +
|
||||
config.get("folder")+tag+".")
|
||||
|
||||
new_name = (config.get("folder") + tools.slugify(tag) + '/' +
|
||||
tools.slugify(new_name) + tools.getExtension(src))
|
||||
|
||||
return new_name
|
||||
|
||||
|
||||
def bibtexAppend(data):
|
||||
"""Append data to the main bibtex file
|
||||
|
||||
data is a dict for one entry in bibtex, as the one from bibtexparser output
|
||||
"""
|
||||
try:
|
||||
with open(config.get("folder")+'index.bib', 'a', encoding='utf-8') \
|
||||
as fh:
|
||||
fh.write(tools.parsed2Bibtex(data)+"\n")
|
||||
except IOError as e:
|
||||
raise e
|
||||
tools.warning("Unable to open index file.")
|
||||
return False
|
||||
|
||||
|
||||
def bibtexEdit(ident, modifs):
|
||||
"""Update ident key in bibtex file, modifications are in modifs dict"""
|
||||
|
||||
try:
|
||||
with open(config.get("folder")+'index.bib', 'r', encoding='utf-8') \
|
||||
as fh:
|
||||
bibtex = bibtexparser.load(fh)
|
||||
bibtex = bibtex.entries_dict
|
||||
except (IOError, TypeError):
|
||||
tools.warning("Unable to open index file.")
|
||||
return False
|
||||
|
||||
for key in modifs.keys():
|
||||
bibtex[ident][key] = modifs[key]
|
||||
bibtexRewrite(bibtex)
|
||||
|
||||
|
||||
def bibtexRewrite(data):
|
||||
"""Rewrite the bibtex index file.
|
||||
|
||||
data is a dict of bibtex entry dict.
|
||||
"""
|
||||
bibtex = ''
|
||||
for entry in data.keys():
|
||||
bibtex += tools.parsed2Bibtex(data[entry])+"\n"
|
||||
try:
|
||||
with open(config.get("folder")+'index.bib', 'w', encoding='utf-8') \
|
||||
as fh:
|
||||
fh.write(bibtex)
|
||||
except (IOError, TypeError):
|
||||