#!/usr/bin/env python2
# -*- coding: utf8 -*-
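"""
A command-line bibliography management tool built on libbmc: it downloads
or imports papers, fetches their bibtex entries (through DOI, arXiv id or
ISBN) and keeps them indexed in an index.bib file in the library folder.
"""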
from __future__ import unicode_literals

import argparse
import os
import shutil
import subprocess
import sys
import tempfile

from bibtexparser.bparser import BibTexParser
from codecs import open

from libbmc.config import Config
from libbmc import backend
from libbmc import fetcher
from libbmc import tearpages
from libbmc import tools


config = Config()
EDITOR = os.environ.get('EDITOR') if os.environ.get('EDITOR') else 'vim'


def checkBibtex(filename, bibtex_string):
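    """Check and confirm the bibtex entry for filename.

    Prints the entry parsed from bibtex_string, checks that the mandatory
    fields are present and asks for confirmation, reopening the entry in
    $EDITOR until the user validates it. Returns the parsed entry.
    """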
    print("The bibtex entry found for "+filename+" is:")

    bibtex = BibTexParser(bibtex_string)
    bibtex = bibtex.get_entry_dict()
    try:
        bibtex = bibtex[list(bibtex.keys())[0]]
        # Check entries are correct
        assert bibtex['title']
        if bibtex['type'] == 'article':
            assert bibtex['authors']
        elif bibtex['type'] == 'book':
            assert bibtex['author']
        assert bibtex['year']
        # Print the bibtex and confirm
        print(tools.parsed2Bibtex(bibtex))
        check = tools.rawInput("Is it correct? [Y/n] ")
    except KeyboardInterrupt:
        sys.exit()
    # An empty parse yields an empty dict, hence the IndexError
    except (IndexError, KeyError, AssertionError):
        check = 'n'

    try:
        old_filename = bibtex['file']
    except KeyError:
        old_filename = False

    while check.lower() == 'n':
        with tempfile.NamedTemporaryFile(suffix=".tmp") as tmpfile:
            tmpfile.write(bibtex_string.encode('utf-8'))
            tmpfile.flush()
            subprocess.call([EDITOR, tmpfile.name])
            tmpfile.seek(0)
            bibtex = BibTexParser(tmpfile.read().decode('utf-8')+"\n")
        bibtex = bibtex.get_entry_dict()
        try:
            bibtex = bibtex[list(bibtex.keys())[0]]
        except (IndexError, KeyError):
            tools.warning("Invalid bibtex entry")
            bibtex_string = ''
            tools.rawInput("Press Enter to go back to editor.")
            continue
        if ('authors' not in bibtex and 'title' not in bibtex and
                'year' not in bibtex):
            tools.warning("Invalid bibtex entry")
            bibtex_string = ''
            tools.rawInput("Press Enter to go back to editor.")
            continue

        if old_filename is not False and 'file' not in bibtex:
            tools.warning("Invalid bibtex entry. No filename given.")
            tools.rawInput("Press Enter to go back to editor.")
            check = 'n'
        else:
            bibtex_string = tools.parsed2Bibtex(bibtex)
            print("\nThe bibtex entry for "+filename+" is:")
            print(bibtex_string)
            check = tools.rawInput("Is it correct? [Y/n] ")
    if old_filename is not False and old_filename != bibtex['file']:
        try:
            print("Moving file to new location…")
            shutil.move(old_filename, bibtex['file'])
        except shutil.Error:
            tools.warning("Unable to move file "+old_filename+" to " +
                          bibtex['file']+". You should check it manually.")

    return bibtex


def addFile(src, filetype, manual, autoconfirm, tag):
|
2014-04-24 00:18:49 +02:00
|
|
|
"""
|
|
|
|
Add a file to the library
|
|
|
|
"""
    doi = False
    arxiv = False
    isbn = False

    if not manual:
        try:
            if filetype == 'article' or filetype is None:
                doi = fetcher.findDOI(src)
            if doi is False and (filetype == 'article' or filetype is None):
                arxiv = fetcher.findArXivId(src)

            if filetype == 'book' or (doi is False and arxiv is False and
                                      filetype is None):
                isbn = fetcher.findISBN(src)
        except KeyboardInterrupt:
            doi = False
            arxiv = False
            isbn = False

    if doi is False and isbn is False and arxiv is False:
        if filetype is None:
            tools.warning("Could not determine the DOI nor the arXiv id " +
                          "nor the ISBN for "+src+". Switching to manual " +
                          "entry.")
            doi_arxiv_isbn = ''
            while (doi_arxiv_isbn not in
                   ['doi', 'arxiv', 'isbn', 'manual', 'skip']):
                doi_arxiv_isbn = (tools.rawInput("DOI / arXiv " +
                                                 "/ ISBN / manual / skip? ").
                                  lower())
            if doi_arxiv_isbn == 'doi':
                doi = tools.rawInput('DOI? ')
            elif doi_arxiv_isbn == 'arxiv':
                arxiv = tools.rawInput('arXiv id? ')
            elif doi_arxiv_isbn == 'isbn':
                isbn = tools.rawInput('ISBN? ')
            elif doi_arxiv_isbn == 'skip':
                return False
        elif filetype == 'article':
            tools.warning("Could not determine the DOI nor the arXiv id " +
                          "for "+src+", switching to manual entry.")
            doi_arxiv = ''
            while doi_arxiv not in ['doi', 'arxiv', 'manual', 'skip']:
                doi_arxiv = (tools.rawInput("DOI / arXiv / manual / skip? ").
                             lower())
            if doi_arxiv == 'doi':
                doi = tools.rawInput('DOI? ')
            elif doi_arxiv == 'arxiv':
                arxiv = tools.rawInput('arXiv id? ')
            elif doi_arxiv == 'skip':
                return False
        elif filetype == 'book':
            isbn_manual = ''
            while isbn_manual not in ['isbn', 'manual', 'skip']:
                isbn_manual = tools.rawInput("ISBN / manual / skip? ").lower()
            if isbn_manual == 'isbn':
                isbn = (tools.rawInput('ISBN? ').
                        replace(' ', '').
                        replace('-', ''))
            elif isbn_manual == 'skip':
                return False
    elif doi is not False:
        print("DOI for "+src+" is "+doi+".")
    elif arxiv is not False:
        print("ArXiv id for "+src+" is "+arxiv+".")
    elif isbn is not False:
        print("ISBN for "+src+" is "+isbn+".")

    if doi is not False and doi != '':
        # Add extra \n for bibtexparser
        bibtex = fetcher.doi2Bib(doi).strip().replace(',', ",\n")+"\n"
    elif arxiv is not False and arxiv != '':
        bibtex = fetcher.arXiv2Bib(arxiv).strip().replace(',', ",\n")+"\n"
    elif isbn is not False and isbn != '':
        # Idem
        bibtex = fetcher.isbn2Bib(isbn).strip()+"\n"
    else:
        bibtex = ''

    bibtex = BibTexParser(bibtex)
    bibtex = bibtex.get_entry_dict()
    if len(bibtex) > 0:
        bibtex_name = list(bibtex.keys())[0]
        bibtex = bibtex[bibtex_name]
        bibtex_string = tools.parsed2Bibtex(bibtex)
    else:
        bibtex_string = ''

    if not autoconfirm:
        bibtex = checkBibtex(src, bibtex_string)
        tag = tools.rawInput("Tag for this paper (leave empty for default) ? ")
    # When autoconfirming, the tag passed as argument is kept as is
    bibtex['tag'] = tag

    new_name = backend.getNewName(src, bibtex, tag)

    while os.path.exists(new_name):
        tools.warning("file "+new_name+" already exists.")
        default_rename = new_name.replace(tools.getExtension(new_name),
                                          " (2)"+tools.getExtension(new_name))
        rename = tools.rawInput("New name ["+default_rename+"]? ")
        if rename == '':
            new_name = default_rename
        else:
            new_name = rename
    bibtex['file'] = new_name

    try:
        shutil.copy2(src, new_name)
    except shutil.Error:
        new_name = False
        sys.exit("Unable to move file to library dir " +
                 config.get("folder")+".")

    # Remove first page of IOP papers
    try:
        if 'IOP' in bibtex['publisher'] and bibtex['type'] == 'article':
            tearpages.tearpage(new_name)
    except (KeyError, shutil.Error, IOError):
        pass

    backend.bibtexAppend(bibtex)
    return new_name


def editEntry(entry, file_id='both'):
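    """Edit a library entry, identified by filename or bibtex id.

    Opens the entry for edition, moves the associated file if the tag
    changed and rewrites the index. Returns True on success.
    """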
    bibtex = backend.getBibtex(entry, file_id)
    if bibtex is False:
        tools.warning("Entry "+entry+" does not exist.")
        return False

    if file_id == 'file':
        filename = entry
    else:
        filename = bibtex['file']
    new_bibtex = checkBibtex(filename, tools.parsed2Bibtex(bibtex))

    # Tag update
    if new_bibtex['tag'] != bibtex['tag']:
        print("Editing tag, moving file.")
        new_name = backend.getNewName(new_bibtex['file'],
                                      new_bibtex,
                                      new_bibtex['tag'])

        while os.path.exists(new_name):
            tools.warning("file "+new_name+" already exists.")
            default_rename = new_name.replace(tools.getExtension(new_name),
                                              " (2)" +
                                              tools.getExtension(new_name))
            rename = tools.rawInput("New name ["+default_rename+"]? ")
            if rename == '':
                new_name = default_rename
            else:
                new_name = rename
        new_bibtex['file'] = new_name

        try:
            shutil.move(bibtex['file'], new_bibtex['file'])
        except shutil.Error:
            tools.warning('Unable to move file '+bibtex['file']+' to ' +
                          new_bibtex['file']+' according to tag edit.')

        try:
            if not os.listdir(os.path.dirname(bibtex['file'])):
                os.rmdir(os.path.dirname(bibtex['file']))
        except OSError:
            tools.warning("Unable to delete empty tag dir " +
                          os.path.dirname(bibtex['file']))

    try:
        with open(config.get("folder")+'index.bib', 'r', encoding='utf-8') \
                as fh:
            index = BibTexParser(fh.read())
        index = index.get_entry_dict()
    except (TypeError, IOError):
        tools.warning("Unable to open index file.")
        return False

    index[new_bibtex['id']] = new_bibtex
    backend.bibtexRewrite(index)
    return True


def downloadFile(url, filetype, manual, autoconfirm, tag):
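    """Download a paper from url and add it to the library.

    Returns the name under which the file was imported, or False.
    """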
    print('Downloading '+url)
    dl, contenttype = fetcher.download(url)

    if dl is not False:
        print('Download finished')
        tmp = tempfile.NamedTemporaryFile(suffix='.'+contenttype)

        with open(tmp.name, 'w+', encoding='utf-8') as fh:
            fh.write(dl)
        new_name = addFile(tmp.name, filetype, manual, autoconfirm, tag)
        if new_name is False:
            return False
        tmp.close()
        return new_name
    else:
        tools.warning("Could not fetch "+url)
        return False


def openFile(ident):
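    """Open the file associated with ident through xdg-open."""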
    try:
        with open(config.get("folder")+'index.bib', 'r', encoding='utf-8') \
                as fh:
            bibtex = BibTexParser(fh.read())
        bibtex = bibtex.get_entry_dict()
    except (TypeError, IOError):
        tools.warning("Unable to open index file.")
        return False

    if ident not in list(bibtex.keys()):
        return False
    else:
        subprocess.Popen(['xdg-open', bibtex[ident]['file']])
        return True


def resync():
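    """Resync the library folder with the bibtex index.

    Prompts for a file to attach to index entries without one (or deletes
    them), imports or deletes files which have no index entry, and removes
    empty tag directories afterwards.
    """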
    diff = backend.diffFilesIndex()

    if diff is False:
        return False

    for key in diff:
        entry = diff[key]
        if entry['file'] == '':
            print("\nFound entry in index without associated file: " +
                  entry['id'])
            print("Title:\t"+entry['title'])
            loop = True
            while loop:
                filename = tools.rawInput("File to import for this entry " +
                                          "(leave empty to delete the " +
                                          "entry)? ")
                if filename == '':
                    break
                else:
                    # Accept the file unless an identifier mismatch is
                    # found below and the user asks to retry
                    loop = False
                    if 'doi' in list(entry.keys()):
                        doi = fetcher.findDOI(filename)
                        if doi is not False and doi != entry['doi']:
                            loop = tools.rawInput("Found DOI does not " +
                                                  "match bibtex entry " +
                                                  "DOI, continue anyway " +
                                                  "? [y/N]")
                            loop = (loop.lower() != 'y')
                    if 'Eprint' in list(entry.keys()):
                        arxiv = fetcher.findArXivId(filename)
                        if arxiv is not False and arxiv != entry['Eprint']:
                            loop = tools.rawInput("Found arXiv id does " +
                                                  "not match bibtex " +
                                                  "entry arxiv id, " +
                                                  "continue anyway ? [y/N]")
                            loop = (loop.lower() != 'y')
                    if 'isbn' in list(entry.keys()):
                        isbn = fetcher.findISBN(filename)
                        if isbn is not False and isbn != entry['isbn']:
                            loop = tools.rawInput("Found ISBN does not " +
                                                  "match bibtex entry " +
                                                  "ISBN, continue anyway " +
                                                  "? [y/N]")
                            loop = (loop.lower() != 'y')
            if filename == '':
                backend.deleteId(entry['id'])
                print("Deleted entry \""+entry['id']+"\".")
            else:
                new_name = backend.getNewName(filename, entry)
                try:
                    shutil.copy2(filename, new_name)
                    print("Imported new file "+filename+" for entry " +
                          entry['id']+".")
                except shutil.Error:
                    new_name = False
                    sys.exit("Unable to move file to library dir " +
                             config.get("folder")+".")
                # Point the index at the imported copy
                backend.bibtexEdit(entry['id'], {'file': new_name})
        else:
            print("Found file without any associated entry in index:")
            print(entry['file'])
            action = ''
            while action.lower() not in ['import', 'delete']:
                action = tools.rawInput("What to do? [import / delete] ")
                action = action.lower()
            if action == 'import':
                tmp = tempfile.NamedTemporaryFile()
                shutil.copy(entry['file'], tmp.name)
                filetype = tools.getExtension(entry['file'])
                try:
                    os.remove(entry['file'])
                except OSError:
                    tools.warning("Unable to delete file "+entry['file'])
                # manual=False, autoconfirm=False, default tag
                if not addFile(tmp.name, filetype, False, False, ''):
                    tools.warning("Unable to reimport file "+entry['file'])
                tmp.close()
            else:
                backend.deleteFile(entry['file'])
                print(entry['file'] + " removed from disk and " +
                      "index.")
    # Check for empty tag dirs
    for i in os.listdir(config.get("folder")):
        if (os.path.isdir(config.get("folder") + i) and
                not os.listdir(config.get("folder") + i)):
            try:
                os.rmdir(config.get("folder") + i)
            except OSError:
                tools.warning("Found empty tag dir "+config.get("folder")+i +
                              " but could not delete it.")


def update(entry):
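    """Look for a new arXiv version of entry and offer to import it."""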
    update = backend.updateArXiv(entry)
    if update is not False:
        print("New version found for "+entry)
        print("\t Title: "+update['title'])
        confirm = tools.rawInput("Download it ? [Y/n] ")
        if confirm.lower() == 'n':
            return
        url = 'http://arxiv.org/pdf/'+update['eprint']
        # manual=False, autoconfirm=False, default tag, as for a fresh import
        new_name = downloadFile(url, 'article', False, False, '')
        if new_name is not False:
            print(update['eprint']+" successfully imported as "+new_name)
        else:
            tools.warning("An error occurred while downloading "+url)
        confirm = tools.rawInput("Delete previous version ? [y/N] ")
        if confirm.lower() == 'y':
            if not backend.deleteId(entry):
                if not backend.deleteFile(entry):
                    tools.warning("Unable to remove previous version.")
                    return
            print("Previous version successfully deleted.")


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="A bibliography " +
                                     "management tool.")
    subparsers = parser.add_subparsers(help="sub-command help", dest='parser')
    subparsers.required = True  # Fix for Python 3.3.5

    parser_download = subparsers.add_parser('download', help="download help")
    parser_download.add_argument('-t', '--type', default=None,
                                 choices=['article', 'book'],
                                 help="type of the file to download")
    parser_download.add_argument('-m', '--manual', default=False,
                                 action='store_true',
                                 help="disable auto-download of bibtex")
    parser_download.add_argument('-y', default=False, action='store_true',
                                 help="confirm all")
    parser_download.add_argument('--tag', default='', help="tag")
    parser_download.add_argument('url', nargs='+',
                                 help="url of the file to import")
    parser_download.set_defaults(func='download')

    parser_import = subparsers.add_parser('import', help="import help")
    parser_import.add_argument('-t', '--type', default=None,
                               choices=['article', 'book'],
                               help="type of the file to import")
    parser_import.add_argument('-m', '--manual', default=False,
                               action='store_true',
                               help="disable auto-download of bibtex")
    parser_import.add_argument('-y', default=False, action='store_true',
                               help="confirm all")
    parser_import.add_argument('--tag', default='', help="tag")
    parser_import.add_argument('file', nargs='+',
                               help="path to the file to import")
    parser_import.add_argument('--skip', nargs='+',
                               help="path to files to skip", default=[])
    parser_import.set_defaults(func='import')

    parser_delete = subparsers.add_parser('delete', help="delete help")
    parser_delete.add_argument('entries', metavar='entry', nargs='+',
                               help="a filename or an identifier")
    parser_delete.add_argument('--skip', nargs='+',
                               help="path to files to skip", default=[])
    group = parser_delete.add_mutually_exclusive_group()
    group.add_argument('--id', action="store_true", default=False,
                       help="id based deletion")
    group.add_argument('--file', action="store_true", default=False,
                       help="file based deletion")
    parser_delete.add_argument('-f', '--force', default=False,
                               action='store_true',
                               help="delete without confirmation")
    parser_delete.set_defaults(func='delete')

    parser_edit = subparsers.add_parser('edit', help="edit help")
    parser_edit.add_argument('entries', metavar='entry', nargs='+',
                             help="a filename or an identifier")
    parser_edit.add_argument('--skip', nargs='+',
                             help="path to files to skip", default=[])
    group = parser_edit.add_mutually_exclusive_group()
    group.add_argument('--id', action="store_true", default=False,
                       help="id based edition")
    group.add_argument('--file', action="store_true", default=False,
                       help="file based edition")
    parser_edit.set_defaults(func='edit')

    parser_list = subparsers.add_parser('list', help="list help")
    parser_list.set_defaults(func='list')

    parser_open = subparsers.add_parser('open', help="open help")
    parser_open.add_argument('ids', metavar='id', nargs='+',
                             help="an identifier")
    parser_open.set_defaults(func='open')

    parser_export = subparsers.add_parser('export', help="export help")
    parser_export.add_argument('ids', metavar='id', nargs='+',
                               help="an identifier")
    parser_export.set_defaults(func='export')

    parser_resync = subparsers.add_parser('resync', help="resync help")
    parser_resync.set_defaults(func='resync')

    parser_update = subparsers.add_parser('update', help="update help")
    parser_update.add_argument('--entries', metavar='entry', nargs='+',
                               help="a filename or an identifier")
    parser_update.set_defaults(func='update')

    parser_search = subparsers.add_parser('search', help="search help")
    parser_search.add_argument('query', metavar='entry', nargs='+',
                               help="your query, see README for more info.")
    parser_search.set_defaults(func='search')

    args = parser.parse_args()
    try:
        if args.func == 'download':
            skipped = []
            for url in args.url:
                new_name = downloadFile(url, args.type, args.manual, args.y,
                                        args.tag)
                if new_name is not False:
                    print(url+" successfully imported as "+new_name)
                else:
                    tools.warning("An error occurred while downloading "+url)
                    skipped.append(url)
            if len(skipped) > 0:
                print("\nSkipped files:")
                for i in skipped:
                    print(i)
            sys.exit()

        if args.func == 'import':
            skipped = []
            for filename in list(set(args.file) - set(args.skip)):
                new_name = addFile(filename, args.type, args.manual, args.y,
                                   args.tag)
                if new_name is not False:
                    print(filename+" successfully imported as " +
                          new_name+".")
                else:
                    tools.warning("An error occurred while importing " +
                                  filename)
                    skipped.append(filename)
            if len(skipped) > 0:
                print("\nSkipped files:")
                for i in skipped:
                    print(i)
            sys.exit()

        elif args.func == 'delete':
            skipped = []
            for filename in list(set(args.entries) - set(args.skip)):
                if not args.force:
                    confirm = tools.rawInput("Are you sure you want to " +
                                             "delete "+filename+" ? [y/N] ")
                else:
                    confirm = 'y'

                if confirm.lower() == 'y':
                    if args.file or not backend.deleteId(filename):
                        if args.id or not backend.deleteFile(filename):
                            tools.warning("Unable to delete "+filename)
                            sys.exit(1)

                    print(filename+" successfully deleted.")
                else:
                    skipped.append(filename)

            if len(skipped) > 0:
                print("\nSkipped files:")
                for i in skipped:
                    print(i)
            sys.exit()

        elif args.func == 'edit':
            for filename in list(set(args.entries) - set(args.skip)):
                if args.file:
                    file_id = 'file'
                elif args.id:
                    file_id = 'id'
                else:
                    file_id = 'both'
                editEntry(filename, file_id)
            sys.exit()

        elif args.func == 'list':
            listPapers = tools.listDir(config.get("folder"))
            listPapers.sort()

            for paper in listPapers:
                if tools.getExtension(paper) not in [".pdf", ".djvu"]:
                    continue
                print(paper)

        elif args.func == 'search':
            raise Exception('TODO')

        elif args.func == 'open':
            for filename in args.ids:
                if not openFile(filename):
                    sys.exit("Unable to open file associated " +
                             "to ident "+filename)
            sys.exit()

        elif args.func == 'export':
            bibtex = ''
            for id in args.ids:
                bibtex += tools.parsed2Bibtex(backend.getBibtex(id,
                                                                clean=True))
            print(bibtex.strip())
            sys.exit()

        elif args.func == 'resync':
            confirm = tools.rawInput("Resync files and bibtex index? [y/N] ")
            if confirm.lower() == 'y':
                resync()
            sys.exit()

        elif args.func == 'update':
            if args.entries is None:
                entries = backend.getEntries()
            else:
                entries = args.entries
            for entry in entries:
                update(entry)
            sys.exit()

    except KeyboardInterrupt:
        sys.exit()