Autoconfirm option + flake8

parent 33c377c74c
commit e3bc6f2d92

@@ -138,7 +138,6 @@ Here are some sources of inspirations for this project :
 
 A list of ideas and TODO. Don't hesitate to give feedback on the ones you really want or to propose your owns.
 
-40. Option to automatically confirm
 50. Anti-duplicate ?
 55. Customization options for naming
 60. Check stored versions when updating arxiv papers

backend.py | 11

@@ -7,7 +7,6 @@ import tools
 import fetcher
 import params
 from bibtexparser.bparser import BibTexParser
-from bibtexparser.customization import homogeneize_latex_encoding
 from codecs import open
 
 
@@ -159,20 +158,20 @@ def deleteFile(filename):
             os.remove(bibtex[key]['file'])
         except:
             tools.warning("Unable to delete file associated to id " +
                           key+" : "+bibtex[key]['file'])
 
         try:
             if not os.listdir(os.path.dirname(filename)):
                 os.rmdir(os.path.dirname(filename))
         except:
             tools.warning("Unable to delete empty tag dir " +
                           os.path.dirname(filename))
 
         try:
             del(bibtex[key])
         except KeyError:
             tools.warning("No associated bibtex entry in index for " +
                           "file " + bibtex[key]['file'])
         except:
             pass
     if found:
@@ -190,7 +189,7 @@ def diffFilesIndex():
     * only file entry if file with missing bibtex entry
     """
     files = tools.listDir(params.folder)
-    files = [ i for i in files if tools.getExtension(i) in ['.pdf', '.djvu'] ]
+    files = [i for i in files if tools.getExtension(i) in ['.pdf', '.djvu']]
     try:
         with open(params.folder+'index.bib', 'r', encoding='utf-8') as fh:
             index = BibTexParser(fh.read())
@@ -261,7 +260,7 @@ def updateArXiv(entry):
     """
     bibtex = getBibtex(entry)
     # Check arXiv
     if('archiveprefix' not in bibtex or
        'arXiv' not in bibtex['archiveprefix']):
         return False
 

fetcher.py

@@ -35,7 +35,7 @@ def download(url):
             dl += buf
             dl_size += len(buf)
             done = int(50 * dl_size / size)
-            sys.stdout.write("\r[%s%s]"%('='*done,' '*(50-done)))
+            sys.stdout.write("\r[%s%s]" % ('='*done, ' '*(50-done)))
             sys.stdout.write(" "+str(int(float(done)/52*100))+"%")
             sys.stdout.flush()
     contenttype = False
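
The reformatted write call produces exactly the same output as before; a tiny standalone check of the progress-bar formatting (the value of done is made up):

    # Standalone illustration of the progress-bar line above; done=20 is a made-up value.
    import sys
    done = 20
    sys.stdout.write("\r[%s%s]" % ('='*done, ' '*(50-done)))
    sys.stdout.write(" "+str(int(float(done)/52*100))+"%")
    sys.stdout.flush()
    # Renders a 50-character bar followed by the percentage, e.g. [====...   ] 38%
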
@@ -56,7 +56,7 @@ def download(url):
 
 
 isbn_re = re.compile(r"isbn (([0-9]{3}[ -])?[0-9][ -][0-9]{2}[ -][0-9]{6}[ -][0-9])",
                      re.IGNORECASE)
 
 
 def findISBN(src):

main.py | 50

@@ -13,7 +13,6 @@ import tearpages
 import tools
 import params
 from bibtexparser.bparser import BibTexParser
-from bibtexparser.customization import homogeneize_latex_encoding
 from codecs import open
 
 EDITOR = os.environ.get('EDITOR') if os.environ.get('EDITOR') else 'vim'
@@ -59,7 +58,6 @@ def checkBibtex(filename, bibtex_string):
             tools.rawInput("Press Enter to go back to editor.")
             continue
 
-
         if old_filename is not False and 'file' not in bibtex:
             tools.warning("Invalid bibtex entry. No filename given.")
             tools.rawInput("Press Enter to go back to editor.")
@@ -80,7 +78,7 @@ def checkBibtex(filename, bibtex_string):
     return bibtex
 
 
-def addFile(src, filetype, manual):
+def addFile(src, filetype, manual, autoconfirm, tag):
     """
     Add a file to the library
     """
@@ -152,9 +150,14 @@ def addFile(src, filetype, manual):
         bibtex_string = tools.parsed2Bibtex(bibtex)
     else:
         bibtex_string = ''
-    bibtex = checkBibtex(src, bibtex_string)
 
-    tag = tools.rawInput("Tag for this paper (leave empty for default) ? ")
+    if not autoconfirm:
+        bibtex = checkBibtex(src, bibtex_string)
+
+    if not autoconfirm:
+        tag = tools.rawInput("Tag for this paper (leave empty for default) ? ")
+    else:
+        tag = args.tag
     bibtex['tag'] = tag
 
     new_name = backend.getNewName(src, bibtex, tag)
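
The new parameters let callers skip both interactive prompts. A minimal self-contained sketch of the same "prompt unless pre-confirmed" idea, with made-up names rather than the project's code (in the hunk above the non-interactive tag value comes from args.tag):

    # Illustration only, not the project's code; mirrors the branch added above.
    def pick_tag(autoconfirm, default_tag=''):
        # Ask the user only when automatic confirmation was not requested.
        if not autoconfirm:
            return input("Tag for this paper (leave empty for default) ? ")
        return default_tag

    print(pick_tag(autoconfirm=True, default_tag='to-read'))  # prints 'to-read', no prompt
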
@@ -244,7 +247,7 @@ def editEntry(entry, file_id='both'):
     return True
 
 
-def downloadFile(url, filetype, manual):
+def downloadFile(url, filetype, manual, autoconfirm, tag):
     print('Downloading '+url)
     dl, contenttype = fetcher.download(url)
 
@@ -254,7 +257,7 @@ def downloadFile(url, filetype, manual):
 
         with open(tmp.name, 'w+') as fh:
             fh.write(dl)
-        new_name = addFile(tmp.name, filetype, manual)
+        new_name = addFile(tmp.name, filetype, manual, autoconfirm, tag)
         tmp.close()
         return new_name
     else:
@@ -302,25 +305,25 @@ def resync():
             doi = fetcher.findDOI(filename)
             if doi is not False and doi != entry['doi']:
                 loop = tools.rawInput("Found DOI does not " +
                                       "match bibtex entry " +
                                       "DOI, continue anyway " +
                                       "? [y/N]")
                 loop = (loop.lower() != 'y')
         if 'Eprint' in entry.keys():
             arxiv = fetcher.findArXivId(filename)
             if arxiv is not False and arxiv != entry['Eprint']:
                 loop = tools.rawInput("Found arXiv id does " +
                                       "not match bibtex " +
                                       "entry arxiv id, " +
                                       "continue anyway ? [y/N]")
                 loop = (loop.lower() != 'y')
         if 'isbn' in entry.keys():
             isbn = fetcher.findISBN(filename)
             if isbn is not False and isbn != entry['isbn']:
                 loop = tools.rawInput("Found ISBN does not " +
                                       "match bibtex entry " +
                                       "ISBN, continue anyway " +
                                       "? [y/N]")
                 loop = (loop.lower() != 'y')
         continue
     if filename == '':
@@ -404,6 +407,9 @@ if __name__ == '__main__':
     parser_download.add_argument('-m', '--manual', default=False,
                                  action='store_true',
                                  help="disable auto-download of bibtex")
+    parser_download.add_argument('-y', default=False,
+                                 help="Confirm all")
+    parser_download.add_argument('--tag', default='', help="Tag")
     parser_download.add_argument('url', nargs='+',
                                  help="url of the file to import")
     parser_download.set_defaults(func='download')
@@ -415,6 +421,9 @@ if __name__ == '__main__':
     parser_import.add_argument('-m', '--manual', default=False,
                                action='store_true',
                                help="disable auto-download of bibtex")
+    parser_import.add_argument('-y', default=False,
+                               help="Confirm all")
+    parser_import.add_argument('--tag', default='', help="Tag")
     parser_import.add_argument('file', nargs='+',
                                help="path to the file to import")
     parser_import.add_argument('--skip', nargs='+',
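
Both subparsers now accept the same two options. A minimal, self-contained sketch of how they parse (an illustration, not the project's parser: it assumes a store_true action for -y, whereas the hunks above only set default=False, and the other arguments are omitted):

    # Standalone illustration of the new -y / --tag options; simplified wiring.
    import argparse

    parser = argparse.ArgumentParser(prog='main.py')
    subparsers = parser.add_subparsers(dest='func')

    parser_import = subparsers.add_parser('import')
    parser_import.add_argument('-y', action='store_true', default=False,
                               help="Confirm all")  # store_true is an assumption here
    parser_import.add_argument('--tag', default='', help="Tag")
    parser_import.add_argument('file', nargs='+',
                               help="path to the file to import")

    args = parser.parse_args(['import', '-y', '--tag', 'to-read', 'paper.pdf'])
    print(args.y, args.tag, args.file)  # True to-read ['paper.pdf']
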
@@ -440,7 +449,7 @@ if __name__ == '__main__':
     parser_edit.add_argument('entries', metavar='entry', nargs='+',
                              help="a filename or an identifier")
     parser_edit.add_argument('--skip', nargs='+',
                              help="path to files to skip")
     group = parser_edit.add_mutually_exclusive_group()
     group.add_argument('--id', action="store_true", default=False,
                        help="id based deletion")
@@ -468,7 +477,6 @@ if __name__ == '__main__':
     parser_update.set_defaults(func='update')
 
     parser_search = subparsers.add_parser('search', help="search help")
-    # TODO: Check
     parser_search.add_argument('query', metavar='entry', nargs='+',
                                help="your query, see README for more info.")
     parser_search.set_defaults(func='search')
@@ -477,7 +485,8 @@ if __name__ == '__main__':
     try:
         if args.func == 'download':
             for url in args.url:
-                new_name = downloadFile(url, args.type, args.manual)
+                new_name = downloadFile(url, args.type, args.manual, args.y,
+                                        args.tag)
                 if new_name is not False:
                     print(url+" successfully imported as "+new_name)
                 else:
@@ -486,7 +495,8 @@ if __name__ == '__main__':
 
         if args.func == 'import':
             for filename in list(set(args.file) - set(args.skip)):
-                new_name = addFile(filename, args.type, args.manual)
+                new_name = addFile(filename, args.type, args.manual, args.y,
+                                   args.tag)
                 if new_name is not False:
                     print(sys.argv[2]+" successfully imported as " +
                           new_name+".")