* Skip option in manual fallback
* Homogenize manual entry for books / articles
* Sum up skipped elements
* Strip trailing whitespace in search.py
commit 3591f0cb1f
parent 059b6c26a4
bmc.py | 46
@@ -106,29 +106,37 @@ def addFile(src, filetype, manual, autoconfirm, tag):
             tools.warning("Could not determine the DOI nor the arXiv id nor " +
                           "the ISBN for "+src+". Switching to manual entry.")
             doi_arxiv_isbn = ''
-            while doi_arxiv_isbn not in ['doi', 'arxiv', 'isbn', 'manual']:
+            while doi_arxiv_isbn not in ['doi', 'arxiv', 'isbn', 'manual', 'skip']:
                 doi_arxiv_isbn = tools.rawInput("DOI / arXiv " +
-                                                "/ ISBN / manual? ").lower()
+                                                "/ ISBN / manual / skip? ").lower()
             if doi_arxiv_isbn == 'doi':
                 doi = tools.rawInput('DOI? ')
             elif doi_arxiv_isbn == 'arxiv':
                 arxiv = tools.rawInput('arXiv id? ')
             elif doi_arxiv_isbn == 'isbn':
                 isbn = tools.rawInput('ISBN? ')
+            elif doi_arxiv_isbn == 'skip':
+                return False
         elif filetype == 'article':
             tools.warning("Could not determine the DOI nor the arXiv id for " +
                           src+", switching to manual entry.")
             doi_arxiv = ''
-            while doi_arxiv not in ['doi', 'arxiv', 'manual']:
-                doi_arxiv = tools.rawInput("DOI / arXiv / manual? ").lower()
+            while doi_arxiv not in ['doi', 'arxiv', 'manual', 'skip']:
+                doi_arxiv = tools.rawInput("DOI / arXiv / manual / skip? ").lower()
             if doi_arxiv == 'doi':
                 doi = tools.rawInput('DOI? ')
             elif doi_arxiv == 'arxiv':
                 arxiv = tools.rawInput('arXiv id? ')
+            elif doi_arxiv == 'skip':
+                return False
         elif filetype == 'book':
             tools.warning("Could not determine the ISBN for "+src +
                           ", switching to manual entry.")
-            isbn = tools.rawInput('ISBN? ')
+            isbn_manual = ''
+            while isbn_manual not in ['isbn', 'manual', 'skip']:
+                isbn_manual = tools.rawInput("ISBN / manual / skip? ").lower()
+            if isbn_manual == 'isbn':
+                isbn = tools.rawInput('ISBN? ')
+            elif isbn_manual == 'skip':
+                return False
     elif doi is not False:
         print("DOI for "+src+" is "+doi+".")
     elif arxiv is not False:
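The manual fallback now has the same shape in every branch: keep prompting until the answer is one of the accepted keywords, and return False as soon as the user answers "skip". A minimal standalone sketch of that pattern, using plain input() and a hypothetical prompt_identifier() helper rather than bmc.py's tools.rawInput:

# Sketch of the prompt-until-valid / skip pattern used in the hunk above.
# prompt_identifier() is a hypothetical helper, not part of bmc.py.
def prompt_identifier(choices):
    """Ask until the answer is one of choices or 'skip'; None means skip."""
    answer = ''
    while answer not in choices + ['skip']:
        answer = input(" / ".join(choices) + " / skip? ").lower()
    return None if answer == 'skip' else answer

if __name__ == '__main__':
    kind = prompt_identifier(['doi', 'arxiv', 'isbn', 'manual'])
    if kind is None:
        print("Entry skipped.")
    else:
        print("Chosen identifier type: " + kind)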
@@ -263,6 +271,8 @@ def downloadFile(url, filetype, manual, autoconfirm, tag):
         with open(tmp.name, 'w+') as fh:
             fh.write(dl)
         new_name = addFile(tmp.name, filetype, manual, autoconfirm, tag)
+        if new_name is False:
+            return False
         tmp.close()
         return new_name
     else:
@@ -475,7 +485,7 @@ if __name__ == '__main__':
 
     parser_export = subparsers.add_parser('export', help="export help")
     parser_export.add_argument('ids', metavar='id', nargs='+',
-                               help="an identifier")
+                               help="an identifier")
     parser_export.set_defaults(func='export')
 
     parser_resync = subparsers.add_parser('resync', help="resync help")
@@ -494,6 +504,7 @@ if __name__ == '__main__':
     args = parser.parse_args()
     try:
         if args.func == 'download':
+            skipped = []
             for url in args.url:
                 new_name = downloadFile(url, args.type, args.manual, args.y,
                                         args.tag)
@@ -501,9 +512,15 @@ if __name__ == '__main__':
                     print(url+" successfully imported as "+new_name)
                 else:
                     tools.warning("An error occurred while downloading "+url)
+                    skipped.append(url)
+            if len(skipped) > 0:
+                print("\nSkipped files:")
+                for i in skipped:
+                    print(i)
             sys.exit()
 
         if args.func == 'import':
+            skipped = []
             for filename in list(set(args.file) - set(args.skip)):
                 new_name = addFile(filename, args.type, args.manual, args.y,
                                    args.tag)
@@ -513,9 +530,15 @@ if __name__ == '__main__':
                 else:
                     tools.warning("An error occurred while importing " +
                                   filename)
+                    skipped.append(filename)
+            if len(skipped) > 0:
+                print("\nSkipped files:")
+                for i in skipped:
+                    print(i)
             sys.exit()
 
         elif args.func == 'delete':
+            skipped = []
             for filename in list(set(args.entries) - set(args.skip)):
                 if not args.force:
                     confirm = tools.rawInput("Are you sure you want to " +
@@ -530,6 +553,13 @@ if __name__ == '__main__':
                             sys.exit(1)
 
                     print(filename+" successfully deleted.")
+                else:
+                    skipped.append(filename)
+
+            if len(skipped) > 0:
+                print("\nSkipped files:")
+                for i in skipped:
+                    print(i)
             sys.exit()
 
         elif args.func == 'edit':
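Each command-line branch (download, import, delete) now accumulates the entries it could not handle in a skipped list and prints a recap before exiting, instead of failing silently. A self-contained sketch of that accumulate-then-report pattern follows; process() is only a stand-in for bmc.py's downloadFile()/addFile(), which return False for a failed or skipped entry:

# Sketch of the "sum up skipped elements" pattern added in the hunks above.
# process() is a placeholder, not bmc.py code.
def process(item):
    return False if item.endswith(".broken") else item

def run(items):
    skipped = []
    for item in items:
        new_name = process(item)
        if new_name is not False:
            print(item + " successfully imported as " + new_name)
        else:
            print("An error occurred while importing " + item)
            skipped.append(item)
    if len(skipped) > 0:
        print("\nSkipped files:")
        for i in skipped:
            print(i)

if __name__ == '__main__':
    run(["a.pdf", "b.broken", "c.pdf"])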
search.py | 34
@@ -6,7 +6,7 @@ http://pyparsing.wikispaces.com/file/view/searchparser.py/30112816/searchparser.
 
 version 2006-03-09
 
-This search query parser uses the excellent Pyparsing module
+This search query parser uses the excellent Pyparsing module
 (http://pyparsing.sourceforge.net/) to parse search queries by users.
 It handles:
 
@@ -34,7 +34,7 @@ are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
-  this list of conditions and the following disclaimer in the documentation
+  this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of Estrate nor the names of its contributors may be used
   to endorse or promote products derived from this software without specific
@@ -45,10 +45,10 @@ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 CONTRIBUTORS:
@@ -72,12 +72,12 @@ class SearchQueryParser:
             'wordwildcard': self.evaluateWordWildcard,
         }
         self._parser = self.parser()
-
+
     def parser(self):
         """
         This function returns a parser.
         The grammar should be like most full text search engines (Google, Tsearch, Lucene).
-
+
         Grammar:
         - a query consists of alphanumeric words, with an optional '*' wildcard
           at the end of a word
@@ -89,20 +89,20 @@ class SearchQueryParser:
         - if an operator is missing, use an 'and' operator
         """
         operatorOr = Forward()
-
+
         operatorWord = Group(Combine(Word(alphanums) + Suppress('*'))).setResultsName('wordwildcard') | \
                        Group(Word(alphanums)).setResultsName('word')
-
+
         operatorQuotesContent = Forward()
         operatorQuotesContent << (
             (operatorWord + operatorQuotesContent) | operatorWord
         )
-
+
         operatorQuotes = Group(
             Or([Suppress('"') + operatorQuotesContent + Suppress('"'),
                 Suppress('\'') + operatorQuotesContent + Suppress('\'')]
         )).setResultsName("quotes") | operatorWord
-
+
         operatorParenthesis = Group(
             (Suppress("(") + operatorOr + Suppress(")"))
         ).setResultsName("parenthesis") | operatorQuotes
@@ -118,7 +118,7 @@ class SearchQueryParser:
         ).setResultsName("and") | Group(
             operatorNot + OneOrMore(~oneOf("and or") + operatorAnd)
         ).setResultsName("and") | operatorNot)
-
+
         operatorOr << (Group(
             operatorAnd + Suppress(Keyword("or", caseless=True)) + operatorOr
         ).setResultsName("or") | operatorAnd)
@@ -163,7 +163,7 @@ class SearchQueryParser:
 
     def evaluateWordWildcard(self, argument):
         return self.GetWordWildcard(argument[0])
-
+
     def evaluate(self, argument):
         return self._methods[argument.getName()](argument)
 
@@ -236,7 +236,7 @@ class ParserTest(SearchQueryParser):
         7: 'nothing',
         8: 'helper',
     }
-
+
     index = {
         'help': set((1, 2, 4, 5)),
         'me': set((2,)),
@@ -270,7 +270,7 @@ class ParserTest(SearchQueryParser):
             if self.docs[item].count(search_string):
                 result.add(item)
         return result
-
+
     def GetNot(self, not_set):
         all = set(self.docs.keys())
         return all.difference(not_set)
@@ -290,7 +290,7 @@ class ParserTest(SearchQueryParser):
                 print '>>>>>>>>>>>>>>>>>>>>>>Test ERROR<<<<<<<<<<<<<<<<<<<<<'
             print ''
         return all_ok
-
+
 if __name__=='__main__':
     if ParserTest().Test():
         print 'All tests OK'
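The search.py side of the commit is limited to trailing-whitespace removal, as the commit message states; no behaviour changes. For reference, a one-off cleanup of that kind can be scripted in a few lines (the filename is only an example, not something the commit adds):

# Example: strip trailing whitespace from a file, which is all the
# search.py half of this commit does. The path is illustrative.
def strip_trailing_whitespace(path):
    with open(path, 'r') as fh:
        lines = fh.readlines()
    with open(path, 'w') as fh:
        for line in lines:
            fh.write(line.rstrip() + "\n")

if __name__ == '__main__':
    strip_trailing_whitespace("search.py")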