Commit 17d3e867 authored by Ophélie Gagnard's avatar Ophélie Gagnard Committed by Ophélie Gagnard

Download: Add new criteria and new syntax for selection.

Syntax:
KEY{>=,<=,>,<,==,:}VALUE
VALUE is expected to be json dump except when the delimiter is ":"
With ":" delimiter, accepted special words are "max" and "min".

Selection:
For the sake of readability, the shadir entries are preprocessed in order
to eliminate invalid metadata. This is slower but the metadata is
expected to be small as each entry may require a database access on
the server shacache side.
parent 7e71f9da
...@@ -16,7 +16,9 @@ import argparse ...@@ -16,7 +16,9 @@ import argparse
import hashlib import hashlib
import json import json
import logging import logging
import operator
import os import os
import re
import ssl import ssl
import shutil import shutil
import sys import sys
...@@ -50,7 +52,6 @@ UPLOAD_TIMEOUT = 60 * 60 ...@@ -50,7 +52,6 @@ UPLOAD_TIMEOUT = 60 * 60
logger = logging.getLogger('networkcache') logger = logging.getLogger('networkcache')
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)
class short_exc_info(tuple): class short_exc_info(tuple):
def __str__(self): def __str__(self):
...@@ -411,7 +412,7 @@ class NetworkcacheClient(object): ...@@ -411,7 +412,7 @@ class NetworkcacheClient(object):
return CheckResponse(self._request('cache', sha512sum), sha512sum) return CheckResponse(self._request('cache', sha512sum), sha512sum)
def select(self, key, wanted_metadata_dict={}, required_key_list=frozenset()): def select(self, key, wanted_metadata_dict={}, required_key_list=frozenset()):
'''Return an iterator over shadir entries that match given criteria ''' Return an iterator over shadir entries that match given criteria
''' '''
required_key_test = frozenset(required_key_list).issubset required_key_test = frozenset(required_key_list).issubset
data_list = self.select_generic(key, self.signature_certificate_list) data_list = self.select_generic(key, self.signature_certificate_list)
...@@ -473,6 +474,63 @@ class NetworkcacheClient(object): ...@@ -473,6 +474,63 @@ class NetworkcacheClient(object):
pass pass
return False return False
class NetworkcacheFilter(object):
    """Filter shadir entries according to selection criteria.

    Criteria use the syntax KEY{>=,<=,>,<,==,:}VALUE where VALUE is a
    json dump, except with the ":" separator whose only accepted values
    are the special words "max" and "min".
    """

    # Special VALUE words accepted with the ":" separator.
    # BUG FIX: "min" previously mapped to the max builtin, so "KEY:min"
    # silently selected the maximum instead of the minimum.
    special_word_mapping = {"max": max, "min": min}

    # Split "KEY<sep>VALUE" into [KEY, sep, VALUE] (capturing group keeps
    # the separator in the result).
    parse_criterion = re.compile("([<>]=?|==|:)").split

    # Comparison separators mapped to their operator functions.
    operator_mapping = {
        ">=": operator.ge,
        "<=": operator.le,
        ">": operator.gt,
        "<": operator.lt,
        "==": operator.eq,
    }

    def __init__(self, criterion_list=()):
        """Parse and store a list of selection criteria.

        criterion_list: either an already-parsed (KEY, separator, VALUE)
            3-tuple, or a sequence of "KEY<sep>VALUE" criterion strings.

        Raises NetworkcacheException on an unparsable criterion, an
        unknown special word, or an invalid criterion_list type.
        """
        if type(criterion_list) is tuple and len(criterion_list) == 3:
            # Already parsed: stored as-is for backward compatibility.
            self.criterion_list = criterion_list
        elif isinstance(criterion_list, (list, tuple)):
            # BUG FIX: also accept tuples (other than 3-tuples, handled
            # above) so the default value () no longer raises.
            parsed_criterion_list = []
            for criterion in criterion_list:
                parsed_criterion = self.parse_criterion(criterion, maxsplit=1)
                if len(parsed_criterion) != 3:
                    raise NetworkcacheException(
                        'Could not parse criterion: missing or invalid separator (%s)'
                        % criterion)
                if parsed_criterion[1] != ':':
                    # Comparison criteria carry a json-encoded VALUE.
                    parsed_criterion[2] = json.loads(parsed_criterion[2])
                elif parsed_criterion[2] not in self.special_word_mapping:
                    raise NetworkcacheException('Unknown special word %r'
                        % parsed_criterion[2])
                parsed_criterion_list.append(parsed_criterion)
            self.criterion_list = parsed_criterion_list
        else:
            raise NetworkcacheException('Invalid criteria: %s' % criterion_list)

    def __call__(self, data_dict_iterator):
        """Return the list of shadir entries that match all criteria."""
        # Materialize the iterator: the min/max case iterates the data twice.
        data_dict_list = list(data_dict_iterator)
        for key, op, value in self.criterion_list:
            filtered_data_dict_list = []
            if op == ":":
                # Keep the entries holding the extremum value for key.
                candidate_list = [data_dict[key]
                    for data_dict in data_dict_list if key in data_dict]
                # BUG FIX: guard against min()/max() on an empty sequence
                # (no entry has the key) — yield an empty result instead
                # of raising ValueError.
                if candidate_list:
                    extremum = self.special_word_mapping[value](candidate_list)
                    # BUG FIX: skip entries lacking the key instead of
                    # raising KeyError on data_dict[key].
                    filtered_data_dict_list = [data_dict
                        for data_dict in data_dict_list
                        if key in data_dict and data_dict[key] == extremum]
            else:
                compare = self.operator_mapping[op]
                for data_dict in data_dict_list:
                    try:
                        if key in data_dict and compare(data_dict[key], value):
                            filtered_data_dict_list.append(data_dict)
                    except TypeError:
                        # Uncomparable types: drop the entry but log it.
                        logger.info('Comparison failed: %s %s %s'
                                    ' with types: %s %s %s',
                                    data_dict[key], op, value,
                                    type(data_dict[key]), type(op), type(value))
            data_dict_list = filtered_data_dict_list
        return data_dict_list
class NetworkcacheException(Exception): class NetworkcacheException(Exception):
pass pass
...@@ -522,14 +580,14 @@ def cmd_upload(*args): ...@@ -522,14 +580,14 @@ def cmd_upload(*args):
if args.metadata: if args.metadata:
with open(args.metadata) as g: with open(args.metadata) as g:
try: try:
metadata_dict = json.loads(g.read()) metadata_dict = json.load(g)
if type(metadata_dict) != dict: except json.decoder.JSONDecodeError as e:
raise NetworkcacheException("Not a json-serializable dictionary: %s" % args.metadata) sys.exit("%s: %s" % (args.metadata, e))
except json.decoder.JSONDecodeError: if type(metadata_dict) is not dict:
raise NetworkcacheException("Invalid json in %s" % args.metadata) sys.exit("Not a dictionary: %s" % args.metadata)
else: else:
metadata_dict = dict() metadata_dict = {}
metadata_dict.update(dict(x.split('=', 1) for x in args.meta)) metadata_dict.update(x.split('=', 1) for x in args.meta)
if args.id: if args.id:
metadata_dict.setdefault('id', args.id) metadata_dict.setdefault('id', args.id)
key = args.id key = args.id
...@@ -545,16 +603,19 @@ def cmd_download(*args): ...@@ -545,16 +603,19 @@ def cmd_download(*args):
parser = _newArgumentParser("URL of data to download." + key_help) parser = _newArgumentParser("URL of data to download." + key_help)
parser.add_argument('--id', parser.add_argument('--id',
help="Identifier of the shadir URL, overriding --prefix-key and --suffix-key.") help="Identifier of the shadir URL, overriding --prefix-key and --suffix-key.")
parser.add_argument('meta', nargs='*', metavar='KEY=VALUE', parser.add_argument('meta', nargs='*', metavar='KEY{>=,<=,==,<,>,:}VALUE',
help="Extra metadata.") help='Extra metadata. VALUE is expected to be a json dump except when the separator is ":".')
args = parser.parse_args(args or sys.argv[1:]) args = parser.parse_args(args or sys.argv[1:])
nc = NetworkcacheClient(args.config) nc = NetworkcacheClient(args.config)
kw = dict(x.split('=', 1) for x in args.meta)
if args.id: if args.id:
key = args.id key = args.id
else: else:
urlmd5 = hashlib.md5(args.url.encode()).hexdigest() urlmd5 = hashlib.md5(args.url.encode()).hexdigest()
key = args.prefix_key + urlmd5 + args.suffix_key key = args.prefix_key + urlmd5 + args.suffix_key
f = sys.stdout f = sys.stdout
shutil.copyfileobj(nc.download(next(nc.select(key, kw))['sha512']), data_list = NetworkcacheFilter(args.meta)(nc.select(key))
getattr(f, 'buffer', f)) if not data_list:
sys.exit("No result found with given criteria.")
shutil.copyfileobj(nc.download(data_list[0]['sha512']),
getattr(f, 'buffer', f))
f.close()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment