Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

distant auth #11

Merged
merged 24 commits into from
Aug 18, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
1cd68bf
adding some decorator's decorators in preparation for distant auth pr…
JulienParis Jun 17, 2019
581f74f
remapping auth endpoints in dedicated .env file for distant auth
JulienParis Jun 17, 2019
2750061
adding some args for stats query
JulienParis Jun 17, 2019
10b28da
preparing decorators and auth_distant for distant auth in login and r…
JulienParis Jun 18, 2019
63110a2
first draft for distant auth on login
JulienParis Jun 18, 2019
2c0e8ef
more about anonymous_required distant auth
JulienParis Jun 19, 2019
d8f1935
@distant_auth as global endpoint decorator + decorators' decorator|fu…
JulienParis Jun 20, 2019
9949a30
adding distant_auth decorator on every endpoint in api_auth and api_u…
JulienParis Jun 20, 2019
9184d4d
adding custom jwt_required_sd and jwt_optional_sd decorators
JulienParis Jun 24, 2019
9929efd
adding url_append args to distant auth queries (update user for inst…
JulienParis Jun 24, 2019
a88a738
added return_fields arg compatible with dso|dso and is_map true|false
JulienParis Jun 24, 2019
b9375e6
beginning to add stats endpoint with query string args + payload json
JulienParis Jun 25, 2019
88da3bb
adding first function for stats (mongodb aggregation pipeline)
JulienParis Jun 30, 2019
2f63d95
more on stats query / no agg_level in payload
JulienParis Jun 30, 2019
baf9ffe
changed get_claims to distant auth switcher + added flask_csv to requ…
JulienParis Jun 30, 2019
7466e65
cleaning unwinder in query_stats + added export csv endpoint on dsi|…
JulienParis Jul 1, 2019
8968990
added prj export function mimicking dso export endpoint (both sha…
JulienParis Jul 1, 2019
26fcc09
adding search_for and search_filter to stats query
JulienParis Jul 1, 2019
831e062
refacto search_for adding to query in query_utils + search_filters
JulienParis Jul 1, 2019
39c5f95
changing lightly stats args in request args building process
JulienParis Jul 2, 2019
69b69f9
fixing major bug in mapping dsi_to_dmf
JulienParis Jul 2, 2019
1f180d9
added oid_dsi field in dso_docs structure + fixing field_return response
JulienParis Jul 3, 2019
e94c75d
minor fix to avoid error when empty payload on query_stats
JulienParis Jul 14, 2019
5024bd1
new format for aggregation : marshallers and query format
JulienParis Jul 22, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
changing lightly stats args in request args building process
  • Loading branch information
JulienParis committed Jul 2, 2019
commit 39c5f95f0dacc0d6c1d53e6c5854c453382e8b3f
14 changes: 13 additions & 1 deletion solidata_api/_core/queries_db/query_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,10 @@ def Query_db_stats (
search_for = query_args.get('search_for', None )
# search_in = query_args.get('search_in', None )
search_filters = query_args.get('search_filters', None )
as_series = query_args.get('as_series', False )
descending = query_args.get('descending', False )

### prepare sorting order
if descending :
sort_order = -1
else :
Expand Down Expand Up @@ -261,9 +264,18 @@ def Query_db_stats (
### check and run pipeline
log.debug( "q_aggregate : \n%s", pformat(q_aggregate) )
results = db_collection.aggregate(q_aggregate)
message = "stats required for this {}".format(document_type_full)
message = "stats required for this {}".format(document_type_full)
document_out = list(results)

### transform list of results as series (e.g. for apexChart format)
if as_series :
log.debug( "as_series : %s", as_series )
serie_format = query_args.get('serie_format', "apexCharts" )
log.debug( "serie_format : %s", serie_format )

### TO DO ...





Expand Down
126 changes: 73 additions & 53 deletions solidata_api/_parsers/parser_classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,13 @@

class RequestParserBuilder :

def __init__( self,
def __init__( self,
add_pagination = False,
add_sorting = False,
add_slice_query = True,
add_queries = False,
add_data_query = False,
add_extra_options_query = False,
add_utils_query = False,
add_map_query = False,
add_filter_query = False,
Expand Down Expand Up @@ -89,73 +90,73 @@ def __init__( self,
required=False,
default=True,
help='just retrieve a slice of the f_data',
# location = 'values'
location = 'args'
)

if add_queries :

# self.baseParser.add_argument('q_title',
# # action='append', ### multiple values
# type=str,
# required=False,
# help='find documents matching this string in the title',
# # location = 'values'
# # action='append', ### multiple values
# type=str,
# required=False,
# help='find documents matching this string in the title',
# location = 'args'
# )
# self.baseParser.add_argument('q_description',
# # action='append', ### multiple values
# type=str,
# required=False,
# help='find documents matching this string in the description',
# # location = 'values'
# # action='append', ### multiple values
# type=str,
# required=False,
# help='find documents matching this string in the description',
# location = 'args'
# )
self.baseParser.add_argument('search_for',
action='append',
type=str,
required=False,
help='find data in documents matching this string in records',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('tags',
action='split',
type=str,
required=False,
help='find documents matching this list of tags oid (separated by commas)',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('oids',
action='split', ### expects string where values are separated by commas
type=str,
required=False,
help='find documents matching this list of oid to find (separated by commas)',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('only_stats',
type=inputs.boolean,
required=False,
default=False,
help='just retrieve the stats of the result',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('ignore_teams',
type=inputs.boolean,
required=False,
default=False,
help='if true retrieve results mixing docs user is in the team or not',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('pivot_results',
type=inputs.boolean,
required=False,
default=False,
help='pivot results',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('normalize',
type=inputs.boolean,
required=False,
default=False,
help='normalize results',
# location = 'values'
location = 'args'
)

if add_map_query :
Expand All @@ -165,29 +166,29 @@ def __init__( self,
required=False,
default=False,
help='get light results for map display : only sd_id, lat, lon',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('as_latlng',
type=inputs.boolean,
required=False,
default=False,
help='coordinates as latlng tuple',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('only_geocoded',
type=inputs.boolean,
required=False,
default=True,
help='retrieve only geocoded items',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('geo_precision',
type=int,
required=False,
default=6,
choices=[0,1,2,3,4,5,6],
help='precision of the coordinates as float numbers',
# location = 'values'
location = 'args'
)

if add_filter_query :
Expand All @@ -197,15 +198,15 @@ def __init__( self,
required=False,
default=False,
help='retrieve uniques values for each tag or category column in records',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('get_uniques',
type=str,
required=False,
# default=None,
choices=dmf_types_uniques,
help='retrieve uniques values for each column in records : text, tag, category, other',
# location = 'values'
location = 'args'
)

if add_data_query :
Expand Down Expand Up @@ -260,11 +261,11 @@ def __init__( self,
location = 'args'
)
# self.baseParser.add_argument('only_f_data',
# type=inputs.boolean,
# required=False,
# default=False,
# help='just retrieve the f_data of the result',
# # location = 'args'
# type=inputs.boolean,
# required=False,
# default=False,
# help='just retrieve the f_data of the result',
# # location = 'args'
# )

if add_utils_query :
Expand All @@ -274,40 +275,57 @@ def __init__( self,
required=False,
default=False,
help='just retrieve the complete f_data docs from the result',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('only_stats',
type=inputs.boolean,
required=False,
default=False,
help='just retrieve the stats of the result',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('normalize',
type=inputs.boolean,
required=False,
default=False,
help='normalize results (aka data) in response',
# location = 'values'
location = 'args'
)

if add_stats_query :
if add_extra_options_query :

self.baseParser.add_argument('fields_to_return',
action='split',
type=str,
required=False,
help='return fields values of this list of fields title (separated by commas)',
# location = 'values'
location = 'args'
)
self.baseParser.add_argument('agreg_categs',
action='split',

if add_stats_query :

# self.baseParser.add_argument('agreg_categs',
# action='split',
# type=str,
# required=False,
# help='agregate unique values of this list of fields title (separated by commas)',
# location = 'args'
# )
self.baseParser.add_argument('as_series',
type=inputs.boolean,
required=False,
default=False,
help='retrieve only geocoded items',
location = 'args'
)
self.baseParser.add_argument('serie_format',
type=str,
required=False,
help='agregate unique values of this list of fields title (separated by commas)',
# location = 'values'
default="apexCharts",
help='return serie as format',
location = 'args'
)

if add_shuffle :

self.baseParser.add_argument('shuffle_seed',
Expand All @@ -316,17 +334,17 @@ def __init__( self,
required=False,
default=None,
help='shuffle the list of results given a seed',
# location = 'values'
location = 'args'
)

if add_files :

self.baseParser.add_argument('data_file',
type=FileStorage,
# location=['files', 'form'],
location='files',
required=False,
help='any data file : tsv, csv, xml, xls, xlsx',
location='files',
)
self.baseParser.add_argument('csv_separator',
type=str,
Expand All @@ -337,11 +355,11 @@ def __init__( self,
location = 'values'
)
# self.baseParser.add_argument(
# 'form_file',
# type=FileStorage,
# location='form',
# required=False,
# help='any data file : tsv, csv, xml, xls, xlsx',
# 'form_file',
# type=FileStorage,
# location='form',
# required=False,
# help='any data file : tsv, csv, xml, xls, xlsx',
# )
# self.baseParser.add_argument(
# 'xls_file',
Expand Down Expand Up @@ -407,8 +425,8 @@ def get_parser (self) :
return self.baseParser


q_minimal = RequestParserBuilder()
query_min_arguments = q_minimal.get_parser
q_minimal = RequestParserBuilder()
query_min_arguments = q_minimal.get_parser

q_arguments = RequestParserBuilder( add_queries=True )
query_arguments = q_arguments.get_parser
Expand All @@ -421,7 +439,7 @@ def get_parser (self) :
query_data_arguments = q_data.get_parser

q_files = RequestParserBuilder( add_files=True )
file_parser = q_files.get_parser
file_parser = q_files.get_parser

q_pagination = RequestParserBuilder(
add_pagination=True,
Expand All @@ -443,7 +461,8 @@ def get_parser (self) :
add_slice_query=False,
add_data_query=True,
add_utils_query=True,
add_stats_query=True,
# add_stats_query=True,
add_extra_options_query=True,
add_map_query=True,
add_filter_query=True,
# add_shuffle=True,
Expand All @@ -456,7 +475,8 @@ def get_parser (self) :
add_slice_query=False,
add_data_query=True,
add_utils_query=True,
add_stats_query=True,
# add_stats_query=True,
add_extra_options_query=True,
add_map_query=True,
add_filter_query=True,
# add_shuffle=True,
Expand All @@ -469,7 +489,7 @@ def get_parser (self) :
add_sorting=True,
add_slice_query=False,
add_data_query=True,
# add_stats_query=True,
add_stats_query=True,
# add_map_query=True,
# add_filter_query=True
)
Expand Down