Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Include return progress arg #76

Merged
merged 41 commits into from
Jan 15, 2024
Merged
Show file tree
Hide file tree
Changes from 19 commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
a299d0a
ignore local venv from pycharm
burnout87 Nov 29, 2023
b20e995
ignore local venv from pycharm
burnout87 Nov 29, 2023
54d4e34
ignore local venv from pycharm
burnout87 Nov 29, 2023
9410dd8
empty get_progress_run
burnout87 Nov 29, 2023
1f8e643
dedicated test
burnout87 Nov 29, 2023
ef0b0de
reverted test-requires
burnout87 Nov 29, 2023
d333c8c
test_return_progress
burnout87 Nov 29, 2023
c5ea387
setting path json files
burnout87 Nov 29, 2023
51cc8fc
get_progress_run requests workflow status and then the output content
burnout87 Dec 20, 2023
87d69de
returning trace endpoint content
burnout87 Dec 21, 2023
fde637d
test html output
burnout87 Dec 22, 2023
cd288bb
NB2WProgressProduct product type
burnout87 Dec 22, 2023
64e6923
bug fix
burnout87 Dec 22, 2023
d9713cf
extended test
burnout87 Dec 22, 2023
1a97e1d
checking content type to build product list
burnout87 Dec 22, 2023
866a06b
properly extracting workflow_status
burnout87 Dec 22, 2023
91e6301
extended test
burnout87 Dec 22, 2023
4ab2afb
path join
burnout87 Jan 3, 2024
908cda3
not needed responses
burnout87 Jan 3, 2024
3b1f104
no kwargs for args get_progress_run
burnout87 Jan 3, 2024
e6a4896
added logging arg for compatibility
burnout87 Jan 4, 2024
81e55b0
added logging arg for compatibility
burnout87 Jan 4, 2024
65f9ce0
better var naming
burnout87 Jan 4, 2024
231a011
extract jobdir only if status is done or started
burnout87 Jan 4, 2024
cc6d354
removed type_key for NB2WProgressProduct
burnout87 Jan 4, 2024
8b7921e
prod_list_factory adjustment
burnout87 Jan 4, 2024
b0d7a3d
extracting more response info from res in build_product_list
burnout87 Jan 4, 2024
c34a044
re-inserted call to get_html_draw
burnout87 Jan 4, 2024
c3e4722
not calling right arg
burnout87 Jan 4, 2024
4d86e18
adapted test
burnout87 Jan 4, 2024
5ba4f27
no need for write and get_html_draw for NB2WProgressProduct
burnout87 Jan 4, 2024
979a79d
test name
burnout87 Jan 4, 2024
3d3d460
removed unused var
burnout87 Jan 10, 2024
dac3a92
Update dispatcher_plugin_nb2workflow/dataserver_dispatcher.py
burnout87 Jan 10, 2024
98bdcca
not needed check
burnout87 Jan 10, 2024
3334125
build_product_list conditions commented
burnout87 Jan 10, 2024
e24247f
logging in case of error from backend
burnout87 Jan 10, 2024
9c07b19
passing the correct param for sentry
burnout87 Jan 10, 2024
5718cab
better exception handling
burnout87 Jan 10, 2024
4030c7c
better exception handling
burnout87 Jan 10, 2024
aa9fea8
not needed import
burnout87 Jan 10, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,7 @@ dispatcher_plugin_nb2workflow/config_dir/data_server_conf.yml
.env
**.nb2workflow**
function.xml
download_*
download_*
/venv
/.idea
/build
38 changes: 38 additions & 0 deletions dispatcher_plugin_nb2workflow/dataserver_dispatcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,44 @@ def test_has_input_products(self, instrument, logger=None):
query_out.set_done('input products check skipped')
return query_out, []

def get_progress_run(self, **kwargs):
    """Query the backend for the current progress of a workflow run.

    Keyword args (all optional, defaulting to the instance attributes):
        task: backend task/notebook name.
        param_dict: query parameters forwarded to the backend.
        run_asynch: whether the run was submitted asynchronously.
        call_back_url: callback URL registered for asynchronous runs.

    Returns:
        (res_trace, query_out): ``res_trace`` is the HTTP response of the
        backend ``trace`` endpoint (HTML progress page), or ``None`` when no
        trace was fetched; ``query_out`` is a ``QueryOutput`` carrying either
        the job status or the failure details.
    """
    query_out = QueryOutput()
    res_trace = None

    task = kwargs.get('task', self.task)
    param_dict = kwargs.get('param_dict', self.param_dict)

    run_asynch = kwargs.get('run_asynch', None)
    call_back_url = kwargs.get('call_back_url', None)
    # `run_asynch` truthiness already covers the None case
    if run_asynch and call_back_url is not None:
        param_dict['_async_request_callback'] = call_back_url
        param_dict['_async_request'] = "yes"

    url = os.path.join(self.data_server_url, 'api/v1.0/get', task.strip('/'))
    res = requests.get(url, params=param_dict)
    if res.status_code in (200, 201):
        res_data = res.json()
        # synchronous responses have no workflow_status and are complete by definition
        workflow_status = res_data['workflow_status'] if run_asynch else 'done'
        resroot = res_data['data'] if run_asynch and workflow_status == 'done' else res_data
        if workflow_status in ('started', 'done'):
            # extract jobdir only when it is actually needed for the trace call,
            # so other statuses cannot fail on a missing 'jobdir' key
            jobdir = resroot['jobdir'].split('/')[-1]
            trace_url = os.path.join(self.data_server_url, 'trace', jobdir, task.strip('/'))
            res_trace = requests.get(trace_url)

        query_out.set_status(0, job_status=workflow_status)
    else:
        try:
            query_out.set_failed('Error in the backend',
                                 message='connection status code: ' + str(res.status_code),
                                 extra_message=res.json()['exceptions'][0])
        except Exception:
            # body was not JSON (or had no exceptions list): report the raw text
            query_out.set_failed('Error in the backend',
                                 message='connection status code: ' + str(res.status_code),
                                 extra_message=res.text)

    return res_trace, query_out

def run_query(self,
call_back_url = None,
run_asynch = True,
Expand Down
33 changes: 27 additions & 6 deletions dispatcher_plugin_nb2workflow/products.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,19 +78,19 @@ def prod_list_factory(cls, output_description_dict, output, out_dir = None, onto
prod_list = []
for key in output_description_dict.keys():
owl_type = output_description_dict[key]['owl_type']

extra_kw = {}
extra_ttl = output_description_dict[key].get('extra_ttl')
if extra_ttl == '\n': extra_ttl = None
if extra_ttl == '\n': extra_ttl = None
if extra_ttl:
extra_kw = {'extra_ttl': extra_ttl}

try:
prod_list.extend( mapping.get(owl_type, cls)._init_as_list(output[key],
out_dir=out_dir,
name=key,
prod_list.extend( mapping.get(owl_type, cls)._init_as_list(output[key],
out_dir=out_dir,
name=key,
**extra_kw
)
)
burnout87 marked this conversation as resolved.
Show resolved Hide resolved
)
except Exception as e:
logger.warning('unable to construct %s product: %s from this: %s ', key, e, output[key])
Expand Down Expand Up @@ -174,6 +174,27 @@ def write(self):
def get_html_draw(self):
return {'image': {'div': '<br>'+self.data_prod, 'script': ''} }


class NB2WProgressProduct(NB2WProduct):
    """Product wrapping the HTML progress/trace page returned by the backend
    while a workflow run is still in progress.
    """
    # TODO to be adapted
    type_key = 'http://odahub.io/ontology#ODAProgressProduct'

    def __init__(self, progress_html_data, out_dir=None, name='progress'):
        self.out_dir = out_dir
        self.name = name
        self.progress_data = progress_html_data

    def write(self):
        # persist the raw HTML under out_dir/name and remember the location
        target_path = os.path.join(self.out_dir, self.name)
        with open(target_path, 'w') as out_file:
            out_file.write(self.progress_data)
        self.file_path = target_path

    def get_html_draw(self):
        # TODO not sure is correct, to verify
        return {'image': {'div': '<br>' + self.progress_data, 'script': ''}}


class NB2WPictureProduct(NB2WProduct):
type_key = 'http://odahub.io/ontology#ODAPictureProduct'

Expand Down
25 changes: 19 additions & 6 deletions dispatcher_plugin_nb2workflow/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
NB2WBinaryProduct,
NB2WPictureProduct,
NB2WTextProduct,
NB2WParameterProduct)
NB2WParameterProduct,
NB2WProgressProduct)
from .dataserver_dispatcher import NB2WDataDispatcher
from cdci_data_analysis.analysis.ontology import Ontology
import os
Expand Down Expand Up @@ -124,20 +125,28 @@ def get_data_server_query(self, instrument, config=None, **kwargs):

def build_product_list(self, instrument, res, out_dir, api=False):
    """Build the list of products from a backend response.

    JSON responses carry the workflow output (top-level 'output' key in
    synchronous mode, nested under 'data' in asynchronous mode) and are turned
    into typed products via the factory; any non-JSON response is treated as
    an HTML progress/trace page and wrapped in a NB2WProgressProduct.
    """
    prod_list = []
    if out_dir is None:
        out_dir = './'
    # dict.get returns None for a missing header, which simply fails the comparison
    if res.headers.get('content-type') == 'application/json':
        # parse the body once instead of re-calling res.json() per access
        res_json = res.json()
        # synchronous mode: payload at top level; asynchronous: under 'data'
        _o_dict = res_json if 'output' in res_json else res_json['data']
        prod_list = NB2WProduct.prod_list_factory(self.backend_output_dict,
                                                  _o_dict['output'],
                                                  out_dir,
                                                  self.ontology_path)
    else:
        _o_text = res.content.decode()
        prod_list.append(NB2WProgressProduct(_o_text, out_dir))

    return prod_list

def process_product_method(self, instrument, prod_list, api=False):
query_out = QueryOutput()


np_dp_list, bin_dp_list, tab_dp_list, bin_im_dp_list, text_dp_list = [], [], [], [], []
np_dp_list, bin_dp_list, tab_dp_list, bin_im_dp_list, text_dp_list, progress_dp_list = [], [], [], [], [], []
if api is True:
for product in prod_list.prod_list:
if isinstance(product, NB2WAstropyTableProduct):
Expand All @@ -152,6 +161,9 @@ def process_product_method(self, instrument, prod_list, api=False):
text_dp_list.append({'name': product.name,
'value': product.parameter_obj.value,
'meta_data': {'uri': product.type_key}})
elif isinstance(product, NB2WProgressProduct):
progress_dp_list.append({'name': product.name,
'value': product.progress_data})
else: # NB2WProduct contains NumpyDataProd by default
np_dp_list.append(product.dispatcher_data_prod.data)

Expand All @@ -160,6 +172,7 @@ def process_product_method(self, instrument, prod_list, api=False):
query_out.prod_dictionary['binary_data_product_list'] = bin_dp_list
query_out.prod_dictionary['binary_image_product_list'] = bin_im_dp_list
query_out.prod_dictionary['text_product_list'] = text_dp_list
query_out.prod_dictionary['progress_product_list'] = progress_dp_list
else:
prod_name_list, file_name_list, image_list = [], [], []
for product in prod_list.prod_list:
Expand Down
39 changes: 32 additions & 7 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@
import os
from xprocess import ProcessStarter
import requests
from urllib.parse import urlparse, parse_qs
from werkzeug.wrappers import Request
from werkzeug.wrappers import Response

from pytest_httpserver.httpserver import MappingQueryMatcher

config_one_instrument = """
instruments:
Expand All @@ -18,29 +23,49 @@
dummy_cache: ""
"""


@pytest.fixture(scope="session")
def httpserver_listen_address():
return ("127.0.0.1", 9494)


def lightcurve_handler(request: Request):
    """Serve the canned lightcurve backend response.

    Returns the asynchronous-style payload when the request carries
    ``_async_request=yes``, otherwise the plain synchronous payload.
    """
    parsed_request_query = parse_qs(urlparse(request.url).query)
    # parse_qs yields lists; default mirrors an absent parameter
    async_request = parsed_request_query.get('_async_request', ['no'])[0]
    responses_path = os.path.join(os.path.dirname(__file__), 'responses')
    # single code path: only the fixture filename differs between the two modes
    response_file = 'lightcurve_async.json' if async_request == 'yes' else 'lightcurve.json'
    with open(os.path.join(responses_path, response_file), 'r') as fd:
        payload = json.load(fd)
    return Response(json.dumps(payload, indent=4), status=200, content_type='application/json')


@pytest.fixture
def mock_backend(httpserver):
    """Register canned responses on the test HTTP server, emulating the
    nb2workflow backend endpoints used by the tests.
    """
    responses_path = os.path.join(os.path.dirname(__file__), 'responses')

    def _load_json(name):
        # one canned JSON fixture per backend endpoint
        with open(os.path.join(responses_path, name), 'r') as fd:
            return json.load(fd)

    with open(os.path.join(responses_path, 'test_output.html'), 'r') as fd:
        test_output_html = fd.read()

    httpserver.expect_request('/').respond_with_data('')
    httpserver.expect_request('/api/v1.0/options').respond_with_json(_load_json('options.json'))
    # lightcurve dispatches on the _async_request query parameter
    httpserver.expect_request('/api/v1.0/get/lightcurve').respond_with_handler(lightcurve_handler)
    httpserver.expect_request('/api/v1.0/get/table').respond_with_json(_load_json('table.json'))
    httpserver.expect_request('/api/v1.0/get/ascii_binary').respond_with_json(_load_json('ascii_binary.json'))
    httpserver.expect_request('/api/v1.0/get/image').respond_with_json(_load_json('image.json'))
    httpserver.expect_request('/trace/nb2w-ylp5ovnm/lightcurve').respond_with_data(test_output_html)

@pytest.fixture(scope='session')
def conf_file(tmp_path_factory):
Expand Down
54 changes: 27 additions & 27 deletions tests/responses/lightcurve.json
Original file line number Diff line number Diff line change
@@ -1,31 +1,31 @@
{
"exceptions": [],
"jobdir": "/tmp/nb2w-ylp5ovnm",
"output": {
"comment": "TEST COMMENT",
"result": {
"data_unit_list": [
{
"binarys": "gASViAAAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSwCFlGgDjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYolDAJR0lGIu",
"data": null,
"dt": "float64",
"hdu_type": "primary",
"header": {},
"meta_data": {},
"name": "PRIMARY"
},
{
"binarys": "gASVSgIAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSw+FlGgDjAVkdHlwZZSTlGgDjAZyZWNvcmSUk5SJiIeUUpQoSwOMAXyUTowEVElNRZSMA01BR5SMBUVSUk9SlIeUfZQoaBJoDIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksAhpRoE2gZSwiGlGgUaBlLEIaUdUsYSwFLEHSUYolCaAEAAPq60f3H/+xA45OxEQyaMkAAGAI6vZ01P1vTV1/U/+xA8Z+uXLdfMkC4KvrsBFDLv73r3cDg/+xAqmwoM3ilMkBOx6cP8OHRvx4EZCLt/+xAvm2vIN1UM0AAk1ZTtRVhP4Ac6oP5/+xAP+Yv/0kiM0BQeyxPf2rBP+E0cOUFAO1ACfJh8efLMkAYPwqo3F3BP0NN9kYSAO1A45X7q0LCMkCm0LAsL9rav6RlfKgeAO1AmBRt8WoFMkAAp3+k19i0PwZ+AgorAO1AREP5KDpXMkB+rBOUTbXVP2eWiGs3AO1Afzp3ai0/MkAMhhO/YH7Kv8muDs1DAO1AebqLa4JiM0Bg3Zm/PTrfPyrHlC5QAO1AiNjhzba/M0AATzOPbCmoP4zfGpBcAO1AmGN+Sb+sMkAYA+IVlGbdP+33oPFoAO1AgNCZnSP8M0BkoSpaL2XCv08QJ1N1AO1AWkJcOFlDMkAgJoaFKaSRP5R0lGIu",
"data": null,
"dt": "(numpy.record, [('TIME', '<f8'), ('MAG', '<f8'), ('ERROR', '<f8')])",
"hdu_type": "bintable",
"header": {},
"meta_data": {},
"name": "LIGHTCURVE"
}
],
"meta_data": "{}",
"name": "lightcurve"
"exceptions": [],
"jobdir": "/tmp/nb2w-ylp5ovnm",
"output": {
"comment": "TEST COMMENT",
"result": {
"data_unit_list": [
{
"binarys": "gASViAAAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSwCFlGgDjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYolDAJR0lGIu",
"data": null,
"dt": "float64",
"hdu_type": "primary",
"header": {},
"meta_data": {},
"name": "PRIMARY"
},
{
"binarys": "gASVSgIAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSw+FlGgDjAVkdHlwZZSTlGgDjAZyZWNvcmSUk5SJiIeUUpQoSwOMAXyUTowEVElNRZSMA01BR5SMBUVSUk9SlIeUfZQoaBJoDIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksAhpRoE2gZSwiGlGgUaBlLEIaUdUsYSwFLEHSUYolCaAEAAPq60f3H/+xA45OxEQyaMkAAGAI6vZ01P1vTV1/U/+xA8Z+uXLdfMkC4KvrsBFDLv73r3cDg/+xAqmwoM3ilMkBOx6cP8OHRvx4EZCLt/+xAvm2vIN1UM0AAk1ZTtRVhP4Ac6oP5/+xAP+Yv/0kiM0BQeyxPf2rBP+E0cOUFAO1ACfJh8efLMkAYPwqo3F3BP0NN9kYSAO1A45X7q0LCMkCm0LAsL9rav6RlfKgeAO1AmBRt8WoFMkAAp3+k19i0PwZ+AgorAO1AREP5KDpXMkB+rBOUTbXVP2eWiGs3AO1Afzp3ai0/MkAMhhO/YH7Kv8muDs1DAO1AebqLa4JiM0Bg3Zm/PTrfPyrHlC5QAO1AiNjhzba/M0AATzOPbCmoP4zfGpBcAO1AmGN+Sb+sMkAYA+IVlGbdP+33oPFoAO1AgNCZnSP8M0BkoSpaL2XCv08QJ1N1AO1AWkJcOFlDMkAgJoaFKaSRP5R0lGIu",
"data": null,
"dt": "(numpy.record, [('TIME', '<f8'), ('MAG', '<f8'), ('ERROR', '<f8')])",
"hdu_type": "bintable",
"header": {},
"meta_data": {},
"name": "LIGHTCURVE"
}
],
"meta_data": "{}",
"name": "lightcurve"
}
}
}
35 changes: 35 additions & 0 deletions tests/responses/lightcurve_async.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"comment": "",
"workflow_status": "done",
"data": {
"exceptions": [],
"jobdir": "/tmp/nb2w-ylp5ovnm",
"output": {
"comment": "TEST COMMENT",
"result": {
"data_unit_list": [
{
"binarys": "gASViAAAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSwCFlGgDjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYolDAJR0lGIu",
"data": null,
"dt": "float64",
"hdu_type": "primary",
"header": {},
"meta_data": {},
"name": "PRIMARY"
},
{
"binarys": "gASVSgIAAAAAAACMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMDF9yZWNvbnN0cnVjdJSTlIwFbnVtcHmUjAduZGFycmF5lJOUSwCFlEMBYpSHlFKUKEsBSw+FlGgDjAVkdHlwZZSTlGgDjAZyZWNvcmSUk5SJiIeUUpQoSwOMAXyUTowEVElNRZSMA01BR5SMBUVSUk9SlIeUfZQoaBJoDIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksAhpRoE2gZSwiGlGgUaBlLEIaUdUsYSwFLEHSUYolCaAEAAPq60f3H/+xA45OxEQyaMkAAGAI6vZ01P1vTV1/U/+xA8Z+uXLdfMkC4KvrsBFDLv73r3cDg/+xAqmwoM3ilMkBOx6cP8OHRvx4EZCLt/+xAvm2vIN1UM0AAk1ZTtRVhP4Ac6oP5/+xAP+Yv/0kiM0BQeyxPf2rBP+E0cOUFAO1ACfJh8efLMkAYPwqo3F3BP0NN9kYSAO1A45X7q0LCMkCm0LAsL9rav6RlfKgeAO1AmBRt8WoFMkAAp3+k19i0PwZ+AgorAO1AREP5KDpXMkB+rBOUTbXVP2eWiGs3AO1Afzp3ai0/MkAMhhO/YH7Kv8muDs1DAO1AebqLa4JiM0Bg3Zm/PTrfPyrHlC5QAO1AiNjhzba/M0AATzOPbCmoP4zfGpBcAO1AmGN+Sb+sMkAYA+IVlGbdP+33oPFoAO1AgNCZnSP8M0BkoSpaL2XCv08QJ1N1AO1AWkJcOFlDMkAgJoaFKaSRP5R0lGIu",
"data": null,
"dt": "(numpy.record, [('TIME', '<f8'), ('MAG', '<f8'), ('ERROR', '<f8')])",
"hdu_type": "bintable",
"header": {},
"meta_data": {},
"name": "LIGHTCURVE"
}
],
"meta_data": "{}",
"name": "lightcurve"
}
}
}
}
Loading
Loading