diff --git a/.github/workflows/qiita-ci.yml b/.github/workflows/qiita-ci.yml
index 2960caf6e..51bae2a9b 100644
--- a/.github/workflows/qiita-ci.yml
+++ b/.github/workflows/qiita-ci.yml
@@ -104,9 +104,7 @@ jobs:
       - name: Install plugins
         shell: bash -l {0}
         run: |
-          wget https://data.qiime2.org/distro/core/qiime2-2022.11-py38-linux-conda.yml
-          conda env create --quiet -n qtp-biom --file qiime2-2022.11-py38-linux-conda.yml
-          rm qiime2-2022.11-py38-linux-conda.yml
+          conda env create -n qtp-biom --file https://data.qiime2.org/distro/amplicon/qiime2-amplicon-2024.5-py39-linux-conda.yml
           export QIITA_ROOTCA_CERT=`pwd`/qiita_core/support_files/ci_rootca.crt
           export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
           export REDBIOM_HOST="http://localhost:7379"
diff --git a/qiita_db/artifact.py b/qiita_db/artifact.py
index c19648276..f116236f7 100644
--- a/qiita_db/artifact.py
+++ b/qiita_db/artifact.py
@@ -1342,23 +1342,6 @@ def _helper(sql_edges, edges, nodes):
                     # If the job is in success we don't need to do anything
                     # else since it would've been added by the code above
                     if jstatus != 'success':
-                        # Connect the job with his input artifacts, the
-                        # input artifacts may or may not exist yet, so we
-                        # need to check both the input_artifacts and the
-                        # pending properties
-                        for in_art in n_obj.input_artifacts:
-                            iid = in_art.id
-                            if iid not in nodes and iid in extra_nodes:
-                                nodes[iid] = extra_nodes[iid]
-                            _add_edge(edges, nodes[iid], nodes[n_obj.id])
-
-                        pending = n_obj.pending
-                        for pred_id in pending:
-                            for pname in pending[pred_id]:
-                                in_node_id = '%s:%s' % (
-                                    pred_id, pending[pred_id][pname])
-                                _add_edge(edges, nodes[in_node_id],
-                                          nodes[n_obj.id])
 
                         if jstatus != 'error':
                             # If the job is not errored, we can add the
@@ -1380,6 +1363,34 @@ def _helper(sql_edges, edges, nodes):
                                     queue.append(cjob.id)
                                     if cjob.id not in nodes:
                                         nodes[cjob.id] = ('job', cjob)
+
+                                    # including the outputs
+                                    for o_name, o_type in cjob.command.outputs:
+                                        node_id = '%s:%s' % (cjob.id, o_name)
+                                        node = TypeNode(
+                                            id=node_id, job_id=cjob.id,
+                                            name=o_name, type=o_type)
+                                        if node_id not in nodes:
+                                            nodes[node_id] = ('type', node)
+
+                        # Connect the job with its input artifacts; the
+                        # input artifacts may or may not exist yet, so we
+                        # need to check both the input_artifacts and the
+                        # pending properties
+                        for in_art in n_obj.input_artifacts:
+                            iid = in_art.id
+                            if iid not in nodes and iid in extra_nodes:
+                                nodes[iid] = extra_nodes[iid]
+                            _add_edge(edges, nodes[iid], nodes[n_obj.id])
+
+                        pending = n_obj.pending
+                        for pred_id in pending:
+                            for pname in pending[pred_id]:
+                                in_node_id = '%s:%s' % (
+                                    pred_id, pending[pred_id][pname])
+                                _add_edge(edges, nodes[in_node_id],
+                                          nodes[n_obj.id])
+
                 elif n_type == 'type':
                     # Connect this 'future artifact' with the job that will
                     # generate it
diff --git a/qiita_db/support_files/patches/test_db_sql/91.sql b/qiita_db/support_files/patches/91.sql
similarity index 100%
rename from qiita_db/support_files/patches/test_db_sql/91.sql
rename to qiita_db/support_files/patches/91.sql
diff --git a/qiita_db/util.py b/qiita_db/util.py
index e77648cb0..839e4ff40 100644
--- a/qiita_db/util.py
+++ b/qiita_db/util.py
@@ -2775,7 +2775,7 @@ def update_resource_allocation_table(weeks=1, test=None):
     sacct = [
         'sacct', '-p',
         '--format=JobID,ElapsedRaw,MaxRSS,Submit,Start,End,CPUTimeRAW,'
-        'ReqMem,AllocCPUs,AveVMSize', '--starttime',
+        'ReqMem,AllocCPUs,AveVMSize,MaxVMSizeNode', '--starttime',
         dates[0].strftime('%Y-%m-%d'), '--endtime',
         dates[1].strftime('%Y-%m-%d'), '--user', 'qiita', '--state', 'CD']
 
@@ -2894,6 +2894,7 @@ def merge_rows(rows):
     df['MaxRSSRaw'] = df.MaxRSS.apply(lambda x: MaxRSS_helper(str(x)))
     df['ElapsedRawTime'] = df.ElapsedRaw.apply(
         lambda x: timedelta(seconds=float(x)))
+    df.replace({np.nan: None}, inplace=True)
 
     for index, row in df.iterrows():
         with qdb.sql_connection.TRN:
diff --git a/setup.py b/setup.py
index caa72a46f..6f641a609 100644
--- a/setup.py
+++ b/setup.py
@@ -105,7 +105,7 @@
       install_requires=['psycopg2', 'click', 'bcrypt', 'pandas<2.0',
                         'biom-format', 'tornado<6.0', 'toredis', 'redis',
                         'scp', 'pyparsing', 'h5py', 'natsort', 'nose', 'pep8',
-                        'networkx', 'humanize', 'wtforms<3.0.0', 'nltk',
+                        'networkx', 'humanize', 'wtforms<3.0.0', 'nltk<=3.8.1',
                         'openpyxl', 'sphinx-bootstrap-theme', 'Sphinx<3.0',
                         'gitpython', 'redbiom', 'pyzmq', 'sphinx_rtd_theme',
                         'paramiko', 'seaborn', 'matplotlib', 'scipy<=1.10.1',
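
Note on the qiita_db/util.py change above: pandas represents empty sacct fields (such as MaxVMSizeNode for jobs where it is not reported) as np.nan, and replacing NaN with None lets the subsequent inserts store those cells as SQL NULL instead of a float NaN. A minimal sketch of the behavior, illustrative only and not part of the patch (the column name is borrowed from the new --format string):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'MaxVMSizeNode': ['node-1', np.nan]})
    # NaN cells become None; psycopg2 adapts None to SQL NULL on insert
    df.replace({np.nan: None}, inplace=True)
    print(df.loc[1, 'MaxVMSizeNode'])  # None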