Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into nearest_node
Browse files Browse the repository at this point in the history
  • Loading branch information
perlman committed May 4, 2023
2 parents 05fe378 + 4ec48dc commit f1f45b8
Show file tree
Hide file tree
Showing 8 changed files with 603 additions and 216 deletions.
14 changes: 9 additions & 5 deletions .github/workflows/run-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.python-version }}-${{ matrix.igraph }}
cancel-in-progress: true
strategy:
fail-fast: false
matrix:
python-version:
- '3.7'
Expand All @@ -14,11 +18,11 @@ jobs:
- '3.10'
igraph: ["igraph", "no-igraph"]
steps:
# This cancels any such job that is still running
- name: Cancel Previous Runs
uses: styfle/[email protected]
with:
access_token: ${{ github.token }}
# # This cancels any such job that is still running
# - name: Cancel Previous Runs
# uses: styfle/[email protected]
# with:
# access_token: ${{ github.token }}
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
Expand Down
2 changes: 2 additions & 0 deletions docs/source/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ Functions to fetch annotations:
pymaid.get_annotations
pymaid.get_annotation_details
pymaid.get_user_annotations
pymaid.get_annotation_id
pymaid.get_entity_graph

Nodes
-----
Expand Down
23 changes: 3 additions & 20 deletions docs/source/python2cytoscape.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -745,7 +745,7 @@
"Updating networks\n",
"+++++++++++++++++\n",
"\n",
"Another powerful usage of the cyREST API is to have scripts running in the background to automatically update, layout or style networks. Pymaid's :func:`~pymaid.cytoscape.watch_network` lets you constantly update a defined network:\n",
"Another powerful usage of the cyREST API is to have scripts running in the background to automatically update, layout or style networks. Pymaid's ``pymaid.cytoscape.watch_network`` lets you constantly update a defined network:\n",
"\n",
"In this example, we are watching a set of *\"seed\"* neurons and their direct downstream partners. "
]
Expand Down Expand Up @@ -774,7 +774,7 @@
"source": [
"This function runs in an infinite loop. In order to interrupt it, simply press ``CTRL-C`` if you are in a terminal. In a Jupyter notebook, hit the **stop** button in the top toolbar.\n",
"\n",
"Similar to the earlier example, :func:`~pymaid.cytoscape.watch_network` also allows you to collapse neurons into groups:"
"Similar to the earlier example, ``pymaid.cytoscape.watch_network`` also allows you to collapse neurons into groups:"
]
},
{
Expand All @@ -792,25 +792,8 @@
" group_by={'exc DA1': pymaid.get_skids_by_annotation('glomerulus DA1 right excitatory')}\n",
" )"
]
},
{
"metadata": {
"raw_mimetype": "text/restructuredtext"
},
"cell_type": "raw",
"source": [
"Pymaid-Cytoscape wrappers\n",
"=========================\n",
"\n",
".. autosummary::\n",
" :toctree: generated/\n",
"\n",
" pymaid.cytoscape.generate_network\n",
" pymaid.cytoscape.get_client\n",
" pymaid.cytoscape.watch_network"
]
}
],
"nbformat_minor": 2,
"nbformat": 4
}
}
6 changes: 5 additions & 1 deletion docs/source/whats_new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ What's new?
* - Version
- Date
-
* - 2.4.0
- 27/05/23
- - :func:`pymaid.get_annotation_graph` deprecated in favour of the new
:func:`pymaid.get_entity_graph`.
* - 2.1.0
- 04/04/22
- With this release we mainly follow some renamed functions in ``navis`` but
Expand Down Expand Up @@ -115,7 +119,7 @@ What's new?
- various bugfixes
* - 0.89
- 14/08/18
- - new function: :func:`~pymaid.cytoscape.watch_network` constantly pushes updates Cytoscape
- - new function: ``pymaid.cytoscape.watch_network`` constantly pushes updates to Cytoscape
- new function: :func:`~pymaid.get_nth_partners` returns neurons connected via n hops
- by default, :func:`~pymaid.plot3d` now chooses the backend automatically: vispy for terminal sessions, plotly for Jupyter notebook/lab
- :func:`~pymaid.get_skids_by_annotation` now accepts negative search criteria
Expand Down
4 changes: 2 additions & 2 deletions pymaid/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
__version__ = "2.3.0"
__version_vector__ = (2, 3, 0)
__version__ = "2.4.0"
__version_vector__ = (2, 4, 0)

from . import config

Expand Down
191 changes: 4 additions & 187 deletions pymaid/fetch/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,14 +56,16 @@
from navis import in_volume
from .landmarks import get_landmarks, get_landmark_groups
from .skeletons import get_skeleton_ids
from .annotations import get_annotation_graph, get_entity_graph, get_annotation_id


__all__ = ['get_annotation_details', 'get_annotation_id',
'get_annotation_list', 'get_annotations', 'get_annotation_graph',
'get_arbor',
'get_connector_details', 'get_connectors',
'get_connector_tags',
'get_contributor_statistics', 'get_edges', 'get_history',
'get_contributor_statistics', 'get_edges', 'get_entity_graph',
'get_history',
'get_logs', 'get_names', 'get_neuron',
'get_neurons', 'get_neurons_in_bbox',
'get_neurons_in_volume', 'get_node_tags', 'get_node_details',
Expand Down Expand Up @@ -1902,191 +1904,6 @@ def get_annotations(x, remote_instance=None):
'No annotations retrieved. Make sure that the skeleton IDs exist.')


def _entities_to_ann_graph(data, annotations_by_id=False, skeletons_by_id=True):
    """Build a directed graph of (meta)annotations and skeletons.

    Parameters
    ----------
    data : dict
        Response payload of CATMAID's ``annotations/query-targets`` endpoint:
        a dict with an ``"entities"`` list, each entity carrying at least
        ``"type"``, ``"id"`` and ``"name"``; neurons additionally carry
        ``"skeleton_ids"`` and any entity may carry ``"annotations"``.
    annotations_by_id : bool, default False
        If True, key annotation nodes by their integer ID instead of name.
    skeletons_by_id : bool, default True
        If True, key skeleton nodes by their skeleton ID instead of the
        neuron name.

    Returns
    -------
    networkx.DiGraph
        Edges point from an annotation to the entity it annotates; edges
        between two annotations are flagged ``is_meta_annotation=True``.
    """
    ann_ref = "id" if annotations_by_id else "name"
    skel_ref = "id" if skeletons_by_id else "name"

    g = nx.DiGraph()

    for e in data["entities"]:
        is_meta_ann = False

        if e.get("type") == "neuron":
            skids = e.get("skeleton_ids") or []
            # A neuron modelled by zero or several skeletons cannot be
            # represented as a single skeleton node -- warn and skip it.
            if len(skids) != 1:
                logger.warning("Neuron with id %s is modelled by %s skeletons, ignoring", e["id"], len(skids))
                continue
            node_data = {
                "name": e["name"],
                "neuron_id": e["id"],
                "is_skeleton": True,
                "id": skids[0],
            }
            node_id = node_data[skel_ref]
        else:  # is an annotation
            node_data = {
                "is_skeleton": False,
                "id": e["id"],
                "name": e["name"],
            }
            node_id = node_data[ann_ref]
            is_meta_ann = True

        anns = e.get("annotations", [])
        if not anns:
            # Entity is not annotated: add it as an isolated node.
            g.add_node(node_id, **node_data)
            continue

        # Re-use the already-fetched list rather than a second dict lookup.
        for ann in anns:
            g.add_edge(
                ann[ann_ref],
                node_id,
                is_meta_annotation=is_meta_ann,
            )

        g.nodes[node_id].update(**node_data)

    return g


@cache.undo_on_error
def get_annotation_graph(annotations_by_id=False, skeletons_by_id=True, remote_instance=None) -> nx.DiGraph:
    """Fetch a networkx DiGraph of (meta)annotations and skeletons.

    Can be slow for large projects.

    Skeleton nodes carry ``id``, ``is_skeleton=True``, ``neuron_id``
    (different to the skeleton ID) and ``name``. Annotation nodes carry
    ``id``, ``name`` and ``is_skeleton=False``. Edges carry
    ``is_meta_annotation`` (whether it is between two annotations).

    Parameters
    ----------
    annotations_by_id : bool, default False
        Whether to index nodes representing annotations by their integer ID
        (uses name by default).
    skeletons_by_id : bool, default True
        Whether to index nodes representing skeletons by their integer ID
        (True by default, otherwise uses the neuron name).
    remote_instance : optional CatmaidInstance

    Returns
    -------
    networkx.DiGraph
    """
    remote_instance = utils._eval_remote_instance(remote_instance)

    # Query all entities in the project together with their annotations.
    url = remote_instance.make_url(
        remote_instance.project_id, "annotations", "query-targets"
    )
    response = remote_instance.fetch(url, {"with_annotations": True})

    return _entities_to_ann_graph(response, annotations_by_id, skeletons_by_id)


def filter_by_query(names: pd.Series, query: str, allow_partial: bool = False) -> pd.Series:
    """Return a boolean mask over ``names`` selecting entries matching ``query``.

    Parameters
    ----------
    names : pd.Series of str
        Strings to filter (list-likes are coerced into a Series).
    query : str
        Query string. A leading "annotation:" or "~" is stripped (with a
        warning). A leading "/" means the remainder is used as a regex
        matched against the start of each name.
    allow_partial : bool, default False
        For non-regex queries, whether containment is enough or an exact
        match is required.

    Returns
    -------
    pd.Series of bool
        True where the corresponding name matches the query.
    """
    if not isinstance(names, pd.Series):
        names = pd.Series(names, dtype=str)

    # Strip CATMAID-style query prefixes, warning so the caller knows.
    for prefix in ("annotation:", "~"):
        if query.startswith(prefix):
            logger.warning("Removing '%s' prefix from '%s'", prefix, query)
            query = query[len(prefix):]

    stripped = query.strip()

    if stripped.startswith("/"):
        # Regex query: match against the start of each name.
        mask = names.str.match(stripped[1:])
    else:
        mask = names.str.contains(stripped, regex=False)
        if not allow_partial:
            # Containment plus equal length is equivalent to exact equality.
            mask &= names.str.len() == len(stripped)

    return mask


@cache.wipe_and_retry
def get_annotation_id(annotations, allow_partial=False, raise_not_found=True,
                      remote_instance=None):
    """Retrieve the annotation ID for single or list of annotation(s).

    Parameters
    ----------
    annotations : str | list of str
        Single annotations or list of multiple annotations.
    allow_partial : bool, optional
        If True, will allow partial matches.
    raise_not_found : bool, optional
        If True raise Exception if no match for any of the
        query annotations is found. Else log warning.
    remote_instance : CatmaidInstance, optional
        If not passed directly, will try using global.

    Returns
    -------
    dict
        ``{'annotation_name': 'annotation_id', ...}``
    """
    remote_instance = utils._eval_remote_instance(remote_instance)

    logger.debug('Retrieving list of annotations...')

    # Fetch the project's full annotation list once and search it locally.
    url = remote_instance._get_annotation_list()
    listing = pd.DataFrame.from_records(
        remote_instance.fetch(url)['annotations']
    )

    matches = {}
    for query in utils._make_iterable(annotations):
        mask = filter_by_query(listing.name, query, allow_partial)
        hits = listing[mask].set_index('name').id.to_dict()
        if not hits:
            logger.warning('No annotation found for "{}"'.format(query))
        matches.update(hits)

    if not matches:
        # Nothing matched any query: escalate or warn, per caller's choice.
        if raise_not_found:
            raise Exception('No matching annotation(s) found')
        logger.warning('No matching annotation(s) found')

    return matches


@cache.undo_on_error
def find_nodes(tags=None, node_ids=None, skeleton_ids=None,
Expand Down Expand Up @@ -3933,7 +3750,7 @@ def get_paths(sources, targets, n_hops=2, min_synapses=1, return_graph=False,
targets = utils._make_iterable(targets).astype(int)
sources = utils._make_iterable(sources).astype(int)

if isinstance(n_hops, (int, np.int)):
if isinstance(n_hops, (int, np.integer)):
n_hops = [n_hops]

if not utils._is_iterable(n_hops):
Expand Down
Loading

0 comments on commit f1f45b8

Please sign in to comment.