Skip to content

Commit

Permalink
Merge branch 'master' into issues/751_selene_fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
terrorfisch committed Jun 23, 2023
2 parents 0b311ab + 754c69f commit 3a334c1
Show file tree
Hide file tree
Showing 7 changed files with 199 additions and 180 deletions.
93 changes: 91 additions & 2 deletions qupulse/_program/tabor.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import sys
from typing import NamedTuple, Optional, List, Generator, Tuple, Sequence, Mapping, Union, Dict, FrozenSet, cast
from typing import NamedTuple, Optional, List, Generator, Tuple, Sequence, Mapping, Union, Dict, FrozenSet, cast,\
Hashable
from enum import Enum
import operator
from collections import OrderedDict
Expand All @@ -10,7 +11,7 @@

from qupulse.utils.types import ChannelID, TimeType
from qupulse.hardware.awgs.base import ProgramEntry
from qupulse.hardware.util import get_sample_times, voltage_to_uint16
from qupulse.hardware.util import get_sample_times, voltage_to_uint16, find_positions
from qupulse._program.waveforms import Waveform
from qupulse._program._loop import Loop
from qupulse._program.volatile import VolatileRepetitionCount, VolatileProperty
Expand Down Expand Up @@ -726,3 +727,91 @@ def parse_single_seq_program(program: Loop, used_channels: FrozenSet[ChannelID])
waveforms=tuple(waveforms.keys()),
volatile_parameter_positions=volatile_parameter_positions
)


def find_place_for_segments_in_memory(
        current_segment_hashes: np.ndarray,
        current_segment_references: np.ndarray,
        current_segment_capacities: np.ndarray,
        total_capacity: int,
        new_segment_hashes: Sequence[int],
        new_segment_lengths: Sequence[int]) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Decide where each requested segment goes in the device segment memory.

    Placement strategy, in order:
      1. Reuse memory slots whose hash already matches a requested segment.
      2. Put remaining segments into free slots of exactly matching capacity.
      3. Put remaining segments into larger free slots, trying the largest
         segments first.
      4. Everything still unplaced is marked to be amended (appended) at the
         end of the occupied memory region.

    The three ``current_*`` arrays describe the present memory layout slot by
    slot and are not modified; the reference counters are copied before use.

    :param current_segment_hashes: Hash value stored per memory slot.
    :param current_segment_references: Reference count per slot; 0 marks a free slot.
    :param current_segment_capacities: Capacity of each slot in sample points.
    :param total_capacity: Total number of sample points the memory can hold.
    :param new_segment_hashes: Hash of each segment that shall be placed.
    :param new_segment_lengths: Length in sample points of each segment to place.
    :return: ``(waveform_to_segment, to_amend, to_insert)`` where
        ``waveform_to_segment[i]`` is the slot already holding segment ``i`` (or -1),
        ``to_amend[i]`` is True if segment ``i`` must be appended at the end, and
        ``to_insert[i]`` is the free slot chosen for segment ``i`` (or -1).
    :raises RuntimeError: If total free memory is too small, or fragmentation
        leaves too little contiguous space at the end for the amend step.
    """
    new_segment_hashes = np.asarray(new_segment_hashes)
    new_segment_lengths = np.asarray(new_segment_lengths)

    # slot index of each requested hash in the current memory, -1 if absent
    waveform_to_segment = find_positions(current_segment_hashes, new_segment_hashes)

    # separate into known and unknown
    unknown = waveform_to_segment == -1
    known = ~unknown

    known_pos_in_memory = waveform_to_segment[known]

    assert len(known_pos_in_memory) == 0 or np.all(current_segment_hashes[known_pos_in_memory] == new_segment_hashes[known])

    # work on a copy so the caller's reference counters stay untouched
    new_reference_counter = current_segment_references.copy()
    new_reference_counter[known_pos_in_memory] += 1

    # + 16 extra sample points per segment — presumably per-segment device
    # overhead/padding; TODO confirm against the hardware documentation
    to_upload_size = np.sum(new_segment_lengths[unknown] + 16)
    free_points_in_total = total_capacity - np.sum(current_segment_capacities[current_segment_references > 0])
    if free_points_in_total < to_upload_size:
        raise RuntimeError(f'Not enough free memory. Required {to_upload_size}. Available: {free_points_in_total}')

    # NOTE: `cast` does not copy — to_amend aliases `unknown`, which is not
    # read again below, so mutating it in the placement loops is safe.
    to_amend = cast(np.ndarray, unknown)
    to_insert = np.full(len(new_segment_hashes), fill_value=-1, dtype=np.int64)

    # first_free: slot index one past the last referenced slot; only slots
    # before it can be reused in place, everything after is "end of memory"
    reserved_indices = np.flatnonzero(new_reference_counter > 0)
    first_free = reserved_indices[-1] + 1 if len(reserved_indices) else 0

    free_segments = new_reference_counter[:first_free] == 0
    free_segment_count = np.sum(free_segments)

    # look for a free segment place with the same length
    for segment_idx in np.flatnonzero(to_amend):
        if free_segment_count == 0:
            break

        pos_of_same_length = np.logical_and(free_segments,
                                            new_segment_lengths[segment_idx] == current_segment_capacities[:first_free])
        idx_same_length = np.argmax(pos_of_same_length)
        # argmax returns 0 on an all-False mask, so verify the hit is real
        if pos_of_same_length[idx_same_length]:
            free_segments[idx_same_length] = False
            free_segment_count -= 1

            to_amend[segment_idx] = False
            to_insert[segment_idx] = idx_same_length

    # try to find places that are larger than the segments to fit in starting with the large segments and large
    # free spaces
    segment_indices = np.flatnonzero(to_amend)[np.argsort(new_segment_lengths[to_amend], kind='stable')[::-1]]
    capacities = current_segment_capacities[:first_free]
    for segment_idx in segment_indices:
        free_capacities = capacities[free_segments]
        free_segments_indices = np.flatnonzero(free_segments)[np.argsort(free_capacities, kind='stable')[::-1]]

        if len(free_segments_indices) == 0:
            break

        # NOTE(review): free_capacities is in slot order while
        # free_segments_indices is sorted by descending capacity, so the two
        # indexing steps below use different orderings. A pick that does not
        # fit is caught by the capacity check (the segment is then amended
        # instead), and the repository's tests pin the resulting behavior —
        # confirm this selection order is intentional before changing it.
        fitting_segment = np.argmax((free_capacities >= new_segment_lengths[segment_idx])[::-1])
        fitting_segment = free_segments_indices[fitting_segment]
        if current_segment_capacities[fitting_segment] >= new_segment_lengths[segment_idx]:
            free_segments[fitting_segment] = False
            to_amend[segment_idx] = False
            to_insert[segment_idx] = fitting_segment

    # everything still marked to_amend must fit behind the last reserved slot
    free_points_at_end = total_capacity - np.sum(current_segment_capacities[:first_free])
    if np.sum(new_segment_lengths[to_amend] + 16) > free_points_at_end:
        raise RuntimeError('Fragmentation does not allow upload.',
                           np.sum(new_segment_lengths[to_amend] + 16),
                           free_points_at_end)

    return waveform_to_segment, to_amend, to_insert
94 changes: 12 additions & 82 deletions qupulse/hardware/awgs/tabor.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@

from qupulse.utils.types import ChannelID
from qupulse._program._loop import Loop, make_compatible
from qupulse.hardware.util import voltage_to_uint16, find_positions, traced
from qupulse.hardware.util import voltage_to_uint16, traced
from qupulse.hardware.awgs.base import AWG, AWGAmplitudeOffsetHandling
from qupulse._program.tabor import TaborSegment, TaborException, TaborProgram, PlottableProgram, TaborSequencing,\
make_combined_wave
make_combined_wave, find_place_for_segments_in_memory


__all__ = ['TaborAWGRepresentation', 'TaborChannelPair']
Expand Down Expand Up @@ -543,87 +543,16 @@ def clear(self) -> None:
self.change_armed_program(None)

def _find_place_for_segments_in_memory(self, segments: Sequence, segment_lengths: Sequence) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""
1. Find known segments
2. Find empty spaces with fitting length
3. Find empty spaces with bigger length
4. Amend remaining segments
:param segments:
:param segment_lengths:
:return:
"""
segment_hashes = np.fromiter((hash(segment) for segment in segments), count=len(segments), dtype=np.int64)

waveform_to_segment = find_positions(self._segment_hashes, segment_hashes)

# separate into known and unknown
unknown = (waveform_to_segment == -1)
known = ~unknown

known_pos_in_memory = waveform_to_segment[known]

assert len(known_pos_in_memory) == 0 or np.all(self._segment_hashes[known_pos_in_memory] == segment_hashes[known])

new_reference_counter = self._segment_references.copy()
new_reference_counter[known_pos_in_memory] += 1

to_upload_size = np.sum(segment_lengths[unknown] + 16)
free_points_in_total = self.total_capacity - np.sum(self._segment_capacity[self._segment_references > 0])
if free_points_in_total < to_upload_size:
raise MemoryError('Not enough free memory',
free_points_in_total,
to_upload_size,
self._free_points_in_total)

to_amend = cast(np.ndarray, unknown)
to_insert = np.full(len(segments), fill_value=-1, dtype=np.int64)

reserved_indices = np.flatnonzero(new_reference_counter > 0)
first_free = reserved_indices[-1] + 1 if len(reserved_indices) else 0
segment_hashes = np.fromiter((hash(segment) for segment in segments), dtype=np.int64, count=len(segments))

free_segments = new_reference_counter[:first_free] == 0
free_segment_count = np.sum(free_segments)

# look for a free segment place with the same length
for segment_idx in np.flatnonzero(to_amend):
if free_segment_count == 0:
break

pos_of_same_length = np.logical_and(free_segments, segment_lengths[segment_idx] == self._segment_capacity[:first_free])
idx_same_length = np.argmax(pos_of_same_length)
if pos_of_same_length[idx_same_length]:
free_segments[idx_same_length] = False
free_segment_count -= 1

to_amend[segment_idx] = False
to_insert[segment_idx] = idx_same_length

# try to find places that are larger than the segments to fit in starting with the large segments and large
# free spaces
segment_indices = np.flatnonzero(to_amend)[np.argsort(segment_lengths[to_amend])[::-1]]
capacities = self._segment_capacity[:first_free]
for segment_idx in segment_indices:
free_capacities = capacities[free_segments]
free_segments_indices = np.flatnonzero(free_segments)[np.argsort(free_capacities)[::-1]]

if len(free_segments_indices) == 0:
break

fitting_segment = np.argmax((free_capacities >= segment_lengths[segment_idx])[::-1])
fitting_segment = free_segments_indices[fitting_segment]
if self._segment_capacity[fitting_segment] >= segment_lengths[segment_idx]:
free_segments[fitting_segment] = False
to_amend[segment_idx] = False
to_insert[segment_idx] = fitting_segment

free_points_at_end = self.total_capacity - np.sum(self._segment_capacity[:first_free])
if np.sum(segment_lengths[to_amend] + 16) > free_points_at_end:
raise MemoryError('Fragmentation does not allow upload.',
np.sum(segment_lengths[to_amend] + 16),
free_points_at_end,
self._free_points_at_end)

return waveform_to_segment, to_amend, to_insert
return find_place_for_segments_in_memory(
current_segment_hashes=self._segment_hashes,
current_segment_capacities=self._segment_capacity,
current_segment_references=self._segment_references,
total_capacity=self.total_capacity,
new_segment_lengths=segment_lengths,
new_segment_hashes=segment_hashes
)

@with_select
@with_configuration_guard
Expand Down Expand Up @@ -953,3 +882,4 @@ def reset_device(self):
self.device.reset()
elif isinstance(self.device, TaborChannelPair):
self.device.clear()

2 changes: 1 addition & 1 deletion qupulse/hardware/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def voltage_to_uint16(voltage: np.ndarray, output_amplitude: float, output_offse
def find_positions(data: Sequence, to_find: Sequence) -> np.ndarray:
"""Find indices of the first occurrence of the elements of to_find in data. Elements that are not in data result in
-1"""
data_sorter = np.argsort(data)
data_sorter = np.argsort(data, kind='stable')

pos_left = np.searchsorted(data, to_find, side='left', sorter=data_sorter)
pos_right = np.searchsorted(data, to_find, side='right', sorter=data_sorter)
Expand Down
4 changes: 2 additions & 2 deletions qupulse/utils/sympy.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
except ImportError:
_special_functions = {fname: numpy.vectorize(fobject)
for fname, fobject in math.__dict__.items()
if not fname.startswith('_') and fname not in numpy.__dict__}
if callable(fobject) and not fname.startswith('_') and fname not in numpy.__dict__}
warnings.warn('scipy is not installed. This reduces the set of available functions to those present in numpy + '
'manually vectorized functions in math.')

Expand Down Expand Up @@ -363,7 +363,7 @@ def recursive_substitution(expression: sympy.Expr,
return _recursive_substitution(expression, substitutions)


_base_environment = {'builtins': builtins, '__builtins__': builtins}
_base_environment = {'builtins': builtins, '__builtins__': builtins, 'math': math}
_math_environment = {**_base_environment, **math.__dict__}
_numpy_environment = {**_base_environment, **numpy.__dict__}
_sympy_environment = {**_base_environment, **sympy.__dict__}
Expand Down
89 changes: 88 additions & 1 deletion tests/_program/tabor_tests.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import unittest
import itertools
import numpy as np
from copy import deepcopy
from qupulse.utils.types import FrozenDict
from unittest import mock

Expand All @@ -15,7 +16,7 @@
except ImportError:
pytabor = None

from qupulse._program.tabor import TaborException, TaborProgram, \
from qupulse._program.tabor import TaborException, TaborProgram, find_place_for_segments_in_memory,\
TaborSegment, TaborSequencing, PlottableProgram, TableDescription, make_combined_wave, TableEntry
from qupulse._program._loop import Loop
from qupulse._program.volatile import VolatileRepetitionCount
Expand Down Expand Up @@ -704,3 +705,89 @@ def exec_general(self, data_1, data_2, fill_value=None):

with self.assertRaises(ValueError):
make_combined_wave(tabor_segments, destination_array=np.ones(16))


class TaborMemoryManagementTests(unittest.TestCase):
    """Tests for :func:`find_place_for_segments_in_memory`."""

    def _assert_placement(self, memory_kwargs, hashes, lengths,
                          expected_w2s, expected_ta, expected_ti):
        """Run the placement and compare against the expected result.

        Also checks that the arrays describing the current memory layout are
        not modified by the call.
        """
        snapshot = deepcopy(memory_kwargs)
        w2s, ta, ti = find_place_for_segments_in_memory(
            new_segment_hashes=hashes, new_segment_lengths=lengths, **memory_kwargs)
        self.assertEqual(w2s.tolist(), expected_w2s)
        self.assertEqual(ta.tolist(), expected_ta)
        self.assertEqual(ti.tolist(), expected_ti)
        np.testing.assert_equal(memory_kwargs, snapshot)

    def test_find_place_for_segments_in_memory(self):
        kwargs = dict(
            total_capacity=2 ** 20,
            current_segment_capacities=np.asarray([], dtype=np.uint32),
            current_segment_hashes=np.asarray([], dtype=np.int64),
            current_segment_references=np.asarray([], dtype=np.int32),
        )
        hashes = np.asarray([-5, -6, -7, -8, -9])
        lengths = 192 + np.asarray([32, 16, 64, 32, 16])

        # empty memory: everything has to be amended at the end
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, -1, -1, -1],
                               [True, True, True, True, True],
                               [-1, -1, -1, -1, -1])

        # fully referenced memory without matching hashes: amend everything
        kwargs['current_segment_capacities'] = 192 + np.asarray([0, 16, 32, 16, 0], dtype=np.uint32)
        kwargs['current_segment_hashes'] = np.asarray([1, 2, 3, 4, 5], dtype=np.int64)
        kwargs['current_segment_references'] = np.asarray([1, 1, 1, 2, 1], dtype=np.int32)
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, -1, -1, -1],
                               [True, True, True, True, True],
                               [-1, -1, -1, -1, -1])

        # some hashes already present in memory are reused
        kwargs['current_segment_capacities'] = 192 + np.asarray([0, 16, 32, 64, 0, 16], dtype=np.uint32)
        kwargs['current_segment_hashes'] = np.asarray([1, 2, 3, -7, 5, -9], dtype=np.int64)
        kwargs['current_segment_references'] = np.asarray([1, 1, 1, 2, 1, 3], dtype=np.int32)
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, 3, -1, 5],
                               [True, True, False, True, False],
                               [-1, -1, -1, -1, -1])

        # free slots with exactly matching capacity are filled in place
        kwargs['current_segment_capacities'] = 192 + np.asarray([0, 16, 32, 16, 0, 16], dtype=np.uint32)
        kwargs['current_segment_hashes'] = np.asarray([1, 2, 3, 4, 5, 6], dtype=np.int64)
        kwargs['current_segment_references'] = np.asarray([1, 0, 1, 0, 1, 3], dtype=np.int32)
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, -1, -1, -1],
                               [True, False, False, True, True],
                               [-1, 1, 3, -1, -1])

        # free slots larger than the segment are used as well
        kwargs['current_segment_capacities'] = 192 + np.asarray([0, 80, 32, 64, 96, 16], dtype=np.uint32)
        kwargs['current_segment_hashes'] = np.asarray([1, 2, 3, 4, 5, 6], dtype=np.int64)
        kwargs['current_segment_references'] = np.asarray([1, 0, 1, 1, 0, 3], dtype=np.int32)
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, -1, -1, -1],
                               [True, True, False, False, True],
                               [-1, -1, 4, 1, -1])

        # mixture of known, inserted and amended segments
        hashes = np.asarray([-5, -6, -7, -8, -9, -10, -11])
        lengths = 192 + np.asarray([32, 16, 64, 32, 16, 0, 0])
        kwargs['current_segment_capacities'] = 192 + np.asarray([0, 80, 32, 64, 32, 16], dtype=np.uint32)
        kwargs['current_segment_hashes'] = np.asarray([1, 2, 3, 4, -8, 6], dtype=np.int64)
        kwargs['current_segment_references'] = np.asarray([1, 0, 1, 0, 1, 0], dtype=np.int32)
        self._assert_placement(kwargs, hashes, lengths,
                               [-1, -1, -1, 4, -1, -1, -1],
                               [False, True, False, False, True, True, True],
                               [1, -1, 3, -1, -1, -1, -1])
Loading

0 comments on commit 3a334c1

Please sign in to comment.