Skip to content

Commit

Permalink
[minor] rm the __all__ to more cleanly fix operator alias bug (#257)
Browse files Browse the repository at this point in the history
  • Loading branch information
01110011011101010110010001101111 authored Jul 3, 2024
1 parent ba8321c commit c6f8e8b
Showing 1 changed file with 5 additions and 12 deletions.
17 changes: 5 additions & 12 deletions torchquantum/operator/standard_gates/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
from .xx_min_yy import XXMINYY
from .xx_plus_yy import XXPLUSYY

all_variables = [
_all_variables = [
EchoedCrossResonance,
ECR,
GlobalPhase,
Expand Down Expand Up @@ -127,16 +127,8 @@
XXPLUSYY,
]

__all__ = [a().__class__.__name__ for a in all_variables]

# add the aliased and incompatible classes
__all__.extend(["U", "CH", "QubitUnitary", "QubitUnitaryFast"])

# add the dictionary
__all__.extend(["op_name_dict", "fixed_ops", "parameterized_ops"])

# create the operations dictionary
op_name_dict = {x.op_name: x for x in all_variables}
op_name_dict = {_x.op_name: _x for _x in _all_variables}

# add aliases as well
op_name_dict.update(
Expand All @@ -161,5 +153,6 @@
}
)

fixed_ops = [a().__class__.__name__ for a in all_variables if a.num_params == 0]
parameterized_ops = [a().__class__.__name__ for a in all_variables if a.num_params > 0]
# TODO: make this compatible with aliases
fixed_ops = [_a().__class__.__name__ for _a in _all_variables if _a.num_params == 0]
parameterized_ops = [_a().__class__.__name__ for _a in _all_variables if _a.num_params > 0]

0 comments on commit c6f8e8b

Please sign in to comment.