
Commit

remove commented code
aerubanov committed Jan 27, 2022
1 parent b700297 commit 71bc74c
Showing 2 changed files with 6 additions and 74 deletions.
57 changes: 0 additions & 57 deletions aesara/graph/opt.py
@@ -1069,63 +1069,6 @@ def recursive_merge(var):
return [recursive_merge(v) for v in variables]


# class LocalOptimizer(abc.ABC):
# """A node-based optimizer."""
#
# def __hash__(self):
# if not hasattr(self, "_optimizer_idx"):
# self._optimizer_idx = _optimizer_idx[0]
# _optimizer_idx[0] += 1
# return self._optimizer_idx
#
# def tracks(self):
# """
# Return the list of op classes that this opt applies to.
#
# Return None to apply to all nodes.
#
# """
# return None
#
# @abc.abstractmethod
# def transform(self, fgraph, node, *args, **kwargs):
# """
# Transform a subgraph whose output is `node`.
#
# Subclasses should implement this function so that it returns one of two
# kinds of things:
#
# - False to indicate that no optimization can be applied to this `node`;
# or
# - <list of variables> to use in place of `node`'s outputs in the
# greater graph.
# - dict(old variables -> new variables). A dictionary that map
# from old variables to new variables to replace.
#
# Parameters
# ----------
# node : an Apply instance
#
# """
#
# raise NotImplementedError()
#
# def add_requirements(self, fgraph):
# """
# If this local optimization wants to add some requirements to the
# fgraph, this is the place to do it.
#
# """
#
# def print_summary(self, stream=sys.stdout, level=0, depth=-1):
# print(f"{' ' * level}{self.__class__.__name__} id={id(self)}", file=stream)
#
# def __str__(self):
# if hasattr(self, "name"):
# return f"{type(self).__name__}[{self.name}]"
# return repr(self)


class LocalMetaOptimizer(LocalOptimizer):
r"""
Base class for meta-optimizers that try a set of `LocalOptimizer`\s
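The block deleted above was a commented-out duplicate of the LocalOptimizer base class, which stays live elsewhere in aesara/graph/opt.py (LocalMetaOptimizer still subclasses it just below). As a minimal sketch of the transform contract those dead comments described, a subclass returns False when the rewrite does not apply, a list of variables to use in place of the node's outputs, or a dict mapping old variables to new ones. RemoveIdentity and its "Identity" op check are hypothetical, and the import assumes this revision still exposes aesara.graph.opt.LocalOptimizer:

from aesara.graph.opt import LocalOptimizer


class RemoveIdentity(LocalOptimizer):
    """Replace identity-like nodes with their input (illustrative only)."""

    def tracks(self):
        # None means "consider every node"; a list of Ops would restrict
        # matching to those Ops.
        return None

    def transform(self, fgraph, node):
        # Return False when this rewrite does not apply to the node...
        if type(node.op).__name__ != "Identity":
            return False
        # ...or a list of variables to substitute for node.outputs.
        return [node.inputs[0]]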
23 changes: 6 additions & 17 deletions aesara/sparse/type.py
@@ -150,13 +150,6 @@ def may_share_memory(a, b):
def make_variable(self, name=None):
return self.Variable(self, name=name)

# def __eq__(self, other):
# return (
# super().__eq__(other)
# and type(self) == type(other)
# and other.format == self.format
# )

def __hash__(self):
return super().__hash__() ^ hash(self.format)

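The commented-out __eq__ removed here is a reminder of the invariant the surviving __hash__ relies on: two types that compare equal must hash equal, which holds because equality (see the active __eq__ in the next hunk) and __hash__ both fold in self.format. A small usage sketch, assuming the SparseType(format, dtype) constructor from this file:

from aesara.sparse.type import SparseType

a = SparseType("csr", "float64")
b = SparseType("csr", "float64")

assert a == b                 # equality compares dtype and format
assert hash(a) == hash(b)     # hash mixes format into the parent hash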
@@ -222,16 +215,12 @@ def __eq__(self, other):
return other.dtype == self.dtype and other.format == self.format

def is_super(self, otype):
# if (
# isinstance(otype, SparseType)
# and otype.dtype == self.dtype
# and otype.ndim == self.ndim
# and self.format == otype.format
# and otype.broadcastable == self.broadcastable
# ):
# return True
# return False
if self == otype:
if (
isinstance(otype, type(self))
and otype.dtype == self.dtype
and otype.ndim == self.ndim
and self.format == otype.format
):
return True
return False

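The rewrite above replaces the commented-out draft with a live check: is_super now answers True exactly when otype is an instance of the same class with matching dtype, ndim, and format (the old comments also compared broadcastable, which the new check drops). A short usage sketch, again assuming the SparseType(format, dtype) constructor:

from aesara.sparse.type import SparseType

csr64 = SparseType("csr", "float64")

assert csr64.is_super(SparseType("csr", "float64"))      # all fields match
assert not csr64.is_super(SparseType("csc", "float64"))  # format differs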
