misc

Home for functions/classes that haven't found a home of their own

t_hstack = partial(torch.cat, dim=-1) module-attribute

Similar to np.hstack

t_vstack = partial(torch.cat, dim=-2) module-attribute

Similar to np.vstack
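
A quick sketch of the stacking behaviour (the tensors and shapes here are only illustrative):

import torch
from bayes_cbf.misc import t_hstack, t_vstack

a = torch.ones(2, 3)
b = torch.zeros(2, 3)

# t_hstack concatenates along the last dimension -> shape (2, 6)
assert t_hstack((a, b)).shape == (2, 6)
# t_vstack concatenates along the second-to-last dimension -> shape (4, 3)
assert t_vstack((a, b)).shape == (4, 3)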

BayesianDynamicsModel

Bases: DynamicsModel

Source code in bayes_cbf/misc.py
class BayesianDynamicsModel(DynamicsModel):
    @abstractmethod
    def fu_func_gp(self, U):
        """
        return a GaussianProcessBase
        """

fu_func_gp(U) abstractmethod

return a GaussianProcessBase

Source code in bayes_cbf/misc.py
@abstractmethod
def fu_func_gp(self, U):
    """
    return a GaussianProcessBase
    """

DynamicsModel

Bases: ABC

Represents a model of the form:

ẋ = f(x) + g(x)u

Source code in bayes_cbf/misc.py
class DynamicsModel(ABC):
    """
    Represents a model of the form:

    ẋ = f(x) + g(x)u
    """
    def __init__(self):
        self._state = None

    @property
    @abstractmethod
    def ctrl_size(self):
        """
        Dimension of ctrl
        """

    @property
    @abstractmethod
    def state_size(self):
        """
        Dimension of state
        """

    @abstractmethod
    def f_func(self, X):
        """
        ẋ = f(x) + g(x)u

        @param: X : d x self.state_size vector or self.state_size vector
        @returns: f(X)
        """

    @abstractmethod
    def g_func(self, X):
        """
        ẋ = f(x) + g(x)u

        @param: X : d x self.state_size vector or self.state_size vector
        @returns: g(X)
        """

    def normalize_state(self, X_in):
        return X_in

    def forward(self, x, u):
        if x.ndim == 1:
            X_b = x.unsqueeze(0)
        else:
            X_b = x

        if u.ndim == 1:
            U_b = u.unsqueeze(0).unsqueeze(-1)
        elif u.ndim == 2:
            U_b = u.unsqueeze(0)
        else:
            U_b = u

        Xdot_b = self.f_func(X_b) + self.g_func(X_b).bmm(U_b).squeeze(-1)
        if x.ndim == 1:
            xdot = Xdot_b.squeeze(0)
        else:
            xdot = Xdot_b

        return xdot

    def step(self, u, dt):
        x = self._state
        xdot = self.forward(x, u)
        xtp1 = self.normalize_state(x + xdot * dt)
        self._state = xtp1
        return dict(x=xtp1, xdot=xdot)

    def set_init_state(self, x0):
        self._state = x0.clone()

    def F_func(self, X):
        return torch.cat([self.f_func(X).unsqueeze(-1), self.g_func(X)], dim=-1)
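
A minimal concrete subclass, shown only to illustrate the interface (this hypothetical single-integrator system is not part of bayes_cbf):

import torch
from bayes_cbf.misc import DynamicsModel

class SingleIntegrator(DynamicsModel):
    """ẋ = u, i.e. f(x) = 0 and g(x) = I."""
    def __init__(self, n=2):
        super().__init__()
        self.n = n

    @property
    def ctrl_size(self):
        return self.n

    @property
    def state_size(self):
        return self.n

    def f_func(self, X):
        return torch.zeros_like(X)

    def g_func(self, X):
        # one n x n identity matrix per batch row
        return torch.eye(self.n).expand(X.shape[0], self.n, self.n)

model = SingleIntegrator(n=2)
model.set_init_state(torch.zeros(2))
result = model.step(u=torch.tensor([1.0, 0.0]), dt=0.01)
# result["x"] is the new state, result["xdot"] the state derivative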

ctrl_size() property abstractmethod

Dimension of ctrl

Source code in bayes_cbf/misc.py
@property
@abstractmethod
def ctrl_size(self):
    """
    Dimension of ctrl
    """

f_func(X) abstractmethod

ẋ = f(x) + g(x)u

@param: X : d x self.state_size vector or self.state_size vector
@returns: f(X)

Source code in bayes_cbf/misc.py
@abstractmethod
def f_func(self, X):
    """
    ẋ = f(x) + g(x)u

    @param: X : d x self.state_size vector or self.state_size vector
    @returns: f(X)
    """

g_func(X) abstractmethod

ẋ = f(x) + g(x)u

@param: X : d x self.state_size vector or self.state_size vector
@returns: g(X)

Source code in bayes_cbf/misc.py
@abstractmethod
def g_func(self, X):
    """
    ẋ = f(x) + g(x)u

    @param: X : d x self.state_size vector or self.state_size vector
    @returns: g(X)
    """

state_size() property abstractmethod

Dimension of state

Source code in bayes_cbf/misc.py
@property
@abstractmethod
def state_size(self):
    """
    Dimension of state
    """

plot_to_image(figure)

Renders the matplotlib figure specified by 'figure' to a PNG in memory and returns it as an image tensor. The supplied figure is closed and inaccessible after this call.

Source code in bayes_cbf/misc.py
def plot_to_image(figure):
    """Converts the matplotlib plot specified by 'figure' to a PNG image and
    returns it. The supplied figure is closed and inaccessible after this call."""
    # Save the plot to a PNG in memory.
    buf = io.BytesIO()
    figure.savefig(buf, format='png')
    # Closing the figure prevents it from being displayed directly inside
    # the notebook.
    plt.close(figure)
    buf.seek(0)
    # Decode the PNG buffer into a PIL image
    image = Image.open(buf)
    # Return the image as an (H, W, C) torch tensor
    return torch.from_numpy(np.asarray(image))
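
A small usage sketch (assumes matplotlib is installed; the figure contents are arbitrary):

import matplotlib.pyplot as plt
from bayes_cbf.misc import plot_to_image

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
img = plot_to_image(fig)   # fig is closed by this call
# img is an (H, W, C) uint8 tensor of the rendered figure
print(img.shape, img.dtype)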

t_hessian(f, x, xp, grad_check=True)

Computes the second derivative (Hessian) of f, differentiating once with respect to x and once with respect to xp.

Source code in bayes_cbf/misc.py
def t_hessian(f, x, xp, grad_check=True):
    """
    Computes second derivative, Hessian
    """
    with variable_required_grad(x):
        with variable_required_grad(xp):
            grad_k_func = lambda xs, xt: torch.autograd.grad(
                f(xs, xt), xs, create_graph=True)[0]
            Hxx_k = t_jac(grad_k_func(x, xp), xp)
    return Hxx_k
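
A usage sketch, assuming t_jac (used internally) computes the Jacobian. For f(xs, xt) = xsᵀ xt the mixed second derivative with respect to x and xp is the identity:

import torch
from bayes_cbf.misc import t_hessian

f = lambda xs, xt: xs @ xt     # scalar-valued bilinear function
x = torch.rand(3)
xp = torch.rand(3)
H = t_hessian(f, x, xp)        # d/dxp (d/dx f(x, xp))
assert torch.allclose(H, torch.eye(3))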

torch_kron(A, B, batch_dims=1)

>>> B = torch.rand(5,3,3)
>>> A = torch.rand(5,2,2)
>>> AB = torch_kron(A, B)
>>> torch.allclose(AB[1, :3, :3], A[1, 0, 0] * B[1, ...])
True
>>> BA = torch_kron(B, A)
>>> torch.allclose(BA[1, :2, :2], B[1, 0, 0] * A[1, ...])
True
>>> B = torch.rand(3,2)
>>> A = torch.rand(2,3)
>>> AB = torch_kron(A, B, batch_dims=0)
>>> AB.shape == (6, 6)
True

Source code in bayes_cbf/misc.py
def torch_kron(A, B, batch_dims=1):
    """
    >>> B = torch.rand(5,3,3)
    >>> A = torch.rand(5,2,2)
    >>> AB = torch_kron(A, B)
    >>> torch.allclose(AB[1, :3, :3] , A[1, 0,0] * B[1, ...])
    True
    >>> BA = torch_kron(B, A)
    >>> torch.allclose(BA[1, :2, :2] , B[1, 0,0] * A[1, ...])
    True
    >>> B = torch.rand(3,2)
    >>> A = torch.rand(2,3)
    >>> AB = torch_kron(A, B, batch_dims=0)
    >>> AB.shape == (6, 6)
    True
    """
    assert A.ndim == B.ndim
    b = B.shape[0:batch_dims]
    #assert A.shape[0:batch_dims] == b
    a = A.shape[0:batch_dims]
    B_shape = sum([[1, si] for si in B.shape[batch_dims:]], [])
    A_shape = sum([[si, 1] for si in A.shape[batch_dims:]], [])
    kron_shape = [a*b for a, b in zip_longest(A.shape[batch_dims:],
                                              B.shape[batch_dims:], fillvalue=1)]
    kron = (A.reshape(*a, *A_shape) * B.reshape(*b, *B_shape))
    k = kron.shape[:batch_dims]
    return kron.reshape(*k, *kron_shape)
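
For batch_dims=0 the result agrees with torch.kron (a sketch; torch.kron requires a reasonably recent PyTorch):

import torch
from bayes_cbf.misc import torch_kron

A = torch.rand(2, 3)
B = torch.rand(3, 2)
assert torch.allclose(torch_kron(A, B, batch_dims=0), torch.kron(A, B))

# Batched use: one Kronecker product per leading index
A_b = torch.rand(5, 2, 2)
B_b = torch.rand(5, 3, 3)
AB_b = torch_kron(A_b, B_b)    # shape (5, 6, 6)
assert torch.allclose(AB_b[1], torch.kron(A_b[1], B_b[1]))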

variable_required_grad(x)

Creates a context in which x requires a gradient, restoring its original requires_grad setting on exit.

Source code in bayes_cbf/misc.py
@contextmanager
def variable_required_grad(x):
    """
    creates context for x requiring gradient
    """
    old_x_requires_grad = x.requires_grad
    if isleaf(x):
        xleaf = x
    else:
        xleaf = x.detach().clone()
    try:
        yield xleaf.requires_grad_(True)
    finally:
        if isleaf(x):
            x.requires_grad_(old_x_requires_grad)
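
A usage sketch: take a gradient through a tensor that does not otherwise require one, without permanently changing its requires_grad flag:

import torch
from bayes_cbf.misc import variable_required_grad

x = torch.rand(3)                   # requires_grad is False
with variable_required_grad(x) as xv:
    y = (xv ** 2).sum()
    grad = torch.autograd.grad(y, xv)[0]
assert torch.allclose(grad, 2 * x)
assert not x.requires_grad          # original setting restored on exit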