3

这与这个问题有关。我设法让大部分代码正常运行,只剩下一件奇怪的事情。

这是修改后的代码。

import functools

import jax
import jax.numpy as jnp
from jax import custom_jvp
from jax import grad, jit, value_and_grad
from jax import lax
from jax import random
from jax import vmap, pmap


def p_tau(z, tau, alpha=1.5):
    """Unnormalized alpha-entmax transform of scores ``z`` at threshold ``tau``.

    Computes ``max(0, (alpha - 1) * z - tau) ** (1 / (alpha - 1))``
    elementwise.

    Args:
        z: array of scores.
        tau: scalar threshold.
        alpha: entmax exponent; must not equal 1 (division by alpha - 1).

    Returns:
        Non-negative array of the same shape as ``z``.
    """
    # Pass the lower bound positionally: the `a_min` keyword of jnp.clip is
    # deprecated (renamed to `min`) in recent JAX releases, while the
    # positional form works in both old and new versions.
    return jnp.clip((alpha - 1) * z - tau, 0) ** (1 / (alpha - 1))


def get_tau(tau, tau_max, tau_min, z_value):
    """Halve the bisection bracket around the entmax threshold.

    If the probability mass ``z_value`` is still below 1, the midpoint
    ``tau`` was too large and becomes the new upper bound; otherwise it
    becomes the new lower bound.

    Returns:
        A ``(new_tau_max, new_tau_min)`` pair.
    """
    # lax.cond keeps both branches traceable under jit.
    on_small_mass = lambda _: (tau, tau_min)   # mass < 1: shrink from above
    on_large_mass = lambda _: (tau_max, tau)   # mass >= 1: shrink from below
    return lax.cond(z_value < 1, on_small_mass, on_large_mass, operand=None)


def body(kwargs, x):
    """One bisection step over the threshold bracket, shaped for ``lax.scan``.

    ``kwargs`` is the scan carry, a dict with keys ``'tau_min'``,
    ``'tau_max'``, ``'z'`` and ``'alpha'``; ``x`` is unused because the scan
    runs with ``xs=None``. Returns the updated carry and ``None``.
    """
    z, alpha = kwargs['z'], kwargs['alpha']

    # Evaluate the probability mass at the bracket midpoint.
    midpoint = (kwargs['tau_min'] + kwargs['tau_max']) / 2
    mass = p_tau(z, midpoint, alpha).sum()

    # Keep whichever half of the bracket still contains the true threshold.
    new_max, new_min = get_tau(midpoint, kwargs['tau_max'], kwargs['tau_min'], mass)
    return {'tau_min': new_min, 'tau_max': new_max, 'z': z, 'alpha': alpha}, None

@functools.partial(jax.jit, static_argnums=(2,))
def map_row(z_input, alpha, T):
    """Compute the alpha-entmax distribution for a single row of scores.

    The threshold ``tau`` is located with ``T`` bisection steps via
    ``lax.scan`` (``T`` is static so the loop length is known at trace
    time), then the clipped probabilities are normalized.

    Args:
        z_input: 1-D array of scores.
        alpha: entmax exponent (> 1).
        T: number of bisection iterations; static under jit.

    Returns:
        1-D array of probabilities summing to 1.
    """
    # NOTE: jax.partial was removed from JAX (it was an undocumented
    # re-export of functools.partial); functools.partial is the supported
    # way to bind jit's keyword arguments.
    z = (alpha - 1) * z_input

    # Initial bracket guaranteed to contain the true threshold.
    tau_min, tau_max = jnp.min(z) - 1, jnp.max(z) - z.shape[0] ** (1 - alpha)
    carry = {'tau_min': tau_min, 'tau_max': tau_max, 'z': z, 'alpha': alpha}
    result, _ = lax.scan(body, carry, xs=None, length=T)
    tau = (result['tau_max'] + result['tau_min']) / 2
    probs = p_tau(z, tau, alpha)
    return probs / probs.sum()

@functools.partial(jax.jit, static_argnums=(1, 3))
def _entmax(input, axis=-1, alpha=1.5, T=20):
    """Apply ``map_row`` along ``axis`` of ``input`` via ``vmap``.

    ``axis`` and ``T`` are static arguments and must therefore be passed
    positionally at call sites (jit's static_argnums does not see keyword
    arguments in the JAX version this code targets).
    """
    # functools.partial replaces the removed jax.partial alias.
    return vmap(functools.partial(map_row, alpha=alpha, T=T), axis)(input)

@functools.partial(custom_jvp, nondiff_argnums=(1, 2, 3))
def entmax(input, axis=-1, alpha=1.5, T=10):
    """alpha-entmax over ``axis`` with a custom JVP (registered below).

    NOTE(review): ``axis``, ``alpha`` and ``T`` flow into jitted functions
    as static arguments, so they must be passed positionally — passing them
    by keyword triggers the "tuple index out of range" error discussed in
    the surrounding text.
    """
    # functools.partial replaces the removed jax.partial alias.
    return _entmax(input, axis, alpha, T)
    
@functools.partial(jax.jit, static_argnums=(0, 2))
def _entmax_jvp_impl(axis, alpha, T, primals, tangents):
    """JVP rule for entmax.

    With ``gppr = Y ** (2 - alpha)``, the tangent is
    ``dX = gppr * dY - (sum(gppr * dY) / sum(gppr)) * gppr`` along ``axis``,
    which keeps the output tangent mass-preserving.

    Args:
        axis: reduction axis (static under jit).
        alpha: entmax exponent.
        T: bisection iteration count (static under jit).
        primals: tuple whose first element is the input array.
        tangents: tuple whose first element is the input tangent.

    Returns:
        ``(Y, dX)``: the primal output and its tangent.
    """
    # functools.partial replaces the removed jax.partial alias.
    input = primals[0]
    grad_output = tangents[0]
    Y = entmax(input, axis, alpha, T)
    gppr = Y ** (2 - alpha)
    dX = grad_output * gppr
    # Project out the component that would change the total probability mass.
    q = dX.sum(axis=axis) / gppr.sum(axis=axis)
    q = jnp.expand_dims(q, axis=axis)
    dX -= q * gppr
    return Y, dX


@entmax.defjvp
def entmax_jvp(axis, alpha, T, primals, tangents):
    # Register the JVP rule for entmax. Per custom_jvp's nondiff_argnums,
    # the non-differentiable arguments (axis, alpha, T) arrive first,
    # followed by the (primals, tangents) tuples.
    return _entmax_jvp_impl(axis, alpha, T, primals, tangents)


import numpy as np
# Demo inputs; .block_until_ready() forces the device computation to finish.
input = jnp.array(np.random.randn(64, 10)).block_until_ready()
weight = jnp.array(np.random.randn(64, 10)).block_until_ready()

def toy(input, weight):
    # NOTE(review): axis/alpha/T are static arguments of entmax; in the JAX
    # version discussed here they must be passed positionally, so this
    # keyword call is exactly what triggers the "tuple index out of range"
    # error analyzed in the answer below.
    return (weight*entmax(input, axis=0, alpha=1.5, T=20)).sum()

jax.jit(value_and_grad(toy))(input, weight)

此代码将产生如下错误:

tuple index out of range

这是由这行代码引起的

@jax.partial(jax.jit, static_argnums=(2,))
def map_row(z_input, alpha, T):

即使我把函数体替换成一个恒等函数,错误仍然存在。这是一个非常奇怪的行为。但是,让这个参数保持静态对我来说非常重要,因为它有助于展开循环。

4

1 回答 1

2

这个错误是由于我希望很快在 JAX 中修复的一个缺陷:静态参数不能通过关键字传递。换句话说,你应该改变这个:

def toy(input, weight):
    return (weight*entmax(input, axis=0, alpha=1.5, T=20)).sum()

对此:

def toy(input, weight):
    return (weight*entmax(input, 0, 1.5, 20)).sum()

对 map_row 的调用也需要做同样的修改(把静态参数从关键字传递改为位置传递)。

此时,由于将跟踪变量传递给需要静态参数的函数,最终会出现 ValueError;该解决方案将类似于How to handle JAX reshape with JIT中的解决方案。


一个额外的说明:这个static_argnums错误最近得到了改进,在下一个版本中会更清楚一点:

ValueError: jitted function has static_argnums=(2,), donate_argnums=() but was called with only 1 positional arguments.
于 2021-01-07T14:23:35.610 回答