def gelu(x):
    """Piecewise polynomial approximation of the GELU activation.

    Saturates to 0 below -4 and to the identity above 3; in between it
    uses two fitted polynomials (a cubic on [-4, -1.95), then a sextic
    with only even powers plus the 0.5*x linear term on [-1.95, 3]).
    """
    if x < -4:
        # Deep negative tail: true GELU is numerically 0 here.
        return 0
    if x < -1.95:
        # Cubic fit for the negative shoulder.
        return 0.011034134 * x**3 - 0.118076129 * x**2 + 0.422265811 * x + 0.505403119
    if x <= 3:
        # Central region: even-power sextic plus the linear 0.5*x term.
        return 0.001806746 * x**6 - 0.037688200 * x**4 + 0.360329269 * x**2 + 0.5 * x + 0.008526321
    # Large positive inputs: GELU(x) ~ x.
    return x
    
def exp_approx(x):
    """Approximate e**x via the compound-interest limit (1 + x/n)**n, n = 256.

    Avoids calling a true exponential; accuracy improves as |x| shrinks
    relative to n.
    """
    base = 1 + x / 256
    return base ** 256

def reciprocal_approx(x):
    """Approximate 1/x with a division-free telescoping product.

    Computes prod_{k=0..4} (1 + (1 - x)**(2**k)), which algebraically
    equals (1 - (1 - x)**32) / x, so it converges to 1/x for |1 - x| < 1.
    """
    t = 1 - x
    # First factor written as (1 - x) + 1; remaining factors multiply in
    # the same left-to-right order as the expanded form.
    result = t + 1
    for exponent in (2, 4, 8, 16):
        result *= t**exponent + 1
    return result

def softmax_approx(values):
    """Approximate softmax built entirely from the polynomial helpers.

    Each exponential comes from exp_approx and the division by the
    normalizing sum is replaced by a multiplication with
    reciprocal_approx(sum), so no true exp or divide is needed.
    Accuracy is inherited from those approximations; in particular
    reciprocal_approx is only reliable when the sum lies near 1.
    """
    exps = [exp_approx(v) for v in values]
    inv_total = reciprocal_approx(sum(exps))
    return [e * inv_total for e in exps]

def layernorm_sqrt_reciprocal(x):
    """Approximate the square-root pair a layer-norm kernel needs, without
    any true sqrt or division.

    Returns a tuple (a, b) where, after the Newton refinement below,
    a tracks sqrt(x) and b tracks 0.5/sqrt(x).
    NOTE(review): the final polish loop (a *= 1.5 - 0.5*x*a**2) only acts
    as a contraction when x*a**2 is close to 1, so this routine appears
    tuned for inputs near 1 (e.g. pre-normalized variances) — confirm
    the expected input range with the caller.
    """
    # Linear seed for 1/sqrt(x), then the classic inverse-square-root
    # Newton step y <- 1.5*y - 0.5*x*y**3, applied four times.
    inv_root = 0.129054537 - 0.000129055 * x
    for _ in range(4):
        inv_root = 1.5 * inv_root - 0.5 * x * inv_root**3
    # sqrt(x) = x * (1/sqrt(x)); half the reciprocal root for the pair.
    a = x * inv_root
    b = 0.5 * inv_root
    # Two extra adjustment passes using the same Newton-shaped update.
    for _ in range(2):
        a = a * (1.5 - 0.5 * x * a**2)
        b = b * (1.5 - 0.5 * x * b**2)
    return a, b

