def perfect_squares(num):
    """Return the least number of perfect squares that sum to *num*.

    Classic bottom-up DP: dp[i] holds the minimum count of squares
    summing to i, built from dp[i - s] for every square s <= i.

    Args:
        num: Non-negative target integer.

    Returns:
        The minimum count of perfect squares (1, 4, 9, ...) whose sum
        is exactly num; 0 when num == 0.

    Raises:
        ValueError: If num is negative (the original code failed with an
            opaque IndexError in that case).
    """
    if num < 0:
        raise ValueError(f"num must be non-negative, got {num}")

    # Hoist the candidate squares out of the DP loop — they are
    # loop-invariant, and this avoids recomputing j*j per dp cell.
    squares = []
    k = 1
    while k * k <= num:
        squares.append(k * k)
        k += 1

    # dp[0] = 0; every other cell starts at infinity until relaxed.
    dp = [0] + [float('inf')] * num
    for i in range(1, num + 1):
        # squares is non-empty here (1 <= i always), so min() is safe.
        dp[i] = 1 + min(dp[i - s] for s in squares if s <= i)
    return dp[num]