file_path
stringlengths 7
180
| content
stringlengths 0
811k
| repo
stringclasses 11
values |
---|---|---|
vta/python/vta/top/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""TVM TOPI connector, eventually most of these should go to TVM repo"""
from . import bitpack
from .graphpack import graph_pack
from . import op
from .vta_conv2d import conv2d_packed, schedule_conv2d_packed
from .vta_conv2d_transpose import conv2d_transpose_packed, schedule_conv2d_transpose_packed
from .vta_group_conv2d import group_conv2d_packed, schedule_group_conv2d_packed
from .vta_dense import dense_packed, schedule_dense_packed
from . import utils
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/bitpack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=ungrouped-imports
"""Bit packing operators"""
from __future__ import absolute_import as _abs
import tvm
from tvm import te
from tvm.topi import utils
from tvm.relay.op.op import register_compute, register_injective_schedule
from tvm.relay.op.op import register_pattern, OpPattern
def bitpack(data, bits, pack_type="int8", name="bitpack"):
    """Packs the lowest dimension of `data` into words of `pack_type`.

    Parameters
    ----------
    data : te.Tensor
        The input tensor; its last dimension is packed.
    bits : int
        Number of low bits of each element to keep; must evenly divide
        the bit-width of `pack_type`.
    pack_type : str, optional
        Storage dtype of the packed words: "int8", "int16" or "int32".
    name : str, optional
        Name for the resulting compute op.

    Returns
    -------
    packed : Tensor
        The packed tensor.
    """
    shape_vec = list(data.shape)
    if pack_type == "int8":
        data_width = 8
    elif pack_type == "int16":
        data_width = 16
    elif pack_type == "int32":
        data_width = 32
    else:
        raise RuntimeError("Unknown pack type %s" % pack_type)
    assert data_width % bits == 0
    # Number of input elements folded into each output word.
    lanes = data_width // bits
    # Data must be in multiples of the data_width
    assert utils.get_const_int(shape_vec[-1]) % lanes == 0, "Not a multiple of word size"
    shape_vec[-1] = shape_vec[-1] // lanes
    oshape = tuple(shape_vec)

    def _bitpack(*indices):
        # OR `lanes` consecutive masked elements into one word, each shifted
        # by its lane offset (k * bits).
        ret = None
        mask = tvm.tir.const((1 << bits) - 1, pack_type)
        for k in range(lanes):
            idx = list(indices)
            idx[-1] = idx[-1] * lanes + k
            elem = data(*idx).astype(pack_type)
            if k == 0:
                ret = elem & mask
            else:
                val = (elem & mask) << tvm.tir.const(k * bits, pack_type)
                ret = ret | val
        return ret

    return te.compute(oshape, _bitpack, name=name, tag="bitpack")
@register_compute("bitpack", level=15)
def compute_bitpack(attrs, inputs, output_type=None):
    """Relay compute registration for the "bitpack" op.

    TVM invokes a registered FTVMCompute as f(attrs, inputs, out_type); the
    original two-argument signature would raise TypeError when the op is
    lowered, so `output_type` is accepted (with a default, so any existing
    two-argument callers keep working). The value itself is unused: the
    output dtype equals the int8 input dtype.

    Parameters
    ----------
    attrs : op attributes; `attrs.lanes` is the number of elements per word.
    inputs : list of te.Tensor; inputs[0] must be int8.
    output_type : checked output type supplied by the relay lowering (unused).

    Returns
    -------
    te.Tensor produced by `bitpack`.
    """
    lanes = attrs.lanes
    dtype = inputs[0].dtype
    assert dtype == "int8"
    width = 8
    assert width % lanes == 0
    bits = width // lanes
    return bitpack(inputs[0], bits, dtype)
# Register a generic injective schedule and fusion pattern so relay can
# fuse "bitpack" like any other elementwise/injective op.
register_injective_schedule("bitpack")
register_pattern("bitpack", OpPattern.INJECTIVE)
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/graphpack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
"""A Relay implementation of graph packing."""
import tvm
from tvm import relay
from tvm.relay import op, transform
from tvm.relay import ExprMutator
def run_opt_pass(expr, opt_pass):
    """Execute a single relay pass over an expression and return the result.

    Returns the transformed function when `expr` is a Function, otherwise
    the transformed body expression.
    """
    assert isinstance(opt_pass, tvm.transform.Pass)
    mod = opt_pass(tvm.IRModule.from_expr(expr))
    entry = mod["main"]
    if isinstance(expr, relay.Function):
        return entry
    return entry.body
def _to_shape(shape):
"""convert shape into tuple."""
return tuple(int(sh) for sh in shape)
def _pack_batch_channel(data, dshape, bfactor, cfactor):
    """Reshape + transpose NCHW data into the packed NCHWnc layout."""
    batch, channel = int(dshape[0]), int(dshape[1])
    assert batch % bfactor == 0
    assert channel % cfactor == 0
    reshaped = op.reshape(
        data,
        newshape=(
            batch // bfactor,
            bfactor,
            channel // cfactor,
            cfactor,
            int(dshape[2]),
            int(dshape[3]),
        ),
    )
    # Move the bfactor/cfactor sub-dimensions to the innermost positions.
    return op.transpose(reshaped, axes=(0, 2, 4, 5, 1, 3))
def _unpack_batch_channel(data, old_shape, unpack_transpose=False):
    """Undo _pack_batch_channel: optionally transpose back, then reshape."""
    if unpack_transpose:
        data = op.transpose(data, axes=(0, 4, 1, 5, 2, 3))
    return op.reshape(data, newshape=old_shape)
def _channel_const_match(channel_length, cfactor_out):
"""Round the channel const variant if the value not divisible by cfactor_out"""
diff = int(channel_length) % cfactor_out
if diff != 0:
diff = cfactor_out - diff
channel_length = channel_length + diff
return diff, channel_length
def _const_shape_match(data, dshape, cfactor_out):
"""Pad the constant if the shape[0] not divisible by cfactor_out."""
assert len(dshape) == 3
pad_width = int(dshape[0]) % cfactor_out
if pad_width != 0:
pad_width = cfactor_out - pad_width
data = op.nn.pad(data, [[0, pad_width], [0, 0], [0, 0]])
dshape = tuple([dshape[0] + pad_width, dshape[1], dshape[2]])
return data, dshape
def _weight_shape_match(data, dshape, channels, cfactor_out, transpose=False):
"""Pad the weight if the shape[0] not divisible by cfactor_out."""
assert len(dshape) == 4
pad_width = int(dshape[0]) % cfactor_out
channels_pad = int(channels) % cfactor_out
if pad_width != 0:
pad_width = cfactor_out - pad_width
data = op.nn.pad(data, [[0, pad_width], [0, 0], [0, 0], [0, 0]])
dshape = tuple([dshape[0] + pad_width, dshape[1], dshape[2], dshape[3]])
if channels_pad != 0:
channels = channels + (cfactor_out - channels_pad)
return data, dshape, channels
def _weight_shape_match_transpose(data, dshape, channels, cfactor_out):
"""Pad the weight if the shape[1] not divisible by cfactor_out."""
assert len(dshape) == 4
pad_width = int(dshape[1]) % cfactor_out
channels_pad = int(channels) % cfactor_out
if pad_width != 0:
pad_width = cfactor_out - pad_width
data = op.nn.pad(data, [[0, 0], [0, pad_width], [0, 0], [0, 0]])
dshape = tuple(dshape[0], [dshape[1] + pad_width, dshape[2], dshape[3]])
if channels_pad != 0:
channels = channels + (cfactor_out - channels_pad)
return data, dshape, channels
def _pack_weight(data, dshape, cfactor):
    """Convert an OIHW weight into the packed OIHWoi layout."""
    assert len(dshape) == 4
    out_ch, in_ch = int(dshape[0]), int(dshape[1])
    assert out_ch % cfactor == 0
    assert in_ch % cfactor == 0
    reshaped = op.reshape(
        data,
        newshape=(
            out_ch // cfactor,
            cfactor,
            in_ch // cfactor,
            cfactor,
            int(dshape[2]),
            int(dshape[3]),
        ),
    )
    # Innermost two axes become the o/i packing sub-dimensions.
    return op.transpose(reshaped, axes=(0, 2, 4, 5, 1, 3))
def _pack_weight_conv2d_transpose(data, dshape, cfactor):
    """Convert a conv2d_transpose weight into its packed layout."""
    dims = _to_shape(dshape)
    assert len(dims) == 4
    assert dims[0] % cfactor == 0
    assert dims[1] % cfactor == 0
    reshaped = op.reshape(
        data,
        newshape=(
            dims[0] // cfactor,
            cfactor,
            dims[1] // cfactor,
            cfactor,
            dims[2],
            dims[3],
        ),
    )
    # Note the I/O axes are swapped relative to _pack_weight.
    return op.transpose(reshaped, axes=(2, 0, 4, 5, 3, 1))
def _pack_const(data, dshape, dtype, bfactor, cfactor):
    """Pack a constant parameter.

    Reshapes a 3-D constant (channel, h, w) into the 5-D packed layout
    (channel//cfactor, h, w, bfactor, cfactor), broadcasting over the
    batch sub-dimension.
    NOTE(review): `dtype` is accepted but never used here — confirm whether
    a cast was intended.
    """
    dshape = _to_shape(dshape)
    assert len(dshape) == 3
    assert dshape[0] % cfactor == 0
    data = op.reshape(data, newshape=(dshape[0] // cfactor, cfactor, dshape[1], dshape[2], 1))
    data = op.transpose(data, axes=(0, 2, 3, 4, 1))
    # broadcast batch dimension to bfactor
    data = op.broadcast_to(
        data, shape=(dshape[0] // cfactor, dshape[1], dshape[2], bfactor, cfactor)
    )
    return data
def _get_tensor_shape(node):
    """Return a node's shape as an int tuple, or [] for non-tensor types."""
    node_type = node.checked_type
    if not isinstance(node_type, relay.ty.TensorType):
        return []
    return _to_shape(node_type.shape)
def _get_tensor_type(node):
    """Return a node's dtype string, defaulting to float32 for non-tensors."""
    node_type = node.checked_type
    if not isinstance(node_type, relay.ty.TensorType):
        return "float32"
    return node_type.dtype
def _operator_idx_inc(expr, count_meta, operator_current_idx):
    """Advance the operator index; constants count only when count_meta is set."""
    if not isinstance(expr, relay.expr.Constant):
        return operator_current_idx + 1
    return operator_current_idx + 1 if count_meta else operator_current_idx
class ExprDeviceAnnot(ExprMutator):
    """Visitor to perform graph annotation on an AST.

    Calls are numbered in visit order; calls in [start, end) are annotated
    to run on the VTA `ext_dev` device, the call at `end` is pinned to CPU,
    and float ops inside the window are left unannotated.

    Parameters
    ----------
    start: int
        the start location to mark run on vta (inclusive)

    end: int
        the end location to mark run on vta (exclusive)

    Returns
    ---------
    None
    """

    def __init__(self, start=-1, end=-1):
        # Device handles used by the on_device annotations.
        self.ext_dev = tvm.device("ext_dev")
        self.cpu_dev = tvm.device("cpu")
        # Cached "cast" op for the float->int special case below.
        self.cast = op.op.get("cast")
        # Running call counter; incremented per visited call.
        self.counter = -1
        self.start = start
        self.end = end
        super().__init__()

    def visit_call(self, call):
        """Visit the children."""
        # First visit the children.
        args = [self.visit(arg) for arg in call.args]
        self.counter += 1
        if self.counter == self.start:
            ret = relay.Call(call.op, args, call.attrs)
            ret = relay.annotation.on_device(ret, self.ext_dev)
            return ret
        if self.counter == self.end:
            # First call past the VTA window: pin it back to the CPU.
            ret = relay.Call(call.op, args, call.attrs)
            ret = relay.annotation.on_device(ret, self.cpu_dev)
            return ret
        if self.counter > self.start and self.counter < self.end:
            ret = relay.Call(call.op, args, call.attrs)
            # skip the float op, i.e., float->int cast
            if self.is_float_op(call):
                return ret
            return relay.annotation.on_device(ret, self.ext_dev)
        # Outside the window: rebuild the call unannotated.
        return relay.Call(self.visit(call.op), args, call.attrs)

    def is_float_op(self, call):
        """check if this op belongs to a float op
        in general, float op's odtype is float;
        a special case is float->int cast, which follow this op sequence:
        multiply(float) -> round(float) -> clip(float) -> cast(int);
        """
        args = call.args
        odtype = _get_tensor_type(call)
        if odtype == "float32":
            return True
        if call.op == self.cast:
            # int-valued cast whose input is float counts as a float op too.
            idtype = _get_tensor_type(args[0])
            if idtype == "float32":
                return True
        return False
class ExprLocator(ExprMutator):
    """Visitor to locate op on an AST.

    After visiting, `op2nodes` maps (op, output dtype) to the list of
    visit-order counter positions at which that op occurred.
    """

    def __init__(self):
        # Running call counter; incremented per visited call.
        self.counter = -1
        # (op, odtype) -> list of counter positions.
        self.op2nodes = {}
        super().__init__()

    def visit_call(self, call):
        """Visit the children."""
        # First visit the children.
        args = [self.visit(arg) for arg in call.args]
        odtype = _get_tensor_type(call)
        self.counter += 1
        if (call.op, odtype) in self.op2nodes:
            self.op2nodes[(call.op, odtype)].append(self.counter)
        else:
            self.op2nodes[(call.op, odtype)] = [self.counter]
        return relay.Call(self.visit(call.op), args, call.attrs)
class ExprPack(ExprMutator):
    """Visitor to perform graph packing on an AST.

    Rewrites the region between the annotation.bitpack_start and
    annotation.bitpack_end markers into VTA's packed layouts (NCHWnc data,
    OIHWoi / IOHWio weights), converting the operators it recognizes and
    leaving everything else untouched.
    """

    def __init__(self, bfactor, cfactor, weight_bits):
        self.bfactor = bfactor
        self.cfactor = cfactor
        self.weight_bits = weight_bits
        # True while between bitpack_start and bitpack_end markers.
        self.start_pack = False
        # Cache Operator the algorithm matches against.
        self.bitpack_start = op.op.get("annotation.bitpack_start")
        self.bitpack_end = op.op.get("annotation.bitpack_end")
        self.conv2d = op.op.get("nn.conv2d")
        self.conv2d_transpose = op.op.get("nn.conv2d_transpose")
        self.add = op.op.get("add")
        self.multiply = op.op.get("multiply")
        self.bias_add = op.op.get("nn.bias_add")
        self.pad = op.op.get("nn.pad")
        self.upsampling = op.op.get("nn.upsampling")
        self.reshape = op.op.get("reshape")
        self.number_of_conv2d = 0
        # Cleared by the reshape case: the final unpack then skips its transpose.
        self.unpack_transpose = True
        super().__init__()

    def visit_call(self, call):
        """Visit the children."""
        # First visit the children.
        oshape = _get_tensor_shape(call)
        odtype = _get_tensor_type(call)
        input_types = [arg.checked_type for arg in call.args]
        args = [self.visit(arg) for arg in call.args]
        # Start and stop cases.
        if call.op == self.bitpack_start:
            assert not self.start_pack
            self.start_pack = True
            return _pack_batch_channel(args[0], oshape, self.bfactor, self.cfactor)
        if call.op == self.bitpack_end:
            if self.start_pack:
                self.start_pack = False
                data = args[0]
                data_shape = _get_tensor_shape(call.args[0])
                return _unpack_batch_channel(data, data_shape, self.unpack_transpose)
        if self.start_pack:
            # Operator cases
            if call.op == self.conv2d and odtype == "int32":
                self.number_of_conv2d += 1
                assert 8 % self.weight_bits == 0
                w_lanes = 8 // self.weight_bits
                data_layout = "NCHW%dn%dc" % (self.bfactor, self.cfactor)
                kernel_layout = "OIHW%do%di" % (self.cfactor, self.cfactor)
                data, weight = args
                data_shape = _to_shape(input_types[0].shape)
                kernel_shape = _to_shape(input_types[1].shape)
                channels = call.attrs.channels
                # Pad O axis / channels up to a multiple of cfactor before packing.
                weight, kernel_shape, channels = _weight_shape_match(
                    weight, kernel_shape, channels, self.cfactor
                )
                kernel = _pack_weight(weight, kernel_shape, self.cfactor)
                # insert bit packing when necessary
                if w_lanes != 1:
                    assert 8 % w_lanes == 0
                    kernel = op.bitpack(kernel, lanes=w_lanes)
                conv2d = op.nn.conv2d(
                    data,
                    kernel,
                    strides=call.attrs.strides,
                    padding=call.attrs.padding,
                    dilation=call.attrs.dilation,
                    groups=call.attrs.groups,
                    channels=channels,
                    kernel_size=call.attrs.kernel_size,
                    data_layout=data_layout,
                    kernel_layout=kernel_layout,
                    out_dtype=call.attrs.out_dtype,
                )
                return conv2d
            if call.op == self.conv2d_transpose and odtype == "int32":
                self.number_of_conv2d += 1
                assert 8 % self.weight_bits == 0
                w_lanes = 8 // self.weight_bits
                if self.start_pack:
                    data_layout = "NCHW%dn%dc" % (self.bfactor, self.cfactor)
                    kernel_layout = "IOHW%di%do" % (self.cfactor, self.cfactor)
                    data, weight = args
                    data_shape = _to_shape(input_types[0].shape)
                    kernel_shape = _to_shape(input_types[1].shape)
                    channels = call.attrs.channels
                    weight, kernel_shape, channels = _weight_shape_match_transpose(
                        weight, kernel_shape, channels, self.cfactor
                    )
                    kernel = _pack_weight_conv2d_transpose(weight, kernel_shape, self.cfactor)
                    conv2d = op.nn.conv2d_transpose(
                        data,
                        kernel,
                        strides=call.attrs.strides,
                        padding=call.attrs.padding,
                        dilation=call.attrs.dilation,
                        groups=call.attrs.groups,
                        # NOTE(review): passes the original attrs.channels rather
                        # than the padded `channels` computed above (unlike the
                        # conv2d case) — confirm this is intended.
                        channels=call.attrs.channels,
                        kernel_size=call.attrs.kernel_size,
                        data_layout=data_layout,
                        kernel_layout=kernel_layout,
                        output_padding=call.attrs.output_padding,
                        out_dtype=call.attrs.out_dtype,
                    )
                    return conv2d
            # Same-shape add/multiply need no repacking; fall through to the
            # default rebuild at the bottom.
            if call.op == self.add and tuple(input_types[0].shape) == tuple(input_types[1].shape):
                pass
            elif call.op == self.add and len(input_types[1].shape) == 3:
                data, const = args
                const, input_shape = _const_shape_match(const, input_types[1].shape, self.cfactor)
                const = _pack_const(
                    const, _to_shape(input_shape), input_types[1].dtype, self.bfactor, self.cfactor
                )
                return relay.Call(self.add, [data, const])
            elif call.op == self.multiply and tuple(input_types[0].shape) == tuple(
                input_types[1].shape
            ):
                pass
            elif call.op == self.multiply and len(input_types[1].shape) == 3:
                data, const = args
                const = _pack_const(
                    const,
                    _to_shape(input_types[1].shape),
                    input_types[1].dtype,
                    self.bfactor,
                    self.cfactor,
                )
                return relay.Call(self.multiply, [data, const])
            elif self.start_pack and call.op == self.bias_add:
                # bias_add is lowered to a packed-constant add.
                data, bias = args
                bias = _pack_const(
                    bias,
                    _to_shape(input_types[1].shape),
                    input_types[1].dtype,
                    self.bfactor,
                    self.cfactor,
                )
                return relay.Call(self.add, [data, bias])
            elif (
                self.start_pack and call.op == op.op.get("cast") and input_types[0].dtype == "int32"
            ):
                cast = relay.Call(op.op.get("cast"), [args[0]], call.attrs)
                return cast
            elif call.op == self.pad:
                pad_width = call.attrs.pad_width
                if len(pad_width) == 6:
                    # Already 6-D (packed) padding; rebuild unchanged below.
                    pass
                elif len(pad_width) == 4:
                    # Extend 4-D NCHW padding with zero padding for the two
                    # packed sub-dimensions.
                    (data, pad_value) = args
                    new_pad_width = []
                    new_pad_width.extend(pad_width)
                    for _ in range(2):
                        new_pad_width.append([0, 0])
                    return op.nn.pad(data, pad_value=pad_value, pad_width=new_pad_width)
            elif call.op == self.upsampling:
                (data,) = args
                scale_h = call.attrs.scale_h
                scale_w = call.attrs.scale_w
                data_layout = "NCHW%dn%dc" % (self.bfactor, self.cfactor)
                method = call.attrs.method
                align_corners = call.attrs.align_corners
                return op.nn.upsampling(data, scale_h, scale_w, data_layout, method, align_corners)
            elif call.op == self.reshape and len(input_types[0].shape) == 4:
                (data,) = args
                self.unpack_transpose = False
                # Unpack back to NCHW before reshaping.
                data = op.transpose(data, axes=(0, 4, 1, 5, 2, 3))
                new_shape = [int(x) for x in input_types[0].shape]
                # Check if the reshape match with such shape after pad
                pad, new_shape[1] = _channel_const_match(new_shape[1], self.cfactor)
                data = op.reshape(data, new_shape)
                # remove pad data
                if pad != 0:
                    new_pad_width = [[0, 0], [0, -pad], [0, 0], [0, 0]]
                    data = op.nn.pad(data, pad_width=new_pad_width)
                return data
        return relay.Call(self.visit(call.op), args, call.attrs)
class BT(Exception):
    """Sentinel exception used by get_subgraph to unwind recursion once the
    stop node has been located."""

    pass
def get_subgraph(expr, start_name, stop_name, start_name_idx, stop_name_idx, count_meta):
    """We assume stop_name only appears once for simplicity.
    This constraint will be lifted in the future.
    bitpack_start and bitpack_end are both inclusive.

    Walks the expression in A-normal form, wraps the first matching start
    node in annotation.bitpack_start, and — via the BT exception raised at
    the stop node and caught one frame up — wraps the stop node in
    annotation.bitpack_end before converting back to graph normal form.
    """
    bitpack_start = op.op.get("annotation.bitpack_start")
    bitpack_end = op.op.get("annotation.bitpack_end")
    # ANF gives one let-binding per operator, which makes the scan linear.
    anf = run_opt_pass(expr, transform.ToANormalForm())
    operator_current_idx = 0

    def _recursion(anf, start_found, stop_found, operator_current_idx):
        """Helper to obtain the subgraph."""
        if isinstance(anf, relay.Function):
            return relay.Function(
                anf.params,
                _recursion(anf.body, start_found, stop_found, operator_current_idx),
                anf.ret_type,
                anf.type_params,
                anf.attrs,
            )
        if isinstance(anf, relay.expr.Let):
            value = anf.value
            if isinstance(value, relay.expr.Call):
                if isinstance(value.op, tvm.ir.Op):
                    if value.op.name == start_name and not start_found:
                        # A None index means "match by name only".
                        if operator_current_idx == start_name_idx or start_name_idx is None:
                            value = relay.expr.Call(bitpack_start, [value])
                            start_found = True
                    elif value.op.name == stop_name:
                        if operator_current_idx == stop_name_idx or stop_name_idx is None:
                            # Unwind: the caller's except-clause marks this
                            # binding with bitpack_end.
                            raise BT()
            operator_current_idx = _operator_idx_inc(value, count_meta, operator_current_idx)
            try:
                return relay.expr.Let(
                    anf.var,
                    value,
                    _recursion(anf.body, start_found, stop_found, operator_current_idx),
                )
            except BT:
                assert start_found
                assert not stop_found
                stop_found = True
                value = relay.expr.Call(bitpack_end, [value])
                # todo: check anf.body has no more stop_name beside that one
                return relay.expr.Let(anf.var, value, anf.body)
        else:
            # Reached the end of the let-chain; both markers must be placed.
            assert start_found
            assert stop_found
            return anf

    annotated = _recursion(anf, False, False, operator_current_idx)
    return run_opt_pass(annotated, transform.ToGraphNormalForm())
def graph_pack(
    expr,
    bfactor,
    cfactor,
    weight_bits,
    start_name="nn.max_pool2d",
    stop_name="nn.global_avg_pool2d",
    start_name_idx=None,
    stop_name_idx=None,
    count_meta=False,
    device_annot=False,
    annot_start_name="nn.conv2d",
    annot_end_name="annotation.stop_fusion",
):
    """Pack the graph into batch&channel packed format.

    Parameters
    ----------
    expr : relay.Expr
        The input program.

    bfactor : int
        The packing factor in batch

    cfactor : int
        The packing factor in channel

    weight_bits: int
        The bit-width of the weights.

    start_name: str, optional
        Start packing from certain known node when start_name_idx is None.

    stop_name: str, optional
        Stop packing from certain known node when stop_name_idx is None.

    start_name_idx: int, optional
        When start_name_idx not None, start packing only when node name equal start_name
        and node idx equals start_name_idx.

    stop_name_idx: int, optional
        When stop_name_idx not None, stop packing only when node name equal stop_name
        and node index equals stop_name_idx.

    count_meta:boolean, optional
        When count_meta is False, the operator increase logic would not count the meta that have
        the type 'relay.expr.Constant', start_name_idx and stop_name_idx follow the index from
        'expr.astext(show_meta_data=False)'. When count_meta is True, the operator increase
        logic would count the meta.

    device_annot: boolean, optional
        if we want to annoate the device_type

    annot_start_name: str, optional
        device annotation start node, from which we mark the nodes as `ext_dev`

    annot_end_name: str, optional
        device annotation end node, after which we mark the nodes as 'cpu'

    Returns
    -------
    expr : Expr
        The transformed expression.
    """
    assert isinstance(expr, relay.Function)
    # Validate the packing window. BUG FIX: the original second clause was the
    # unparenthesized `start_name_idx is None != stop_name_idx is None`, which
    # Python parses as the chained comparison
    # `(start_name_idx is None) and (None != stop_name_idx) and (stop_name_idx is None)`
    # — not the intended exclusive-or. Parenthesize to get the XOR semantics.
    # NOTE(review): as in the original, the final `<` clause can only be reached
    # when both indices are None and the names are equal, where it would raise
    # TypeError — confirm the intended precondition.
    assert (
        (start_name != stop_name)
        or ((start_name_idx is None) != (stop_name_idx is None))
        or (not (start_name_idx is None and stop_name_idx is None))
        or (start_name_idx < stop_name_idx)
    )
    expr = get_subgraph(expr, start_name, stop_name, start_name_idx, stop_name_idx, count_meta)
    expr = run_opt_pass(expr, transform.InferType())
    packer = ExprPack(bfactor, cfactor, weight_bits)
    expr = packer.visit(expr)
    # Packing region must be properly closed by bitpack_end.
    assert not packer.start_pack
    expr = run_opt_pass(expr, transform.InferType())
    if device_annot:
        expr_locator = ExprLocator()
        expr_locator.visit(expr)
        annot_start = op.op.get(annot_start_name)
        start = expr_locator.op2nodes[(annot_start, "int32")][0]
        annot_end = op.op.get(annot_end_name)
        # we mark the next op to the last stop_fusion on cpu device
        end = expr_locator.op2nodes[(annot_end, "int8")][-1] + 1
        device_annot = ExprDeviceAnnot(start=start, end=end)
        expr = device_annot.visit(expr)
        return run_opt_pass(expr, transform.InferType())
    return expr
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/op.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument, ungrouped-imports
"""Namespace for supporting Relay operators on VTA."""
from __future__ import absolute_import as _abs
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from tvm.relay.op import op as reg
from tvm.relay.op import strategy as _strategy
from tvm.relay.op.op import OpPattern, OpStrategy
from .utils import is_packed_layout
from .vta_conv2d import conv2d_packed, schedule_conv2d_packed
from .vta_conv2d_transpose import conv2d_transpose_packed, schedule_conv2d_transpose_packed
from .vta_group_conv2d import group_conv2d_packed, schedule_group_conv2d_packed
from .vta_dense import dense_packed, schedule_dense_packed
from ..environment import get_env
ENV = get_env()
# override to force partition at copy
reg.register_pattern("copy", OpPattern.INJECTIVE, level=15)
# add clip vta strategy
def compute_clip_vta(attrs, inputs, output_type):
    """Clip compute for VTA: min with a_max, then max with a_min."""
    x = inputs[0]
    const_min = tvm.tir.const(attrs.a_min, x.dtype)
    const_max = tvm.tir.const(attrs.a_max, x.dtype)
    with tvm.te.tag_scope(topi.tag.ELEMWISE):
        upper_clipped = te.compute(
            x.shape, lambda *i: tvm.te.min(x(*i), const_max), name="clipA"
        )
        clipped = te.compute(
            upper_clipped.shape,
            lambda *i: tvm.te.max(upper_clipped(*i), const_min),
            name="clipB",
        )
    return [clipped]
def clip_strategy_vta(attrs, inputs, out_type, target):
    """Build the OpStrategy selecting the VTA clip implementation."""
    vta_strategy = OpStrategy()
    vta_strategy.add_implementation(
        compute_clip_vta,
        _strategy.wrap_topi_schedule(topi.generic.schedule_injective),
        name="clip.vta",
    )
    return vta_strategy
# Route relay's "clip" to the VTA strategy when compiling for the "vta" target.
reg.get("clip").get_attr("FTVMStrategy").register(clip_strategy_vta, "vta")
@autotvm.register_topi_compute("add.vta")
def add_packed(cfg, lhs, rhs):
    """Elementwise add compute for VTA; delegates to topi.add (cfg unused)."""
    return topi.add(lhs, rhs)
@autotvm.register_topi_compute("multiply.vta")
def multiply_packed(cfg, lhs, rhs):
    """Elementwise multiply compute for VTA; delegates to topi.multiply (cfg unused)."""
    return topi.multiply(lhs, rhs)
def schedule_alu_packed(cfg, outs):
    """alu packed schedule

    Builds a tiled schedule for int elementwise (ALU-only) ops in the 6-D
    packed layout, staging inputs through VTA's accumulator scope. Falls
    back to the auto-inlined injective schedule for unsupported targets,
    non-int dtypes, non-packed shapes, or float inputs.
    """
    assert len(outs) == 1

    def is_cast_op(op):
        return op.name == "T_cast"

    outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs
    output = outs[0]
    s = te.create_schedule([x.op for x in outs])
    te.schedule.AutoInlineInjective(s)
    # other target does not support alu-only ops
    if not (ENV.TARGET in ["sim", "tsim", "intelfocl"]):
        return s
    # only put the int-related ops to vta
    if "int" in output.dtype and len(output.shape) == 6:
        ewise_inputs = []
        ewise_ops = []
        const_ops = []

        def _traverse(op):
            # Classify ops: scalar constants, elementwise ops, and the
            # placeholder/cast tensors that must be cache-read into ACC.
            if topi.tag.is_broadcast(op.tag):
                if not op.same_as(output.op):
                    if not op.axis:
                        const_ops.append(op)
                    elif not is_cast_op(op):
                        ewise_ops.append(op)
                for tensor in op.input_tensors:
                    if isinstance(tensor.op, tvm.te.PlaceholderOp):
                        ewise_inputs.append((op, tensor))
                    elif is_cast_op(tensor.op) and not op.same_as(output.op):
                        ewise_inputs.append((op, tensor))
                    else:
                        _traverse(tensor.op)
            else:
                for tensor in op.input_tensors:
                    if (not isinstance(tensor.op, tvm.te.PlaceholderOp)) and (
                        not is_cast_op(tensor.op)
                    ):
                        _traverse(tensor.op)

        op = output.op
        _traverse(op)
        # Any float input disqualifies the VTA ALU path.
        for _, t in ewise_inputs:
            if t.dtype == "float32":
                return s
        x_bo, x_co, x_i, x_j, x_bi, x_ci = s[output].op.axis
        cfg.define_split("tile_co", x_co, num_outputs=2)
        cfg.define_split("tile_h", x_i, num_outputs=2)
        cfg.define_split("tile_w", x_j, num_outputs=2)
        x_co0, x_co1 = cfg["tile_co"].apply(s, output, x_co)
        x_i0, x_i1 = cfg["tile_h"].apply(s, output, x_i)
        x_j0, x_j1 = cfg["tile_w"].apply(s, output, x_j)
        s[output].reorder(x_bo, x_i0, x_co0, x_j0, x_co1, x_i1, x_j1, x_bi, x_ci)
        store_pt = x_j0
        # Run each elementwise op in the accumulator scope under the ALU pragma.
        for e_o in ewise_ops:
            s[e_o].set_scope(ENV.acc_scope)
            s[e_o].pragma(s[e_o].op.axis[0], ENV.alu)
            s[e_o].compute_at(s[output], store_pt)
        # cache read input
        cache_read_ewise = []
        for consumer, tensor in ewise_inputs:
            cache_read_ewise.append(s.cache_read(tensor, ENV.acc_scope, [consumer]))
        for tensor in cache_read_ewise:
            if s[tensor].op.axis:
                s[tensor].pragma(s[tensor].op.axis[0], ENV.dma_copy)
            s[tensor].compute_at(s[output], store_pt)
        for op in const_ops:
            s[op].compute_inline()
        s[output].pragma(x_co1, ENV.dma_copy)
    return s
@autotvm.register_topi_schedule("add.vta")
def schedule_add_packed(cfg, outs):
    """AutoTVM schedule entry for packed add; reuses the shared ALU schedule."""
    return schedule_alu_packed(cfg, outs)
@autotvm.register_topi_schedule("multiply.vta")
def schedule_multiply_packed(cfg, outs):
    """AutoTVM schedule entry for packed multiply; reuses the shared ALU schedule."""
    return schedule_alu_packed(cfg, outs)
def add_strategy_vta(attrs, inputs, out_type, target):
    """Build the OpStrategy exposing the packed VTA add implementation."""
    vta_strategy = OpStrategy()
    vta_strategy.add_implementation(
        _strategy.wrap_topi_compute(add_packed),
        _strategy.wrap_topi_schedule(schedule_add_packed),
        name="add.vta",
    )
    return vta_strategy
def multiply_strategy_vta(attrs, inputs, out_type, target):
    """Build the OpStrategy exposing the packed VTA multiply implementation."""
    vta_strategy = OpStrategy()
    vta_strategy.add_implementation(
        _strategy.wrap_topi_compute(multiply_packed),
        _strategy.wrap_topi_schedule(schedule_multiply_packed),
        name="multiply.vta",
    )
    return vta_strategy
# other target does not support alu-only ops
# NOTE(review): schedule_alu_packed also special-cases "tsim", but no strategy
# is registered for it here — confirm whether "tsim" should be in this list.
if ENV.TARGET in ["sim", "intelfocl"]:
    reg.get("add").get_attr("FTVMStrategy").register(add_strategy_vta, "vta")
    reg.get("multiply").get_attr("FTVMStrategy").register(multiply_strategy_vta, "vta")
@_strategy.conv2d_strategy.register("vta")
def conv2d_strategy_vta(attrs, inputs, out_type, target):
    """conv2d vta strategy

    Selects the packed VTA conv2d (grouped or not) when the data layout is a
    packed layout; otherwise falls back to the ARM CPU strategy.
    """
    strategy = OpStrategy()
    kernel = inputs[1]
    dilation = topi.utils.get_const_tuple(attrs.dilation)
    groups = attrs.groups
    layout = attrs.data_layout
    assert dilation == (1, 1), "support for dilation limited to (1, 1)"
    if is_packed_layout(layout):
        if groups == 1:
            # VTA hardware constraints: 8-bit inputs/weights only.
            assert ENV.LOG_INP_WIDTH == 3, "only support 8bit inp for now"
            assert ENV.LOG_WGT_WIDTH == 3, "only support 8bit wgt for now"
            assert kernel.dtype == "int8"
            strategy.add_implementation(
                _strategy.wrap_compute_conv2d(conv2d_packed, need_data_layout=True),
                _strategy.wrap_topi_schedule(schedule_conv2d_packed),
                name="conv2d_packed.vta",
            )
        else:  # group_conv2d
            strategy.add_implementation(
                _strategy.wrap_compute_conv2d(group_conv2d_packed, has_groups=True),
                _strategy.wrap_topi_schedule(schedule_group_conv2d_packed),
                name="group_conv2d_packed.vta",
            )
        return strategy
    # If it's not packed, run on ARM CPU
    arm_tgt = tvm.target.arm_cpu(target.model)
    return _strategy.arm_cpu.conv2d_strategy_arm_cpu(attrs, inputs, out_type, arm_tgt)
@_strategy.conv2d_transpose_strategy.register("vta")
def conv2d_transpose_strategy_vta(attrs, inputs, out_type, target):
    """conv2d_transpose vta strategy

    Uses the packed VTA implementation for packed layouts; otherwise falls
    back to the ARM CPU strategy.
    """
    dilation = topi.utils.get_const_tuple(attrs.dilation)
    layout = attrs.data_layout
    assert dilation == (1, 1), "support for dilation limited to (1, 1)"
    if is_packed_layout(layout):
        strategy = OpStrategy()
        strategy.add_implementation(
            _strategy.wrap_compute_conv2d_transpose(conv2d_transpose_packed),
            _strategy.wrap_topi_schedule(schedule_conv2d_transpose_packed),
            name="conv2d_transpose_packed.vta",
        )
        return strategy
    # If it's not packed, run on ARM CPU
    arm_tgt = tvm.target.arm_cpu(target.model)
    return _strategy.arm_cpu.conv2d_transpose_strategy_arm_cpu(attrs, inputs, out_type, arm_tgt)
@_strategy.dense_strategy.register("vta")
def dense_strategy_vta(attrs, inputs, out_type, target):
    """dense vta strategy

    A 4-D input is taken to mean the packed layout; otherwise falls back to
    the x86 CPU dense strategy (run on the ARM target).
    """
    if len(inputs[0].shape) == 4:  # this implies the layout is packed
        strategy = OpStrategy()
        strategy.add_implementation(
            _strategy.wrap_compute_dense(dense_packed),
            _strategy.wrap_topi_schedule(schedule_dense_packed),
            name="dense_packed.vta",
        )
        return strategy
    # If it's not packed, run on ARM CPU
    arm_tgt = tvm.target.arm_cpu(target.model)
    return _strategy.x86.dense_strategy_cpu(attrs, inputs, out_type, arm_tgt)
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""VTA TOPI Utils."""
def is_packed_layout(layout):
    """Return True when `layout` names a VTA packed layout (e.g. "NCHW1n16c").

    Plain "NCHW" is explicitly unpacked; any layout containing both the
    lowercase sub-dimension markers "n" and "c" counts as packed.
    """
    if layout == "NCHW":
        return False
    return "n" in layout and "c" in layout
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/vta_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Conv2D operator declaration and schedule registration for VTA."""
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from .utils import is_packed_layout
from ..environment import get_env
@autotvm.register_topi_compute("conv2d_packed.vta")
def conv2d_packed(cfg, data, kernel, strides, padding, dilation, layout, out_dtype):
    """Packed conv2d function.

    Declares a 2D convolution over VTA's 6-D packed layout; the index
    order used below is (batch-outer, channel-outer, height, width,
    batch-inner, channel-inner).

    Parameters
    ----------
    cfg : autotvm config entity
        Tuning configuration; receives the FLOP count of this workload.
    data, kernel : te.Tensor
        6-D packed input and weight tensors.
    strides : tuple of int
        (height, width) strides.
    padding : sequence of int
        Spatial padding; only the first two entries are used.
    dilation : tuple of int
        Must be (1, 1); dilation is not supported.
    layout : str
        Input layout string; must be a packed layout.
    out_dtype : str
        Accumulation/output dtype.

    Returns
    -------
    res : te.Tensor
        The 6-D packed convolution result, tagged "conv2d_dense".
    """
    if not is_packed_layout(layout):
        raise topi.InvalidShapeError()
    assert dilation == (1, 1)
    # Pad the spatial axes (2 and 3 of the packed layout) only when a
    # non-zero top padding is requested.
    if padding[0]:
        pad_data = topi.nn.pad(data, [0, 0, padding[0], padding[1], 0, 0], name="pad_data")
    else:
        pad_data = data
    assert len(data.shape) == 6
    assert len(kernel.shape) == 6
    # Output spatial extents for a strided, dilation-free convolution.
    oheight = topi.utils.get_const_int((pad_data.shape[2] - kernel.shape[2]) // strides[0] + 1)
    owidth = topi.utils.get_const_int((pad_data.shape[3] - kernel.shape[3]) // strides[1] + 1)
    oshape = (data.shape[0], kernel.shape[0], oheight, owidth, data.shape[4], kernel.shape[4])
    ishape = topi.utils.get_const_tuple(data.shape)
    kshape = topi.utils.get_const_tuple(kernel.shape)
    # Reduction axes: kernel height, kernel width, outer and inner input channel.
    d_i = te.reduce_axis((0, kshape[2]), name="d_i")
    d_j = te.reduce_axis((0, kshape[3]), name="d_j")
    k_o = te.reduce_axis((0, ishape[1]), name="k_o")
    k_i = te.reduce_axis((0, ishape[-1]), name="k_i")
    hstride, wstride = strides
    res = te.compute(
        oshape,
        lambda b_o, c_o, i, j, b_i, c_i: te.sum(
            pad_data[b_o, k_o, i * hstride + d_i, j * wstride + d_j, b_i, k_i].astype(out_dtype)
            * kernel[c_o, k_o, d_i, d_j, c_i, k_i].astype(out_dtype),
            axis=[k_o, d_i, d_j, k_i],
        ),
        name="res",
        tag="conv2d_dense",
    )
    # 2 FLOPs (multiply + accumulate) per MAC over the full reduction domain.
    cfg.add_flop(
        2
        * np.prod(topi.utils.get_const_tuple(oshape))
        * kshape[2]
        * kshape[3]
        * ishape[1]
        * ishape[-1]
    )
    return res
@autotvm.register_topi_schedule("conv2d_packed.vta")
def schedule_conv2d_packed(cfg, outs):
    """Schedule packed conv2d.

    Builds a VTA schedule for the ``conv2d_packed`` compute: the producer
    graph is classified into the conv2d stage, elementwise ops, scalar
    constants and placeholder inputs; the output is tiled for SRAM reuse;
    optional virtual threading is applied; and DMA/ALU/GEMM pragmas map
    the stages onto VTA intrinsics.
    """
    assert len(outs) == 1
    output = outs[0]
    const_ops = []
    ewise_inputs = []
    ewise_ops = []
    conv2d_res = []
    assert "int" in output.op.input_tensors[0].dtype
    # Walk producers upward from the output. Broadcast ops without axes are
    # constants, other broadcast ops are elementwise; anything else must be
    # the conv2d stage itself (tagged "conv2d_dense").
    def _traverse(op):
        if topi.tag.is_broadcast(op.tag):
            if not op.same_as(output.op):
                if not op.axis:
                    const_ops.append(op)
                else:
                    ewise_ops.append(op)
            for tensor in op.input_tensors:
                if isinstance(tensor.op, tvm.te.PlaceholderOp):
                    ewise_inputs.append((op, tensor))
                else:
                    _traverse(tensor.op)
        else:
            assert op.tag == "conv2d_dense"
            conv2d_res.append(op)
    _traverse(output.op)
    assert len(conv2d_res) == 1
    conv2d_stage = conv2d_res[0].output(0)
    s = te.create_schedule(output.op)
    ##### space definition begin #####
    b, c_o, x_i, x_j, _, _ = s[conv2d_stage].op.axis
    c_i, _, _, _ = s[conv2d_stage].op.reduce_axis
    cfg.define_split("tile_b", b, num_outputs=2)
    cfg.define_split("tile_h", x_i, num_outputs=2)
    cfg.define_split("tile_w", x_j, num_outputs=2)
    cfg.define_split("tile_ci", c_i, num_outputs=2)
    cfg.define_split("tile_co", c_o, num_outputs=2)
    cfg.define_knob("oc_nthread", [1, 2])
    cfg.define_knob("h_nthread", [1, 2])
    ###### space definition end ######
    data, kernel = conv2d_stage.op.input_tensors
    # If the conv2d input is a pad stage, schedule the padding itself and
    # treat the tensor underneath it as the real input.
    if isinstance(data.op, tvm.te.ComputeOp) and "pad" in data.op.tag:
        temp = data.op.input_tensors[0]
        pad_data = data
        data = temp
    else:
        pad_data = None
    env = get_env()
    # setup pad: the pad stage (when present) is computed directly into the
    # input SRAM scope; otherwise a cache-read stage performs the load.
    if pad_data is not None:
        cdata = pad_data
        s[pad_data].set_scope(env.inp_scope)
    else:
        cdata = s.cache_read(data, env.inp_scope, [conv2d_stage])
    ckernel = s.cache_read(kernel, env.wgt_scope, [conv2d_stage])
    s[conv2d_stage].set_scope(env.acc_scope)
    # cache read input tensors feeding the elementwise stages
    cache_read_ewise = []
    for consumer, tensor in ewise_inputs:
        cache_read_ewise.append(s.cache_read(tensor, env.acc_scope, [consumer]))
    # set ewise scope: elementwise ops run on the VTA ALU in the accumulator
    for op in ewise_ops:
        s[op].set_scope(env.acc_scope)
        s[op].pragma(s[op].op.axis[0], env.alu)
    for op in const_ops:
        s[op].compute_inline()
    # tile the output along output channel, height and width
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[output].op.axis
    x_co0, x_co1 = cfg["tile_co"].apply(s, output, x_co)
    x_i0, x_i1 = cfg["tile_h"].apply(s, output, x_i)
    x_j0, x_j1 = cfg["tile_w"].apply(s, output, x_j)
    s[output].reorder(x_bo, x_i0, x_co0, x_j0, x_co1, x_i1, x_j1, x_bi, x_ci)
    store_pt = x_j0
    # set all compute scopes: anchor every stage at the output tile boundary
    s[conv2d_stage].compute_at(s[output], store_pt)
    for op in ewise_ops:
        s[op].compute_at(s[output], store_pt)
    for tensor in cache_read_ewise:
        s[tensor].compute_at(s[output], store_pt)
        s[tensor].pragma(s[tensor].op.axis[0], env.dma_copy)
    # virtual threading along output channel axes
    if cfg["oc_nthread"].val > 1:
        _, v_t = s[output].split(x_co0, factor=cfg["oc_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    # virtual threading along spatial rows
    if cfg["h_nthread"].val > 1:
        _, v_t = s[output].split(x_i0, factor=cfg["h_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[conv2d_stage].op.axis
    k_o, d_i, d_j, k_i = s[conv2d_stage].op.reduce_axis
    s[conv2d_stage].reorder(x_bo, k_o, x_j, d_j, d_i, x_co, x_i, x_bi, x_ci, k_i)
    k_o, _ = cfg["tile_ci"].apply(s, conv2d_stage, k_o)
    # Load one input/weight tile per outer reduction iteration.
    s[cdata].compute_at(s[conv2d_stage], k_o)
    s[ckernel].compute_at(s[conv2d_stage], k_o)
    # Use VTA instructions: DMA for loads/stores, GEMM for the inner matmul
    s[cdata].pragma(s[cdata].op.axis[0], env.dma_copy)
    s[ckernel].pragma(s[ckernel].op.axis[0], env.dma_copy)
    s[conv2d_stage].tensorize(x_bi, env.gemm)
    s[output].pragma(x_co1, env.dma_copy)
    return s
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/vta_conv2d_transpose.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Conv2D_transpose operator declaration and schedule registration for VTA."""
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from tvm.topi.utils import get_const_tuple
from tvm.topi.nn.utils import get_pad_tuple
from ..environment import get_env
@autotvm.register_topi_compute("conv2d_transpose_packed.vta")
def conv2d_transpose_packed(cfg, data, kernel, strides, padding, out_dtype, output_padding=(0, 0)):
    """Packed conv2d_transpose compute.

    Declares a transposed 2D convolution over VTA's 6-D packed layout by
    dilating the input with the stride, applying the "backward" padding,
    and then running a regular unit-stride convolution.

    Returns the 6-D packed result tagged "packed_conv2d_transpose".
    """
    ishape = get_const_tuple(data.shape)
    kshape = get_const_tuple(kernel.shape)
    b, c_i, i_h, i_w, t_b, t_ci = ishape
    c_o, _, k_h, k_w, t_co, t_ci = kshape
    stride_h, stride_w = strides
    opad_h, opad_w = output_padding
    # FIXME(tmoreau89): currently IR pass breaks when output padding != (0,0)
    assert opad_h == 0 and opad_w == 0, "VTA does not support output padding for now"
    # derive padding parameters: the backward pad is the kernel-size
    # complement of the forward pad.
    fpad_top, fpad_left, fpad_bottom, fpad_right = get_pad_tuple(padding, (k_h, k_w))
    bpad_top = k_h - 1 - fpad_top
    bpad_bottom = k_h - 1 - fpad_bottom + opad_h
    bpad_left = k_w - 1 - fpad_left
    bpad_right = k_w - 1 - fpad_right + opad_w
    # padding stage: dilate spatially by the stride, then zero-pad
    dilated_input = topi.nn.dilate(data, [1, 1, stride_h, stride_w, 1, 1])
    data_pad = topi.nn.pad(
        dilated_input, [0, 0, bpad_top, bpad_left, 0, 0], [0, 0, bpad_bottom, bpad_right, 0, 0]
    )
    # convolution transpose stage
    out_h = (i_h - 1) * stride_h - fpad_top - fpad_bottom + k_h + opad_h
    out_w = (i_w - 1) * stride_w - fpad_left - fpad_right + k_w + opad_w
    oshape = (b, c_o, out_h, out_w, t_b, t_co)
    d_c = te.reduce_axis((0, c_i), name="d_c")
    d_h = te.reduce_axis((0, k_h), name="d_h")
    d_w = te.reduce_axis((0, k_w), name="d_w")
    d_ci = te.reduce_axis((0, t_ci), name="d_ci")
    # NOTE(review): the lambda parameters i_h/i_w shadow the input sizes
    # unpacked above; inside the sum they are output spatial coordinates.
    out = te.compute(
        oshape,
        lambda i_n, i_c, i_h, i_w, j_n, j_c: te.sum(
            data_pad(i_n, d_c, i_h + d_h, i_w + d_w, j_n, d_ci).astype(out_dtype)
            * kernel[i_c, d_c, d_h, d_w, j_c, d_ci].astype(out_dtype),
            axis=[d_c, d_h, d_w, d_ci],
        ),
        tag="packed_conv2d_transpose",
        name="res",
    )
    # 2 FLOPs (multiply + accumulate) per MAC over the reduction domain.
    cfg.add_flop(
        2
        * np.prod(topi.utils.get_const_tuple(oshape))
        * kshape[2]
        * kshape[3]
        * ishape[1]
        * ishape[-1]
    )
    return out
@autotvm.register_topi_schedule("conv2d_transpose_packed.vta")
def schedule_conv2d_transpose_packed(cfg, outs):
    """Schedule packed conv2d_transpose.

    Mirrors the packed conv2d schedule: classify producers, tile the
    output, optionally virtual-thread, and map stages onto VTA intrinsics.
    The inner compute is additionally split/unrolled before the GEMM
    pragma is applied.
    """
    assert len(outs) == 1
    output = outs[0]
    ewise_inputs = []
    ewise_ops = []
    conv2d_res = []
    assert output.dtype == "int8"
    assert output.op.input_tensors[0].dtype == "int32"
    # Walk producers upward; broadcast ops are elementwise, anything else
    # must be the transpose-conv stage (tagged "packed_conv2d_transpose").
    def _traverse(op):
        if topi.tag.is_broadcast(op.tag):
            if not op.same_as(output.op):
                ewise_ops.append(op)
            for tensor in op.input_tensors:
                if isinstance(tensor.op, tvm.te.PlaceholderOp):
                    ewise_inputs.append((op, tensor))
                else:
                    _traverse(tensor.op)
        else:
            assert op.tag == "packed_conv2d_transpose"
            conv2d_res.append(op)
    _traverse(output.op)
    assert len(conv2d_res) == 1
    conv2d_stage = conv2d_res[0].output(0)
    s = te.create_schedule(output.op)
    ##### space definition begin #####
    # NOTE(review): c_i from op.axis is immediately overwritten by the
    # reduce_axis unpack on the next line; the split is on the reduction.
    b, c_o, x_i, x_j, _, c_i = s[conv2d_stage].op.axis
    c_i, _, _, _ = s[conv2d_stage].op.reduce_axis
    cfg.define_split("tile_b", b, num_outputs=2)
    cfg.define_split("tile_h", x_i, num_outputs=2)
    cfg.define_split("tile_w", x_j, num_outputs=2)
    cfg.define_split("tile_ci", c_i, num_outputs=2)
    cfg.define_split("tile_co", c_o, num_outputs=2)
    cfg.define_knob("oc_nthread", [1, 2])
    cfg.define_knob("h_nthread", [1, 2])
    ###### space definition end ######
    data, kernel = conv2d_stage.op.input_tensors
    # Peel off the pad stage (if any) so the raw tensor can be cache-read.
    if isinstance(data.op, tvm.te.ComputeOp) and "pad" in data.op.tag:
        temp = data.op.input_tensors[0]
        pad_data = data
        data = temp
    else:
        pad_data = None
    env = get_env()
    # setup pad: compute padding directly into the input SRAM scope
    if pad_data is not None:
        cdata = pad_data
        s[pad_data].set_scope(env.inp_scope)
    else:
        cdata = s.cache_read(data, env.inp_scope, [conv2d_stage])
    ckernel = s.cache_read(kernel, env.wgt_scope, [conv2d_stage])
    s[conv2d_stage].set_scope(env.acc_scope)
    # cache read input tensors feeding the elementwise stages
    cache_read_ewise = []
    for consumer, tensor in ewise_inputs:
        cache_read_ewise.append(s.cache_read(tensor, env.acc_scope, [consumer]))
    # set ewise scope: elementwise ops run on the VTA ALU
    for op in ewise_ops:
        s[op].set_scope(env.acc_scope)
        s[op].pragma(s[op].op.axis[0], env.alu)
    # tile the output along output channel, height and width
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[output].op.axis
    x_co0, x_co1 = cfg["tile_co"].apply(s, output, x_co)
    x_i0, x_i1 = cfg["tile_h"].apply(s, output, x_i)
    x_j0, x_j1 = cfg["tile_w"].apply(s, output, x_j)
    s[output].reorder(x_bo, x_i0, x_co0, x_j0, x_co1, x_i1, x_j1, x_bi, x_ci)
    store_pt = x_j0
    # set all compute scopes
    s[conv2d_stage].compute_at(s[output], store_pt)
    for op in ewise_ops:
        s[op].compute_at(s[output], store_pt)
    for tensor in cache_read_ewise:
        s[tensor].compute_at(s[output], store_pt)
        s[tensor].pragma(s[tensor].op.axis[0], env.dma_copy)
    # virtual threading along output channel axes
    if cfg["oc_nthread"].val > 1:
        _, v_t = s[output].split(x_co0, factor=cfg["oc_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    # virtual threading along spatial rows
    if cfg["h_nthread"].val > 1:
        _, v_t = s[output].split(x_i0, factor=cfg["h_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[conv2d_stage].op.axis
    k_o, d_i, d_j, k_i = s[conv2d_stage].op.reduce_axis
    # Split the spatial axes into fixed sub-tiles and unroll the inner
    # loops before tensorization.
    x_i, x_ii = s[conv2d_stage].split(x_i, 4)
    x_j, x_jj = s[conv2d_stage].split(x_j, 2)
    s[conv2d_stage].reorder(x_bo, k_o, x_j, x_co, x_i, x_jj, d_j, d_i, x_ii, x_bi, x_ci, k_i)
    for axis in [d_j, d_i, x_ii, x_jj]:
        s[conv2d_stage].unroll(axis)
    k_o, _ = cfg["tile_ci"].apply(s, conv2d_stage, k_o)
    s[cdata].compute_at(s[conv2d_stage], k_o)
    s[ckernel].compute_at(s[conv2d_stage], k_o)
    # Use VTA instructions; note the custom pragma (rather than tensorize)
    # marking the transpose GEMM region.
    s[cdata].pragma(s[cdata].op.axis[0], env.dma_copy)
    s[ckernel].pragma(s[ckernel].op.axis[0], env.dma_copy)
    s[conv2d_stage].pragma(x_bi, "conv2d_transpose_gemm")
    s[output].pragma(x_co1, env.dma_copy)
    return s
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/vta_dense.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
"""Dense operator declaration and schedule registration for VTA."""
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from ..environment import get_env
def is_packed_layout(layout):
    """Check if layout is packed layout.

    "NCHW" itself is never packed; any other layout string is considered
    packed when it contains both a lowercase "n" (inner batch factor) and
    a lowercase "c" (inner channel factor).
    """
    if layout == "NCHW":
        return False
    has_inner_batch = "n" in layout
    has_inner_channel = "c" in layout
    return has_inner_batch and has_inner_channel
@autotvm.register_topi_compute("dense_packed.vta")
def dense_packed(cfg, data, weight, bias=None, out_dtype=None):
    """Dense function declaration.

    Declares a matrix multiply over VTA's 4-D packed layout
    (batch-outer, channel-outer, batch-inner, channel-inner).

    NOTE(review): ``bias`` is accepted for TOPI signature compatibility but
    is never used by this compute.

    Returns the 4-D packed result tagged "dense_pack".
    """
    # Make sure that the dense operator is packed
    if len(data.shape) != 4 or len(weight.shape) != 4:
        raise topi.InvalidShapeError()
    # Derive shapes
    ishape = topi.utils.get_const_tuple(data.shape)
    wshape = topi.utils.get_const_tuple(weight.shape)
    oshape = (data.shape[0], weight.shape[0], data.shape[2], weight.shape[2])
    # Reduction axes (input channel); outer/inner channel extents of data
    # and weight must agree.
    assert ishape[1] == wshape[1]
    assert ishape[3] == wshape[3]
    k_o = te.reduce_axis((0, ishape[1]), name="k_o")
    k_i = te.reduce_axis((0, ishape[3]), name="k_i")
    res = te.compute(
        oshape,
        lambda b_o, c_o, b_i, c_i: te.sum(
            data[b_o, k_o, b_i, k_i].astype(out_dtype)
            * weight[c_o, k_o, c_i, k_i].astype(out_dtype),
            axis=[k_o, k_i],
        ),
        name="res",
        tag="dense_pack",
    )
    # 2 FLOPs (multiply + accumulate) per MAC over the reduction domain.
    cfg.add_flop(2 * np.prod(topi.utils.get_const_tuple(oshape)) * ishape[1] * ishape[3])
    return res
@autotvm.register_topi_schedule("dense_packed.vta")
def schedule_dense_packed(cfg, outs):
    """Packed dense schedule.

    Classifies producers of the output, tiles the batch/channel axes for
    SRAM reuse, optionally virtual-threads the output channel, and maps
    data movement and the inner matmul onto VTA DMA/ALU/GEMM intrinsics.
    """
    assert len(outs) == 1
    output = outs[0]
    const_ops = []
    ewise_inputs = []
    ewise_ops = []
    dense_res = []
    assert "int" in output.op.input_tensors[0].dtype
    # Walk producers upward: axis-less broadcast ops are constants, other
    # broadcast ops are elementwise, the rest must be the dense stage.
    def _traverse(op):
        if topi.tag.is_broadcast(op.tag):
            if not op.same_as(output.op):
                if not op.axis:
                    const_ops.append(op)
                else:
                    ewise_ops.append(op)
            for tensor in op.input_tensors:
                if isinstance(tensor.op, tvm.te.PlaceholderOp):
                    ewise_inputs.append((op, tensor))
                else:
                    _traverse(tensor.op)
        else:
            assert op.tag == "dense_pack"
            dense_res.append(op)
    _traverse(output.op)
    assert len(dense_res) == 1
    dense_stage = dense_res[0].output(0)
    s = te.create_schedule(output.op)
    ##### space definition begin #####
    b, c_o, _, _ = s[dense_stage].op.axis
    c_i, _ = s[dense_stage].op.reduce_axis
    cfg.define_split("tile_b", b, num_outputs=2)
    cfg.define_split("tile_ci", c_i, num_outputs=2)
    cfg.define_split("tile_co", c_o, num_outputs=2)
    cfg.define_knob("oc_nthread", [1, 2])
    ###### space definition end ######
    data, weight = dense_stage.op.input_tensors
    env = get_env()
    # Stage input/weight loads through VTA's on-chip scopes.
    cdata = s.cache_read(data, env.inp_scope, [dense_stage])
    cweight = s.cache_read(weight, env.wgt_scope, [dense_stage])
    s[dense_stage].set_scope(env.acc_scope)
    # cache read input tensors feeding the elementwise stages
    cache_read_ewise = []
    for consumer, tensor in ewise_inputs:
        cache_read_ewise.append(s.cache_read(tensor, env.acc_scope, [consumer]))
    # set ewise scope: elementwise ops run on the VTA ALU
    for op in ewise_ops:
        s[op].set_scope(env.acc_scope)
        s[op].pragma(s[op].op.axis[0], env.alu)
    for op in const_ops:
        s[op].compute_inline()
    # apply tiling for SRAM reuse
    x_b, x_c, _, _ = s[output].op.axis
    x_bo, x_bi = cfg["tile_b"].apply(s, output, x_b)
    x_co, x_ci = cfg["tile_co"].apply(s, output, x_c)
    s[output].reorder(x_bo, x_co, x_bi, x_ci)
    store_pt = x_co
    # set all compute scopes: anchor every stage at the output tile boundary
    s[dense_stage].compute_at(s[output], store_pt)
    for op in ewise_ops:
        s[op].compute_at(s[output], store_pt)
    for tensor in cache_read_ewise:
        s[tensor].compute_at(s[output], store_pt)
        s[tensor].pragma(s[tensor].op.axis[0], env.dma_copy)
    # virtual threading along output channel axes
    if cfg["oc_nthread"].val > 1:
        _, v_t = s[output].split(x_co, factor=cfg["oc_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    x_bo, x_co, x_bi, _ = s[dense_stage].op.axis
    k_o, _ = s[dense_stage].op.reduce_axis
    s[dense_stage].reorder(x_bo, k_o, x_co)
    k_o, _ = cfg["tile_ci"].apply(s, dense_stage, k_o)
    # Load one input/weight tile per outer reduction iteration.
    s[cdata].compute_at(s[dense_stage], k_o)
    s[cweight].compute_at(s[dense_stage], k_o)
    # Use VTA instructions: DMA for loads/stores, GEMM for the inner matmul
    s[cdata].pragma(s[cdata].op.axis[0], env.dma_copy)
    s[cweight].pragma(s[cweight].op.axis[0], env.dma_copy)
    s[dense_stage].tensorize(x_bi, env.gemm)
    s[output].pragma(x_ci, env.dma_copy)
    return s
| https://github.com/zk-ml/tachikoma |
vta/python/vta/top/vta_group_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Group conv2D operator declaration and schedule registration for VTA."""
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
from ..environment import get_env
@autotvm.register_topi_compute("group_conv2d_packed.vta")
def group_conv2d_packed(cfg, data, kernel, strides, padding, dilation, group, out_dtype):
    """Packed group conv2d nchw function.

    Declares a grouped 2D convolution over VTA's 6-D packed layout. Each
    output channel reduces only over the input channels of its own group;
    the group of an output channel is recovered from its index (see the
    ``c_o // (kshape[0] // group)`` term below).

    Returns the 6-D packed result tagged "packed_group_conv2d".
    """
    assert dilation == (1, 1)
    # Pad spatial dimensions only when a non-zero top padding is requested.
    if padding[0]:
        pad_data = topi.nn.pad(data, [0, 0, padding[0], padding[1], 0, 0], name="pad_data")
    else:
        pad_data = data
    assert len(data.shape) == 6
    assert len(kernel.shape) == 6
    assert data.dtype == "int8", data.dtype
    assert kernel.dtype == "int8", kernel.dtype
    assert out_dtype == "int32", out_dtype
    # Output spatial extents for a strided, dilation-free convolution.
    oheight = topi.utils.get_const_int((pad_data.shape[2] - kernel.shape[2]) // strides[0] + 1)
    owidth = topi.utils.get_const_int((pad_data.shape[3] - kernel.shape[3]) // strides[1] + 1)
    oshape = (data.shape[0], kernel.shape[0], oheight, owidth, data.shape[4], kernel.shape[4])
    ishape = topi.utils.get_const_tuple(data.shape)
    kshape = topi.utils.get_const_tuple(kernel.shape)
    # Channels must partition evenly into groups.
    assert group * kshape[1] == ishape[1]
    assert kshape[0] % group == 0
    # Reduction axes: per-group outer channel, kernel height/width, inner channel.
    d_i = te.reduce_axis((0, kshape[2]), name="d_i")
    d_j = te.reduce_axis((0, kshape[3]), name="d_j")
    k_o = te.reduce_axis((0, kshape[1]), name="k_o")
    k_i = te.reduce_axis((0, kshape[-1]), name="k_i")
    hstride, wstride = strides
    out = te.compute(
        oshape,
        lambda b_o, c_o, i, j, b_i, c_i: te.sum(
            pad_data[
                b_o,
                c_o // (kshape[0] // group) * kshape[1] + k_o,
                i * hstride + d_i,
                j * wstride + d_j,
                b_i,
                k_i,
            ].astype(out_dtype)
            * kernel[c_o, k_o, d_i, d_j, c_i, k_i].astype(out_dtype),
            axis=[k_o, d_i, d_j, k_i],
        ),
        name="res",
        tag="packed_group_conv2d",
    )
    # 2 FLOPs (multiply + accumulate) per MAC over the reduction domain.
    cfg.add_flop(
        2
        * np.prod(topi.utils.get_const_tuple(oshape))
        * kshape[2]
        * kshape[3]
        * ishape[1]
        * kshape[-1]
    )
    return out
@autotvm.register_topi_schedule("group_conv2d_packed.vta")
def schedule_group_conv2d_packed(cfg, outs):
    """Schedule the packed conv2d.

    Same structure as the packed conv2d schedule: classify producers,
    tile the output for SRAM reuse, optionally virtual-thread, and map
    stages onto VTA DMA/ALU/GEMM intrinsics.
    """
    assert len(outs) == 1
    output = outs[0]
    const_ops = []
    ewise_inputs = []
    ewise_ops = []
    conv2d_res = []
    assert output.dtype == "int8"
    assert output.op.input_tensors[0].dtype == "int32"
    # Walk producers upward: axis-less broadcast ops are constants, other
    # broadcast ops are elementwise, the rest must be the group-conv stage.
    def _traverse(op):
        if topi.tag.is_broadcast(op.tag):
            if not op.same_as(output.op):
                if not op.axis:
                    const_ops.append(op)
                else:
                    ewise_ops.append(op)
            for tensor in op.input_tensors:
                if isinstance(tensor.op, tvm.te.PlaceholderOp):
                    ewise_inputs.append((op, tensor))
                else:
                    _traverse(tensor.op)
        else:
            assert op.tag == "packed_group_conv2d"
            conv2d_res.append(op)
    _traverse(output.op)
    assert len(conv2d_res) == 1
    conv2d_stage = conv2d_res[0].output(0)
    s = te.create_schedule(output.op)
    ##### space definition begin #####
    b, c_o, x_i, x_j, _, _ = s[conv2d_stage].op.axis
    c_i, _, _, _ = s[conv2d_stage].op.reduce_axis
    cfg.define_split("tile_b", b, num_outputs=2)
    cfg.define_split("tile_h", x_i, num_outputs=2)
    cfg.define_split("tile_w", x_j, num_outputs=2)
    cfg.define_split("tile_ci", c_i, num_outputs=2)
    cfg.define_split("tile_co", c_o, num_outputs=2)
    cfg.define_knob("oc_nthread", [1, 2])
    cfg.define_knob("h_nthread", [1, 2])
    ###### space definition end ######
    data, kernel = conv2d_stage.op.input_tensors
    # Peel off the pad stage (if any) so the raw tensor can be cache-read.
    if isinstance(data.op, tvm.te.ComputeOp) and "pad" in data.op.tag:
        temp = data.op.input_tensors[0]
        pad_data = data
        data = temp
    else:
        pad_data = None
    env = get_env()
    # setup pad: compute padding directly into the input SRAM scope
    if pad_data is not None:
        cdata = pad_data
        s[pad_data].set_scope(env.inp_scope)
    else:
        cdata = s.cache_read(data, env.inp_scope, [conv2d_stage])
    ckernel = s.cache_read(kernel, env.wgt_scope, [conv2d_stage])
    s[conv2d_stage].set_scope(env.acc_scope)
    # cache read input tensors feeding the elementwise stages
    cache_read_ewise = []
    for consumer, tensor in ewise_inputs:
        cache_read_ewise.append(s.cache_read(tensor, env.acc_scope, [consumer]))
    # set ewise scope: elementwise ops run on the VTA ALU
    for op in ewise_ops:
        s[op].set_scope(env.acc_scope)
        s[op].pragma(s[op].op.axis[0], env.alu)
    for op in const_ops:
        s[op].compute_inline()
    # tile the output along output channel, height and width
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[output].op.axis
    x_co0, x_co1 = cfg["tile_co"].apply(s, output, x_co)
    x_i0, x_i1 = cfg["tile_h"].apply(s, output, x_i)
    x_j0, x_j1 = cfg["tile_w"].apply(s, output, x_j)
    s[output].reorder(x_bo, x_i0, x_co0, x_j0, x_co1, x_i1, x_j1, x_bi, x_ci)
    store_pt = x_j0
    # set all compute scopes
    s[conv2d_stage].compute_at(s[output], store_pt)
    for op in ewise_ops:
        s[op].compute_at(s[output], store_pt)
    for tensor in cache_read_ewise:
        s[tensor].compute_at(s[output], store_pt)
        s[tensor].pragma(s[tensor].op.axis[0], env.dma_copy)
    # virtual threading along output channel axes
    if cfg["oc_nthread"].val > 1:
        _, v_t = s[output].split(x_co0, factor=cfg["oc_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    # virtual threading along spatial rows
    if cfg["h_nthread"].val > 1:
        _, v_t = s[output].split(x_i0, factor=cfg["h_nthread"].val)
        s[output].reorder(v_t, x_bo)
        s[output].bind(v_t, te.thread_axis("cthread"))
    x_bo, x_co, x_i, x_j, x_bi, x_ci = s[conv2d_stage].op.axis
    k_o, d_i, d_j, k_i = s[conv2d_stage].op.reduce_axis
    s[conv2d_stage].reorder(x_bo, k_o, x_j, d_j, d_i, x_co, x_i, x_bi, x_ci, k_i)
    k_o, _ = cfg["tile_ci"].apply(s, conv2d_stage, k_o)
    # Load one input/weight tile per outer reduction iteration.
    s[cdata].compute_at(s[conv2d_stage], k_o)
    s[ckernel].compute_at(s[conv2d_stage], k_o)
    # Use VTA instructions: DMA for loads/stores, GEMM for the inner matmul
    s[cdata].pragma(s[cdata].op.axis[0], env.dma_copy)
    s[ckernel].pragma(s[ckernel].op.axis[0], env.dma_copy)
    s[conv2d_stage].tensorize(x_bi, env.gemm)
    s[output].pragma(x_co1, env.dma_copy)
    return s
| https://github.com/zk-ml/tachikoma |
vta/python/vta/transform.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Additional Transformation Passes. for VTA"""
# pylint: disable=len-as-condition, no-else-return, unused-argument, invalid-name
import tvm
from tvm import te
from tvm.topi import utils
from .environment import get_env
def _match_pragma(stmt, key):
"""Internal helper to match stmt to pragma stmt.
Parameters
----------
stmt : Stmt
The AttrStmt
key : str
The pragma key
"""
return (stmt.attr_key == "pragma_" + key) or (
stmt.attr_key == "pragma_scope" and stmt.value.value == key
)
def FoldUopLoop():
    """Detect and fold uop loop.
    VTA support uop programming model
    that recognizes loop structure.
    This pass detect the loop structure
    and extract that into uop loop AST.
    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    # Try to fold the outermost For of `body` into a VTAUopLoopBegin/End
    # pair. Returns (begin_stmt, new_body, end_stmt); (None, body, None)
    # when `body` is not a loop; raises ValueError when folding fails.
    def _fold_outermost_loop(body):
        stmt = body
        if not isinstance(stmt, tvm.tir.For):
            return None, body, None
        loop_var = stmt.loop_var
        # One linear coefficient of loop_var per uop_push address argument
        # (presumably the three GEMM operand addresses — three slots here).
        # All uop_push calls in the loop must agree on these coefficients.
        gemm_offsets = [None, None, None]
        fail = [False]
        builtin_uop_push = tvm.ir.Op.get("tir.vta.uop_push")
        def _post_order(op):
            assert isinstance(op, tvm.tir.Call)
            base_args = 2
            if op.op.same_as(builtin_uop_push):
                args = []
                args += op.args[:base_args]
                for i in range(3):
                    # Decompose the address as m[0] * loop_var + m[1];
                    # empty result means the expression is not linear.
                    m = tvm.arith.detect_linear_equation(op.args[i + base_args], [loop_var])
                    if not m:
                        fail[0] = True
                        return op
                    if gemm_offsets[i] is not None:
                        # Coefficient must match the one seen previously.
                        if not tvm.ir.structural_equal(m[0], gemm_offsets[i]):
                            fail[0] = True
                            return op
                        args.append(m[1])
                    else:
                        gemm_offsets[i] = m[0]
                        args.append(m[1])
                args += op.args[base_args + 3 :]
                # Re-emit the push with only the loop-invariant offsets;
                # the loop_var stride moves into VTAUopLoopBegin below.
                return tvm.tir.call_intrin("int32", builtin_uop_push, *args)
            if op.op.name not in ("tir.vta.command_handle", "tir.tvm_thread_context"):
                raise RuntimeError("unexpected op %s" % op)
            return op
        ret = tvm.tir.stmt_functor.ir_transform(stmt.body, None, _post_order, ["tir.Call"])
        if not fail[0] and all(x is not None for x in gemm_offsets):
            # The loop variable must not survive anywhere in the rewritten body.
            def _visit(op):
                if op.same_as(loop_var):
                    fail[0] = True
            tvm.tir.stmt_functor.post_order_visit(ret, _visit)
            if not fail[0]:
                begin = tvm.tir.call_extern("int32", "VTAUopLoopBegin", stmt.extent, *gemm_offsets)
                end = tvm.tir.call_extern("int32", "VTAUopLoopEnd")
                return [begin, ret, end]
        raise ValueError("Failed to fold the GEMM instructions..")
    # Fold loops only inside coproc_uop_scope regions that push VTA uops.
    def _do_fold(stmt):
        env = get_env()
        if (
            stmt.attr_key == "coproc_uop_scope"
            and isinstance(stmt.value, tvm.tir.StringImm)
            and stmt.value.value == env.dev.vta_push_uop.value
        ):
            body = stmt.body
            begins = []
            ends = []
            try:
                # Attempt to fold up to two nested levels of outer loops;
                # a ValueError aborts folding and keeps what succeeded.
                begin, body, end = _fold_outermost_loop(body)
                if begin is not None:
                    begins.append(begin)
                if end is not None:
                    ends.append(end)
                begin, body, end = _fold_outermost_loop(body)
                if begin is not None:
                    begins.append(begin)
                if end is not None:
                    ends.append(end)
            except ValueError:
                pass
            if body == stmt.body:
                return stmt
            # Ends close in reverse order of the begins (LIFO nesting).
            ends = list(reversed(ends))
            body = tvm.tir.stmt_seq(*(begins + [body] + ends))
            return tvm.tir.AttrStmt(stmt.node, stmt.attr_key, stmt.value, body)
        return None
    def _ftransform(f, mod, ctx):
        return f.with_body(
            tvm.tir.stmt_functor.ir_transform(f.body, _do_fold, None, ["tir.AttrStmt"])
        )
    return tvm.tir.transform.prim_func_pass(_ftransform, opt_level=0, name="tir.vta.FoldUopLoop")
def CPUAccessRewrite():
    """Detect CPU access to VTA buffer and get address correctly.
    VTA's buffer is an opaque handle that do not
    correspond to address in CPU.
    This pass detect CPU access and rewrite to use pointer
    returned VTABufferCPUPtr for CPU access.
    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    def _ftransform(f, mod, ctx):
        env = get_env()
        # Maps from the original buffer var / Buffer to the CPU-pointer
        # replacement created on first access.
        var_remap = {}
        buf_remap = {}
        # Get (or lazily create) the CPU-pointer Var for a buffer var.
        def find_var_remap(old_var):
            if old_var in var_remap:
                return var_remap[old_var]
            new_var = tvm.tir.Var(old_var.name + "_ptr", dtype=old_var.type_annotation)
            var_remap[old_var] = new_var
            return new_var
        # Get (or lazily create) a Buffer identical to `old_buf` except
        # that its data var is the remapped CPU pointer.
        def find_buf_remap(old_buf):
            if old_buf in buf_remap:
                return buf_remap[old_buf]
            new_var = find_var_remap(old_buf.data)
            new_buf = tvm.tir.decl_buffer(
                shape=old_buf.shape,
                dtype=old_buf.dtype,
                data=new_var,
                strides=old_buf.strides,
                elem_offset=old_buf.elem_offset,
                scope=old_buf.scope,
                data_alignment=old_buf.data_alignment,
                offset_factor=old_buf.offset_factor,
                buffer_type="auto_broadcast" if (old_buf.buffer_type == 2) else "",
                axis_separators=old_buf.axis_separators,
            )
            buf_remap[old_buf] = new_buf
            return new_buf
        def _post_order(op):
            if isinstance(op, tvm.tir.Allocate):
                buffer_var = op.buffer_var
                if buffer_var not in var_remap:
                    return None
                # Bind the CPU pointer right under the allocation via
                # VTABufferCPUPtr, then drop the now-consumed remappings
                # so accesses outside this allocation get fresh ones.
                new_var = var_remap[buffer_var]
                let_stmt = tvm.tir.LetStmt(
                    new_var,
                    tvm.tir.call_extern(
                        "handle", "VTABufferCPUPtr", env.dev.command_handle, buffer_var
                    ),
                    op.body,
                )
                alloc = tvm.tir.Allocate(buffer_var, op.dtype, op.extents, op.condition, let_stmt)
                del var_remap[buffer_var]
                bufs_to_delete = [
                    old_buf for old_buf in buf_remap if old_buf.data.same_as(buffer_var)
                ]
                for buf in bufs_to_delete:
                    del buf_remap[buf]
                return alloc
            # Rewrite loads/stores to go through the remapped buffer.
            if isinstance(op, tvm.tir.BufferLoad):
                return tvm.tir.BufferLoad(find_buf_remap(op.buffer), op.indices)
            if isinstance(op, tvm.tir.BufferStore):
                return tvm.tir.BufferStore(find_buf_remap(op.buffer), op.value, op.indices)
            raise RuntimeError("not reached")
        stmt_in = f.body
        stmt = tvm.tir.stmt_functor.ir_transform(
            stmt_in, None, _post_order, ["tir.Allocate", "tir.BufferLoad", "tir.BufferStore"]
        )
        # Remaining remaps belong to buffers allocated outside this
        # function body (e.g. parameters): bind their pointers at the top.
        for old_var, new_var in var_remap.items():
            stmt = tvm.tir.LetStmt(
                new_var,
                tvm.tir.call_extern("handle", "VTABufferCPUPtr", env.dev.command_handle, old_var),
                stmt,
            )
        return f.with_body(stmt)
    return tvm.tir.transform.prim_func_pass(
        _ftransform, opt_level=0, name="tir.vta.CPUAccessRewrite"
    )
def LiftAllocToScopeBegin():
    """Lift allocate to beginning of the current scope.
    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    def _ftransform(f, mod, ctx):
        # Stack of pending statements to lift; one list per open scope
        # (the root scope plus one per For / virtual_thread region).
        lift_stmt = [[]]
        # Re-wrap `body` with every lifted statement in `slist`, preserving
        # their relative order, then clear the list in place.
        def _merge_block(slist, body):
            for op in slist:
                if op.body == body:
                    body = op
                elif isinstance(op, tvm.tir.Allocate):
                    body = tvm.tir.Allocate(op.buffer_var, op.dtype, op.extents, op.condition, body)
                elif isinstance(op, tvm.tir.AttrStmt):
                    body = tvm.tir.AttrStmt(op.node, op.attr_key, op.value, body)
                elif isinstance(op, tvm.tir.For):
                    body = tvm.tir.For(
                        op.loop_var,
                        op.min,
                        op.extent,
                        op.kind,
                        body,
                        op.thread_binding,
                        op.annotations,
                    )
                else:
                    raise RuntimeError("unexpected op")
            del slist[:]
            return body
        # Entering a For or virtual_thread region opens a new scope.
        def _pre_order(op):
            if isinstance(op, tvm.tir.For):
                lift_stmt.append([])
            elif isinstance(op, tvm.tir.AttrStmt):
                if op.attr_key == "virtual_thread":
                    lift_stmt.append([])
        # Leaving a node either records it for lifting (Allocate,
        # storage_scope) or closes the scope and splices the lifted
        # statements back in at the scope boundary.
        def _post_order(op):
            if isinstance(op, tvm.tir.Allocate):
                lift_stmt[-1].append(op)
                return op.body
            if isinstance(op, tvm.tir.AttrStmt):
                if op.attr_key == "storage_scope":
                    lift_stmt[-1].append(op)
                    return op.body
                if op.attr_key == "virtual_thread":
                    return _merge_block(lift_stmt.pop() + [op], op.body)
                return op
            if isinstance(op, tvm.tir.For):
                return _merge_block(lift_stmt.pop() + [op], op.body)
            raise RuntimeError("not reached")
        stmt_in = f.body
        stmt = tvm.tir.stmt_functor.ir_transform(
            stmt_in, _pre_order, _post_order, ["tir.Allocate", "tir.AttrStmt", "tir.For"]
        )
        # Only the root scope may remain; merge its lifted statements last.
        assert len(lift_stmt) == 1
        return f.with_body(_merge_block(lift_stmt[0], stmt))
    return tvm.tir.transform.prim_func_pass(
        _ftransform, opt_level=0, name="tir.vta.LiftAllocToScopeBegin"
    )
def InjectSkipCopy():
    """Pass to inject skip copy stmt, used for debug purpose.

    Any region annotated with the "skip_dma_copy" pragma is replaced by a
    no-op, so the DMA transfer it guards is elided entirely.

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    def _fold(op):
        # Replace the whole pragma region with an empty evaluation.
        return tvm.tir.Evaluate(0) if _match_pragma(op, "skip_dma_copy") else None
    def _transform(func, mod, ctx):
        body = tvm.tir.stmt_functor.ir_transform(func.body, _fold, None, ["tir.AttrStmt"])
        return func.with_body(body)
    return tvm.tir.transform.prim_func_pass(_transform, opt_level=0, name="tir.vta.InjectSkipCopy")
def InjectCoProcSync():
    """Pass inject coproc sync.

    Appends an explicit ``vta.coproc_sync`` call after regions marked with
    the "coproc_sync" pragma, truncates loops marked with the "trim_loop"
    pragma to two iterations (a debugging aid), and then runs the generic
    CoProcSync pass.

    Note: a ``success`` flag list that was written but never read has been
    removed; behavior is unchanged.

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    def _ftransform(f, *_):
        def _do_fold(stmt):
            if _match_pragma(stmt, "coproc_sync"):
                # Append an explicit synchronization after the region body.
                sync = tvm.tir.Call("int32", "vta.coproc_sync", [])
                return tvm.tir.SeqStmt([stmt.body, tvm.tir.Evaluate(sync)])
            if _match_pragma(stmt, "trim_loop"):
                op = stmt.body
                assert isinstance(op, tvm.tir.For)
                # Shorten the loop to 2 iterations (debug-only behavior).
                return tvm.tir.For(
                    op.loop_var, op.min, 2, op.kind, op.body, op.thread_binding, op.annotations
                )
            return None
        return f.with_body(
            tvm.tir.stmt_functor.ir_transform(f.body, None, _do_fold, ["tir.AttrStmt"])
        )
    return tvm.transform.Sequential(
        [
            tvm.tir.transform.prim_func_pass(_ftransform, 0, "tir.vta.InjectCoProcSync"),
            tvm.tir.transform.CoProcSync(),
        ],
        opt_level=0,
        name="tir.vta.InjectCoProcSync",
    )
def InjectDMAIntrin():
    """Pass to inject DMA copy intrinsics.

    Lowers ``dma_copy`` regions into ``VTALoadBuffer2D`` /
    ``VTAStoreBuffer2D`` runtime calls, after proving the on-chip buffer is
    compact and the DRAM side matches a 2D access pattern.

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """
    idxd = tvm.tir.indexdiv
    idxm = tvm.tir.indexmod

    def _check_compact(buf):
        # Prove `buf` is dense: stride[i] must equal the product of all inner
        # dimension extents, i.e. rows are contiguous with no gaps.
        ndim = len(buf.shape)
        size = tvm.tir.const(1, buf.shape[0].dtype)
        for i in reversed(range(ndim)):
            if not utils.equal_const_int(size - buf.strides[i], 0):
                raise RuntimeError(
                    "Cannot prove compact: shape=%s, strides=%s" % (buf.shape, buf.strides)
                )
            size = size * buf.shape[i]

    def _fold_buffer_dim(buf, scope, elem_block):
        # Collapse the innermost dimensions of `buf` until they form exactly
        # one vector block of `elem_block` elements, then greedily merge the
        # remaining outer dimensions that are mutually contiguous.
        # Returns (shape, strides) built innermost-first and reversed to
        # outermost-first before returning.
        ndim = len(buf.shape)
        x_size = 1
        base = 0
        for i in range(1, ndim + 1):
            if not utils.equal_const_int(buf.strides[ndim - i] - x_size, 0):
                raise RuntimeError("scope %s needs to have block=%d" % (scope, elem_block))
            x_size = x_size * buf.shape[ndim - i]
            if utils.equal_const_int(x_size - elem_block, 0):
                base = i + 1
                break
        if base == 0:
            raise RuntimeError(
                "scope %s need to have block=%d, shape=%s" % (scope, elem_block, buf.shape)
            )
        shape = [elem_block]
        strides = [1]
        # Insert a unit dimension when the next stride is not exactly one
        # block, so the folded pattern remains expressible.
        if base < ndim + 1 and not utils.equal_const_int(buf.strides[ndim - base], elem_block):
            shape.append(1)
            strides.append(elem_block)
        analyzer = tvm.arith.Analyzer()
        while base < ndim + 1:
            x_size = 1
            x_stride = buf.strides[ndim - base]
            next_base = base
            # Every remaining stride must be a whole number of blocks.
            if not utils.equal_const_int(idxm(x_stride, elem_block), 0):
                raise RuntimeError(
                    "scope %s need to have block=%d, shape=%s, strides=%s"
                    % (scope, elem_block, buf.shape, buf.strides)
                )
            # Merge consecutive dimensions that stay contiguous w.r.t. x_stride.
            for i in range(base, ndim + 1):
                k = ndim - i
                if not utils.equal_const_int(x_size * x_stride - buf.strides[k], 0):
                    break
                x_size = x_size * buf.shape[k]
                next_base = i + 1
            shape.append(analyzer.simplify(x_size))
            strides.append(x_stride)
            assert next_base != base
            base = next_base
        strides = list(reversed(strides))
        shape = list(reversed(shape))
        return shape, strides

    def _get_2d_pattern(buf, elem_width, elem_bytes, dtype, scope, allow_fold):
        # Match `buf` against a 2D DMA pattern. Returns
        # (x_size, y_size, x_stride, elem_offset), with stride and offset
        # measured in units of vector blocks of `elem_block` scalars.
        elem_block = elem_bytes * 8 // elem_width
        shape, strides = buf.shape, buf.strides
        # The start of the buffer must be block-aligned.
        if not utils.equal_const_int(idxm(buf.elem_offset, elem_block), 0):
            raise RuntimeError("scope %s need to have block=%d" % (scope, elem_block))
        if allow_fold:
            shape, strides = _fold_buffer_dim(buf, scope, elem_block)
        else:
            shape = list(x for x in shape)
            strides = list(x for x in strides)

        def raise_error():
            """Internal function to raise error"""
            raise RuntimeError(
                (
                    "Scope[%s]: cannot detect 2d pattern with elem_block=%d:"
                    + " shape=%s, strides=%s"
                )
                % (scope, elem_block, buf.shape, buf.strides)
            )

        ndim = len(shape)
        # Check if the inner-tensor is already flat
        flat = utils.equal_const_int(shape[-1], elem_block)
        if flat:
            if not utils.equal_const_int(strides[-1], 1):
                raise_error()
            if ndim == 1:
                # Single block row.
                x_size = 1
                x_stride = 1
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not utils.equal_const_int(strides[-2] - elem_block, 0):
                raise_error()
            if ndim == 2:
                # One contiguous row of blocks.
                x_size = shape[-2]
                x_stride = shape[-2]
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not utils.equal_const_int(idxm(strides[-3], elem_block), 0):
                raise_error()
            if ndim == 3:
                # Strided 2D pattern: y rows of x blocks each.
                x_size = shape[-2]
                x_stride = idxd(strides[-3], elem_block)
                y_size = shape[-3]
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
        else:
            # Inner tensor is 2D: shape[-2] * shape[-1] must form one block,
            # so each case below has one more dimension than the flat case.
            if not utils.equal_const_int(strides[-1], 1):
                raise_error()
            if not utils.equal_const_int(strides[-2] - shape[-1], 0):
                raise_error()
            if not utils.equal_const_int(shape[-1] * shape[-2], elem_block):
                raise_error()
            if ndim == 2:
                x_size = 1
                x_stride = 1
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not utils.equal_const_int(strides[-3], elem_block):
                raise_error()
            if ndim == 3:
                x_size = shape[-3]
                x_stride = shape[-3]
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not utils.equal_const_int(idxm(strides[-4], elem_block), 0):
                raise_error()
            if ndim == 4:
                x_size = shape[-3]
                x_stride = idxd(strides[-4], elem_block)
                y_size = shape[-4]
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
        raise_error()

    def _inject_copy(src, dst, pad_before, pad_after, pad_value):
        """Lower one dma_copy into a VTAStoreBuffer2D / VTALoadBuffer2D call."""
        # FIXME: pad_value is ignored...
        env = get_env()
        _ = pad_value
        if dst.scope() == "global":
            # Store
            if pad_before or pad_after:
                raise RuntimeError("Do not support copy into DRAM with pad")
            # Only accumulator -> DRAM stores are supported.
            if src.scope() == env.acc_scope:
                elem_width = env.OUT_WIDTH
                elem_bytes = env.OUT_ELEM_BYTES
                mem_type = env.dev.MEM_ID_OUT
                data_type = "int%d" % env.OUT_WIDTH
                task_qid = env.dev.QID_STORE_OUT
            else:
                raise RuntimeError("Do not support copy %s->dram" % (src.scope()))
            _check_compact(src)
            x_size, y_size, x_stride, offset = _get_2d_pattern(
                dst, elem_width, elem_bytes, data_type, src.scope(), allow_fold=True
            )
            irb = tvm.tir.ir_builder.create()
            irb.scope_attr(env.dev.vta_axis, "coproc_scope", env.dev.get_task_qid(task_qid))
            irb.emit(
                tvm.tir.call_extern(
                    "int32",
                    "VTAStoreBuffer2D",
                    env.dev.command_handle,
                    src.access_ptr("r", "int32"),
                    mem_type,
                    dst.data,
                    offset,
                    x_size,
                    y_size,
                    x_stride,
                )
            )
            return irb.get()
        elif src.scope() == "global":
            # Load: DRAM -> one of the on-chip scratchpads.
            if dst.scope() == env.acc_scope:
                elem_width = env.ACC_WIDTH
                elem_bytes = env.ACC_ELEM_BYTES
                mem_type = env.dev.MEM_ID_ACC
                data_type = "int%d" % env.ACC_WIDTH
                task_qid = env.dev.QID_LOAD_OUT
            elif dst.scope() == env.inp_scope:
                elem_width = env.INP_WIDTH
                elem_bytes = env.INP_ELEM_BYTES
                mem_type = env.dev.MEM_ID_INP
                data_type = "int%d" % env.INP_WIDTH
                task_qid = env.dev.QID_LOAD_INP
            elif dst.scope() == env.wgt_scope:
                elem_width = env.WGT_WIDTH
                elem_bytes = env.WGT_ELEM_BYTES
                mem_type = env.dev.MEM_ID_WGT
                data_type = "int%d" % env.WGT_WIDTH
                task_qid = env.dev.QID_LOAD_WGT
            else:
                raise RuntimeError("Do not support copy dram->%s" % (dst.scope()))
            # collect pad statistics
            if pad_before:
                assert pad_after
                ndim = len(pad_before)
                if ndim <= 2 or ndim > 5:
                    raise ValueError("Limitation of 2D pad load forbid ndim=%d" % ndim)
                if ndim == 5:
                    # This case occurs when batch size N > 1
                    y_pad_before = pad_before[1]
                    x_pad_before = pad_before[2]
                    y_pad_after = pad_after[1]
                    x_pad_after = pad_after[2]
                    for dim in range(3, ndim):
                        if not utils.equal_const_int(pad_before[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                        if not utils.equal_const_int(pad_after[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                else:
                    y_pad_before = pad_before[0]
                    x_pad_before = pad_before[1]
                    y_pad_after = pad_after[0]
                    x_pad_after = pad_after[1]
                    for dim in range(2, ndim):
                        if not utils.equal_const_int(pad_before[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                        if not utils.equal_const_int(pad_after[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                # Padded loads cannot be dimension-folded.
                allow_fold = False
            else:
                x_pad_before = 0
                y_pad_before = 0
                x_pad_after = 0
                y_pad_after = 0
                allow_fold = True
            _check_compact(dst)
            x_size, y_size, x_stride, offset = _get_2d_pattern(
                src, elem_width, elem_bytes, data_type, dst.scope(), allow_fold=allow_fold
            )
            # Narrow int8 data loaded into the accumulator scratchpad goes
            # through the dedicated 8-bit accumulator memory id.
            if data_type != src.dtype:
                assert data_type == "int%d" % env.ACC_WIDTH and src.dtype == "int%d" % env.INP_WIDTH
                mem_type = env.dev.MEM_ID_ACC_8BIT
            irb = tvm.tir.ir_builder.create()
            irb.scope_attr(env.dev.vta_axis, "coproc_scope", env.dev.get_task_qid(task_qid))
            irb.emit(
                tvm.tir.call_extern(
                    "int32",
                    "VTALoadBuffer2D",
                    env.dev.command_handle,
                    src.data,
                    offset,
                    x_size,
                    y_size,
                    x_stride,
                    x_pad_before,
                    y_pad_before,
                    x_pad_after,
                    y_pad_after,
                    dst.access_ptr("r", "int32"),
                    mem_type,
                )
            )
            return irb.get()
        else:
            raise RuntimeError("Do not support copy %s->%s" % (src.scope(), dst.scope()))

    return tvm.tir.transform.InjectCopyIntrin("dma_copy", _inject_copy)
def _get_gemm_intrin_buffer():
    """Declare the weight/input/accumulator buffers used by the GEMM intrinsic.

    Returns
    -------
    (wgt_layout, inp_layout, out_layout) : tuple of tvm.tir.Buffer
        Buffer declarations bound to the VTA weight, input and accumulator
        scratchpad scopes respectively.
    """
    env = get_env()

    def _lanes_and_shape(elem_bits, width, rows, cols):
        # Number of scalar lanes packed into one vector element must match
        # the 2D tile shape exactly.
        lanes = elem_bits // width
        assert lanes == rows * cols
        return lanes, (rows, cols)

    wgt_lanes, wgt_shape = _lanes_and_shape(
        env.WGT_ELEM_BITS, env.WGT_WIDTH, env.BLOCK_OUT, env.BLOCK_IN
    )
    inp_lanes, inp_shape = _lanes_and_shape(
        env.INP_ELEM_BITS, env.INP_WIDTH, env.BATCH, env.BLOCK_IN
    )
    out_lanes, out_shape = _lanes_and_shape(
        env.ACC_ELEM_BITS, env.ACC_WIDTH, env.BATCH, env.BLOCK_OUT
    )

    wgt = te.placeholder(wgt_shape, dtype="int%d" % env.WGT_WIDTH, name=env.wgt_scope)
    inp = te.placeholder(inp_shape, dtype="int%d" % env.INP_WIDTH, name=env.inp_scope)
    k = te.reduce_axis((0, wgt_shape[1]), name="k")
    out_dtype = "int%d" % env.ACC_WIDTH
    out = te.compute(
        out_shape,
        lambda i, j: te.sum(inp[i, k].astype(out_dtype) * wgt[j, k].astype(out_dtype), axis=[k]),
        name="out",
    )

    def _layout(tensor, scope, lanes):
        # The buffer is named after its scratchpad scope; offset factor and
        # alignment are in units of whole vector elements.
        return tvm.tir.decl_buffer(
            tensor.shape,
            tensor.dtype,
            scope,
            scope=scope,
            offset_factor=lanes,
            data_alignment=lanes,
        )

    return (
        _layout(wgt, env.wgt_scope, wgt_lanes),
        _layout(inp, env.inp_scope, inp_lanes),
        _layout(out, env.acc_scope, out_lanes),
    )
def InjectConv2DTransposeSkip():
    """Pass to skip 0-weights in conv2d transpose with stride > 1.

    Replaces ``conv2d_transpose_gemm`` pragma regions with explicit
    ``tir.vta.uop_push`` micro-ops, guarding the GEMM with the Select
    condition so zero taps introduced by the stride are skipped.

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """

    def _ftransform(func, mod, ctx):
        env = get_env()
        dwgt, dinp, dout = _get_gemm_intrin_buffer()
        # Buffer loads / selects harvested from the pragma regions by
        # post-order traversal. Entries accumulate across visits; the last
        # three loads are the innermost conv/data/kernel accesses.
        calls = []
        selects = []

        def _find_basics(op):
            if isinstance(op, tvm.tir.BufferLoad):
                calls.append(op)
            elif isinstance(op, tvm.tir.Select):
                selects.append(op)

        def _do_fold(op):
            if _match_pragma(op, "conv2d_transpose_gemm"):
                is_init = ".init" in str(op)
                tvm.tir.stmt_functor.post_order_visit(op, _find_basics)
                if is_init:
                    # create inner most block
                    irb = tvm.tir.ir_builder.create()
                    dev = env.dev
                    irb.scope_attr(dev.vta_axis, "coproc_scope", dev.get_task_qid(dev.QID_COMPUTE))
                    irb.scope_attr(dev.vta_axis, "coproc_uop_scope", dev.vta_push_uop)
                    # uop_push(mode=0, reset_out=1, ...) resets the accumulator.
                    irb.emit(
                        tvm.tir.call_intrin(
                            "int32",
                            "tir.vta.uop_push",
                            0,
                            1,
                            dout.access_ptr("rw", "int32"),
                            0,
                            0,
                            0,
                            0,
                            0,
                        )
                    )
                    inner = irb.get()
                    # TODO(@tmoreau89): This is only a temporary fix, please take a look.
                    body = op.body.body
                    while isinstance(body, tvm.tir.IfThenElse):
                        body = body.then_case
                    args = body.indices
                    res_buffer = body.buffer
                    tpl = (args[0], 1, args[1], 1, args[2], 1, args[3], 1, 0, 1, 0, env.BLOCK_OUT)
                    inner = tvm.tir.AttrStmt(
                        [dout, res_buffer],
                        "buffer_bind_scope",
                        tvm.tir.call_intrin("handle", "tir.tvm_tuple", *tpl),
                        inner,
                    )
                    return inner
                else:
                    conv_call, data_call, kernel_call = calls[-3:]
                    pad_data_tensor = data_call.buffer
                    kernel_tensor = kernel_call.buffer
                    res_tensor = conv_call.buffer
                    # Guard the GEMM with the select condition (if any) so
                    # taps that would multiply zeros are skipped.
                    if selects:
                        condition = selects[0].condition
                    else:
                        condition = tvm.tir.const(1, "int")
                    # create inner most block
                    irb = tvm.tir.ir_builder.create()
                    with irb.if_scope(condition):
                        dev = env.dev
                        irb.scope_attr(
                            dev.vta_axis, "coproc_scope", dev.get_task_qid(dev.QID_COMPUTE)
                        )
                        irb.scope_attr(dev.vta_axis, "coproc_uop_scope", dev.vta_push_uop)
                        irb.emit(
                            tvm.tir.call_intrin(
                                "int32",
                                "tir.vta.uop_push",
                                0,
                                0,
                                dout.access_ptr("rw", "int32"),
                                dinp.access_ptr("r", "int32"),
                                dwgt.access_ptr("r", "int32"),
                                0,
                                0,
                                0,
                            )
                        )
                    inner = irb.get()
                    # Bind the accumulator, kernel and input tensors to the
                    # GEMM intrinsic buffer declarations.
                    args = conv_call.indices
                    tpl = (args[0], 1, args[1], 1, args[2], 1, args[3], 1, 0, 1, 0, env.BLOCK_OUT)
                    inner = tvm.tir.AttrStmt(
                        [dout, res_tensor],
                        "buffer_bind_scope",
                        tvm.tir.call_intrin("handle", "tir.tvm_tuple", *tpl),
                        inner,
                    )
                    args = kernel_call.indices
                    tpl = (
                        args[0],
                        1,
                        args[1],
                        1,
                        args[2],
                        1,
                        args[3],
                        1,
                        0,
                        env.BLOCK_OUT,
                        0,
                        env.BLOCK_IN,
                    )
                    inner = tvm.tir.AttrStmt(
                        [dwgt, kernel_tensor],
                        "buffer_bind_scope",
                        tvm.tir.call_intrin("handle", "tir.tvm_tuple", *tpl),
                        inner,
                    )
                    args = data_call.indices
                    tpl = (args[0], 1, args[1], 1, args[2], 1, args[3], 1, 0, 1, 0, env.BLOCK_IN)
                    inner = tvm.tir.AttrStmt(
                        [dinp, pad_data_tensor],
                        "buffer_bind_scope",
                        tvm.tir.call_intrin("handle", "tir.tvm_tuple", *tpl),
                        inner,
                    )
                    return inner
            return None

        return func.with_body(
            tvm.tir.stmt_functor.ir_transform(func.body, _do_fold, None, ["tir.AttrStmt"])
        )

    # Pass name was previously misspelled "InjectConv2DTrasnposeSkip".
    return tvm.tir.transform.prim_func_pass(
        _ftransform, opt_level=0, name="tir.vta.InjectConv2DTransposeSkip"
    )
def AnnotateALUCoProcScope():
    """Pass to insert ALU instruction.

    Wraps every region annotated with the ``alu`` pragma in the compute
    queue's coprocessor scope; regions annotated ``skip_alu`` are dropped
    (debug hook).

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """

    def _transform(func, mod, ctx):
        env = get_env()
        dev = env.dev

        def _annotate(stmt):
            if _match_pragma(stmt, "alu"):
                builder = tvm.tir.ir_builder.create()
                builder.scope_attr(
                    dev.vta_axis, "coproc_scope", dev.get_task_qid(dev.QID_COMPUTE)
                )
                builder.scope_attr(
                    dev.vta_axis, "coproc_uop_scope", tvm.tir.StringImm("VTAPushALUOp")
                )
                builder.emit(stmt)
                return builder.get()
            if _match_pragma(stmt, "skip_alu"):
                # Debug hook: elide the ALU block entirely.
                return tvm.tir.Evaluate(0)
            return stmt

        return func.with_body(
            tvm.tir.stmt_functor.ir_transform(func.body, None, _annotate, ["tir.AttrStmt"])
        )

    return tvm.tir.transform.prim_func_pass(
        _transform, opt_level=0, name="tir.vta.AnnotateALUCoProcScope"
    )
def InjectALUIntrin():
    """Pass to inject ALU micro-ops.

    Pattern-matches loop nests annotated with the ``alu`` pragma, derives
    the ALU opcode plus the linear source/destination access coefficients,
    and replaces the nest with ``VTAUopLoopBegin`` / ``tir.vta.uop_push`` /
    ``VTAUopLoopEnd`` runtime calls.

    Returns
    -------
    fpass : tvm.transform.Pass
        The pass
    """

    def _ftransform(func, mod, ctx):
        env = get_env()
        idxm = tvm.tir.indexmod
        analyzer = tvm.arith.Analyzer()

        def _do_fold(stmt):
            def _flatten_loop(src_coeff, dst_coeff, extents):
                # Fuse adjacent loop levels whose src/dst coefficients are
                # exactly the inner level's coefficient times its extent.
                src_coeff = list(src_coeff)
                dst_coeff = list(dst_coeff)
                extents = list(extents)
                rev_src_coeff = [src_coeff.pop()]
                rev_dst_coeff = [dst_coeff.pop()]
                rev_extents = []
                assert src_coeff
                vsrc = src_coeff.pop()
                vdst = dst_coeff.pop()
                vext = extents.pop()
                while src_coeff:
                    next_src = src_coeff.pop()
                    next_dst = dst_coeff.pop()
                    next_ext = extents.pop()
                    if analyzer.can_prove_equal(next_src, vsrc * vext) and analyzer.can_prove_equal(
                        next_dst, vdst * vext
                    ):
                        vext = analyzer.simplify(vext * next_ext)
                    else:
                        rev_src_coeff.append(vsrc)
                        rev_dst_coeff.append(vdst)
                        rev_extents.append(vext)
                        vsrc = next_src
                        vdst = next_dst
                        vext = next_ext
                rev_src_coeff.append(vsrc)
                rev_dst_coeff.append(vdst)
                rev_extents.append(vext)
                rev_src_coeff.reverse()
                rev_dst_coeff.reverse()
                rev_extents.reverse()
                return rev_src_coeff, rev_dst_coeff, rev_extents

            if _match_pragma(stmt, "alu"):
                # Get to the innermost loop body
                loop_body = stmt.body
                nest_size = 0
                while isinstance(loop_body, tvm.tir.For):
                    loop_body = loop_body.body
                    nest_size += 1
                # Get the src/dst arguments
                dst_var = loop_body.buffer.data
                dst_idx = loop_body.indices[0]
                # Derive loop variables and extents
                tmp_body = stmt.body
                indices = []
                extents = []
                for _ in range(nest_size):
                    indices.append(tmp_body.loop_var)
                    extents.append(tmp_body.extent)
                    tmp_body = tmp_body.body
                # Derive opcode
                if isinstance(loop_body.value, tvm.tir.Add):
                    alu_opcode = env.dev.ALU_OPCODE_ADD
                    lhs = loop_body.value.a
                    rhs = loop_body.value.b
                elif isinstance(loop_body.value, tvm.tir.Sub):
                    alu_opcode = env.dev.ALU_OPCODE_SUB
                    lhs = loop_body.value.a
                    rhs = loop_body.value.b
                elif isinstance(loop_body.value, tvm.tir.Mul):
                    alu_opcode = env.dev.ALU_OPCODE_MUL
                    lhs = loop_body.value.a
                    rhs = loop_body.value.b
                elif isinstance(loop_body.value, tvm.tir.Min):
                    alu_opcode = env.dev.ALU_OPCODE_MIN
                    lhs = loop_body.value.a
                    rhs = loop_body.value.b
                elif isinstance(loop_body.value, tvm.tir.Max):
                    alu_opcode = env.dev.ALU_OPCODE_MAX
                    lhs = loop_body.value.a
                    rhs = loop_body.value.b
                elif isinstance(loop_body.value, tvm.tir.Call):
                    if loop_body.value.op.name == "tir.shift_left":
                        alu_opcode = env.dev.ALU_OPCODE_SHR
                        lhs = loop_body.value.args[0]
                        # Left shift is a right shift by a negated amount.
                        rhs = analyzer.simplify(-loop_body.value.args[1])
                    elif loop_body.value.op.name == "tir.shift_right":
                        alu_opcode = env.dev.ALU_OPCODE_SHR
                        lhs = loop_body.value.args[0]
                        rhs = loop_body.value.args[1]
                    else:
                        # Fix: Call exposes its callee via .op.name; the
                        # previous `.value.name` raised AttributeError here.
                        raise RuntimeError(
                            "Function call not recognized %s" % (loop_body.value.op.name)
                        )
                elif isinstance(loop_body.value, tvm.tir.BufferLoad):
                    # Plain copy: lower as shift-right by zero.
                    alu_opcode = env.dev.ALU_OPCODE_SHR
                    lhs = loop_body.value
                    rhs = tvm.tir.const(0, "int32")
                else:
                    raise RuntimeError(
                        "Expression not recognized %s, %s, %s"
                        % (type(loop_body.value), str(loop_body.value), str(stmt))
                    )
                # Derive array index coefficients
                dst_coeff = tvm.arith.detect_linear_equation(dst_idx, indices)
                # Check if lhs/rhs is immediate
                use_imm = False
                imm_val = None
                if isinstance(rhs, tvm.tir.IntImm):
                    assert lhs.buffer.data.same_as(dst_var)
                    src_coeff = tvm.arith.detect_linear_equation(lhs.indices[0], indices)
                    use_imm = True
                    imm_val = rhs
                if isinstance(lhs, tvm.tir.IntImm):
                    assert rhs.buffer.data.same_as(dst_var)
                    src_coeff = tvm.arith.detect_linear_equation(rhs.indices[0], indices)
                    use_imm = True
                    imm_val = lhs
                if imm_val is None:
                    imm_val = 0
                    assert lhs.buffer.data.same_as(dst_var) and rhs.buffer.data.same_as(dst_var)
                    src_lhs_coeff = tvm.arith.detect_linear_equation(lhs.indices[0], indices)
                    src_rhs_coeff = tvm.arith.detect_linear_equation(rhs.indices[0], indices)
                    # Determine which side has the same coefficients
                    lhs_equal = True
                    rhs_equal = True
                    for i, coef in enumerate(dst_coeff):
                        if not tvm.ir.structural_equal(coef, src_lhs_coeff[i]):
                            lhs_equal = False
                        if not tvm.ir.structural_equal(coef, src_rhs_coeff[i]):
                            rhs_equal = False
                    # Make sure at least one of the source is identical to the
                    # destination (in-place computation)
                    assert lhs_equal or rhs_equal
                    # Assign the source coefficients
                    if lhs_equal:
                        src_coeff = src_rhs_coeff
                    else:
                        src_coeff = src_lhs_coeff
                # Ensure that we have the proper tensor dimensions in the
                # innermost loop (pattern match)
                src_coeff = list(src_coeff)
                dst_coeff = list(dst_coeff)
                extents = list(extents)
                assert len(src_coeff) > 1
                assert len(dst_coeff) > 1
                assert len(extents) != 0
                assert tvm.ir.structural_equal(
                    analyzer.simplify(idxm(src_coeff[-1], env.BATCH * env.BLOCK_OUT)), 0
                )
                assert tvm.ir.structural_equal(
                    analyzer.simplify(idxm(dst_coeff[-1], env.BATCH * env.BLOCK_OUT)), 0
                )
                assert tvm.ir.structural_equal(src_coeff[-2], 1)
                assert tvm.ir.structural_equal(dst_coeff[-2], 1)
                if env.BATCH > 1:
                    assert len(src_coeff) > 2
                    assert len(dst_coeff) > 2
                    assert len(extents) > 1
                    assert tvm.ir.structural_equal(src_coeff[-3], env.BLOCK_OUT)
                    assert tvm.ir.structural_equal(dst_coeff[-3], env.BLOCK_OUT)
                # Apply tensorization of the loop coefficients
                src_offset = src_coeff[-1]
                dst_offset = dst_coeff[-1]
                if env.BATCH == 1:
                    src_coeff = src_coeff[:-2]
                    dst_coeff = dst_coeff[:-2]
                    extents = extents[:-1]
                else:
                    src_coeff = src_coeff[:-3]
                    dst_coeff = dst_coeff[:-3]
                    extents = extents[:-2]
                src_coeff.append(src_offset)
                dst_coeff.append(dst_offset)
                # Rescale coefficients from scalar elements to vector blocks.
                src_coeff = [analyzer.simplify(c // (env.BATCH * env.BLOCK_OUT)) for c in src_coeff]
                dst_coeff = [analyzer.simplify(c // (env.BATCH * env.BLOCK_OUT)) for c in dst_coeff]
                # Flatten the outer loops
                if extents:
                    src_coeff, dst_coeff, extents = _flatten_loop(src_coeff, dst_coeff, extents)
                # Insert ALU micro-ops
                irb = tvm.tir.ir_builder.create()
                for idx, extent in enumerate(extents):
                    irb.emit(
                        tvm.tir.call_extern(
                            "int32",
                            "VTAUopLoopBegin",
                            extent,
                            dst_coeff[idx],
                            src_coeff[idx],
                            0,
                        )
                    )
                use_imm = int(use_imm)
                irb.emit(
                    tvm.tir.call_intrin(
                        "int32",
                        "tir.vta.uop_push",
                        1,
                        0,
                        dst_coeff[len(dst_coeff) - 1],
                        src_coeff[len(src_coeff) - 1],
                        0,
                        alu_opcode,
                        use_imm,
                        imm_val,
                    )
                )
                # Close one VTAUopLoopEnd per opened loop level.
                for _ in extents:
                    irb.emit(tvm.tir.call_extern("int32", "VTAUopLoopEnd"))
                return irb.get()
            return stmt

        return func.with_body(
            tvm.tir.stmt_functor.ir_transform(func.body, None, _do_fold, ["tir.AttrStmt"])
        )

    return tvm.tir.transform.prim_func_pass(
        _ftransform, opt_level=0, name="tir.vta.InjectALUIntrin"
    )
| https://github.com/zk-ml/tachikoma |
vta/runtime/runtime.h | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* \file runtime.h
* \brief VTA runtime library.
*/
#ifndef VTA_RUNTIME_RUNTIME_H_
#define VTA_RUNTIME_RUNTIME_H_
#ifdef __cplusplus
extern "C" {
#endif
#include <tvm/runtime/c_runtime_api.h>
#include <vta/driver.h>
#define VTA_MEMCPY_H2D 1
#define VTA_MEMCPY_D2H 2
#define VTA_MEMCPY_D2D 3
#define VTA_DEBUG_DUMP_INSN (1 << 1)
#define VTA_DEBUG_DUMP_UOP (1 << 2)
#define VTA_DEBUG_SKIP_READ_BARRIER (1 << 3)
#define VTA_DEBUG_SKIP_WRITE_BARRIER (1 << 4)
#define VTA_DEBUG_FORCE_SERIAL (1 << 5)
#define ALLOC_ALIGNMENT 64
/*!
* \brief Allocate data buffer.
* \param size Buffer size.
* \return A pointer to the allocated buffer.
*/
TVM_DLL void* VTABufferAlloc(size_t size);
/*!
* \brief Free data buffer.
* \param buffer The data buffer to be freed.
*/
TVM_DLL void VTABufferFree(void* buffer);
/*!
* \brief Copy data buffer from one location to another.
* \param from The source buffer base address.
* \param from_offset The offset of the source buffer.
* \param to The target buffer base address.
* \param to_offset The offset of the target buffer.
* \param size Size of copy.
* \param kind_mask The memory copy kind.
*/
TVM_DLL void VTABufferCopy(const void* from, size_t from_offset, void* to, size_t to_offset,
size_t size, int kind_mask);
/*! \brief VTA command handle */
typedef void* VTACommandHandle;
/*! \brief Shutdown hook of VTA to cleanup resources */
TVM_DLL void VTARuntimeShutdown();
/*!
* \brief Get thread local command handle.
* \return A thread local command handle.
*/
TVM_DLL VTACommandHandle VTATLSCommandHandle();
/*!
* \brief Get the buffer access pointer on CPU.
* \param cmd The VTA command handle.
* \param buffer The data buffer.
* \return The pointer that can be accessed by the CPU.
*/
TVM_DLL void* VTABufferCPUPtr(VTACommandHandle cmd, void* buffer);
/*!
* \brief Perform a write barrier to make a memory region visible to the CPU.
* \param cmd The VTA command handle.
* \param buffer The head buffer pointer.
* \param elem_bits The size in bits of each element.
* \param start The start of the region (in elements).
 * \param extent The extent of the region (in elements).
*/
TVM_DLL void VTAWriteBarrier(VTACommandHandle cmd, void* buffer, uint32_t elem_bits, uint32_t start,
uint32_t extent);
/*!
* \brief Perform a read barrier to a memory region visible to VTA.
* \param cmd The VTA command handle.
* \param buffer The head buffer pointer.
* \param elem_bits The unit bits of each elements.
* \param start The start of the region (in elements).
 * \param extent The extent of the region (in elements).
*/
TVM_DLL void VTAReadBarrier(VTACommandHandle cmd, void* buffer, uint32_t elem_bits, uint32_t start,
uint32_t extent);
/*!
* \brief Set debug mode on the command handle.
* \param cmd The VTA command handle.
* \param debug_flag The debug flag.
*/
TVM_DLL void VTASetDebugMode(VTACommandHandle cmd, int debug_flag);
/*!
* \brief Perform a 2D data load from DRAM.
* Sizes are measured in units of vector elements.
* \param cmd The VTA command handle.
* \param src_dram_addr Source DRAM address.
* \param src_elem_offset The source DRAM offset in number of unit elements.
* \param x_size The lowest dimension (x axis) size in number of unit elements.
* \param y_size The number of rows (y axis).
* \param x_stride The x axis stride.
* \param x_pad_before The start padding on x axis.
* \param y_pad_before The start padding on y axis.
* \param x_pad_after The end padding on x axis.
* \param y_pad_after The end padding of y axis.
* \param dst_sram_index Destination SRAM index.
* \param dst_memory_type Destination memory type.
*/
TVM_DLL void VTALoadBuffer2D(VTACommandHandle cmd, void* src_dram_addr, uint32_t src_elem_offset,
uint32_t x_size, uint32_t y_size, uint32_t x_stride,
uint32_t x_pad_before, uint32_t y_pad_before, uint32_t x_pad_after,
uint32_t y_pad_after, uint32_t dst_sram_index,
uint32_t dst_memory_type);
/*!
* \brief Perform a 2D data store into DRAM
* Sizes are measured in units of vector elements.
* \param cmd The VTA command handle.
* \param src_sram_index Source SRAM index.
* \param src_memory_type Source memory type.
* \param dst_dram_addr Destination DRAM address.
* \param dst_elem_offset The destination DRAM offset in number of unit elements.
* \param x_size The lowest dimension (x axis) size in number of unit elements.
* \param y_size The number of rows.
* \param x_stride The x axis stride.
*/
TVM_DLL void VTAStoreBuffer2D(VTACommandHandle cmd, uint32_t src_sram_index,
uint32_t src_memory_type, void* dst_dram_addr,
uint32_t dst_elem_offset, uint32_t x_size, uint32_t y_size,
uint32_t x_stride);
/*!
* \brief Push uop into kernel buffer.
* In GEMM mode, do a blocked GEMM with 2d access pattern.
* In ALU mode, do a vectorized ALU operation with 2d access pattern.
*
* \code
*
* DType accum[INP_BUFF_DEPTH][l][n];
* DType weight[WGT_BUFF_DEPTH][n][m];
* DType input[INP_BUFF_DEPTH][l][m];
* if reset_out == 1
* accum[dst_index] = 0
* elif mode == 0
* accum[dst_index] += GEMM(input[src_index], weight[wgt_index]);
* else
* if (use_imm)
* accum[dst_index] = opcode(accum[dst_index], imm_val);
* else
* accum[dst_index] = opcode(accum[dst_index], accum[src_index]);
*
* \endcode
*
* \param mode Set to GEMM mode if set to 0, ALU mode is set to 1.
* \param reset_out Resets the accum to 0.
* \param dst_index The accum memory index.
* \param src_index The input memory (gemm) / accum memory (alu) index.
* \param wgt_index The weight memory index.
* \param opcode The ALU opcode.
* \param use_imm Use immediate in ALU mode if set to true.
* \param imm_val Immediate value in ALU mode.
*/
TVM_DLL void VTAUopPush(uint32_t mode, uint32_t reset_out, uint32_t dst_index, uint32_t src_index,
uint32_t wgt_index, uint32_t opcode, uint32_t use_imm, int32_t imm_val);
/*!
* \brief Mark start of a micro op loop.
* \param extent The extent of the loop.
* \param dst_factor The accum factor.
* \param src_factor The input factor.
* \param wgt_factor The weight factor.
*/
TVM_DLL void VTAUopLoopBegin(uint32_t extent, uint32_t dst_factor, uint32_t src_factor,
uint32_t wgt_factor);
/*!
* \brief Mark end of a micro op loop.
*/
TVM_DLL void VTAUopLoopEnd();
/*!
* \brief Push GEMM uop kernel into the command handle.
* \param uop_handle The uop cache handle.
 * \param finit The initialization function to initialize uop.
 * \param signature The closure arguments of the finit.
 * \param nbytes Number of bytes in the closure arguments.
* \return 0 if success.
*/
TVM_DLL int VTAPushGEMMOp(void** uop_handle, int (*finit)(void*), void* signature, int nbytes);
/*!
* \brief Push ALU uop kernel into the command handle.
* \param uop_handle The uop cache handle.
 * \param finit The initialization function to initialize uop.
 * \param signature The closure arguments of the finit.
 * \param nbytes Number of bytes in the closure arguments.
* \return 0 if success.
*/
TVM_DLL int VTAPushALUOp(void** uop_handle, int (*finit)(void*), void* signature, int nbytes);
/*!
* \brief Push dependence token.
* \param cmd The VTA command handle.
* \param from_qid The source queue.
* \param to_qid The destination queue.
* \return 0 if success.
*/
TVM_DLL int VTADepPush(VTACommandHandle cmd, int from_qid, int to_qid);
/*!
* \brief Pop dependence signal.
* \param cmd The VTA command handle.
* \param from_qid The source queue.
* \param to_qid The destination queue.
* \return 0 if success.
*/
TVM_DLL int VTADepPop(VTACommandHandle cmd, int from_qid, int to_qid);
/*!
* \brief Synchronize the command handle.
* Commit all the instructions to VTA and wait until
* the accelerator finishes its job.
* Perform all of the out-of-order DRAM stores.
* \param cmd The VTA command handle.
* \param wait_cycles The limit of poll cycles.
*
*/
TVM_DLL void VTASynchronize(VTACommandHandle cmd, uint32_t wait_cycles);
#ifdef __cplusplus
}
#endif
#endif // VTA_RUNTIME_RUNTIME_H_
| https://github.com/zk-ml/tachikoma |
vta/scripts/tune_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tuning a single conv2d operator"""
from collections import namedtuple
import logging
import os
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
import vta
import vta.testing
# VTA hardware configuration (BATCH, BLOCK_IN/BLOCK_OUT, target, ...)
# read from the active vta_config.
env = vta.get_env()

# Conv2D workload descriptor: NCHW input geometry plus kernel size,
# padding and stride, one tuple per tuning task.
Workload = namedtuple(
    "Conv2DWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
    ],
)

resnet_wkls = [
    # Workloads of resnet18 on imagenet
    # ('resnet-18.C1', Workload(env.BATCH, 224, 224, 3, 64, 7, 7, 3, 3, 2, 2)),
    ("resnet-18.C2", Workload(env.BATCH, 56, 56, 64, 64, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C3", Workload(env.BATCH, 56, 56, 64, 128, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C4", Workload(env.BATCH, 56, 56, 64, 128, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C5", Workload(env.BATCH, 28, 28, 128, 128, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C6", Workload(env.BATCH, 28, 28, 128, 256, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C7", Workload(env.BATCH, 28, 28, 128, 256, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C8", Workload(env.BATCH, 14, 14, 256, 256, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C9", Workload(env.BATCH, 14, 14, 256, 512, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C10", Workload(env.BATCH, 14, 14, 256, 512, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C11", Workload(env.BATCH, 7, 7, 512, 512, 3, 3, 1, 1, 1, 1)),
]
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Unlike topi's current clip, put min and max into two stages.

    Clamps every element of `x` into [a_min, a_max] using two separate
    elementwise compute stages ("clipA" for the upper bound, "clipB" for
    the lower bound).
    """
    lower = tvm.tir.const(a_min, x.dtype)
    upper = tvm.tir.const(a_max, x.dtype)
    clipped = te.compute(x.shape, lambda *idx: tvm.te.min(x(*idx), upper), name="clipA")
    clipped = te.compute(clipped.shape, lambda *idx: tvm.te.max(clipped(*idx), lower), name="clipB")
    return clipped
def conv2d(N, CI, H, W, CO, KH, KW, strides, padding, dilation):
    """AutoTVM task: build the schedule and tensors for one packed conv2d.

    Parameters
    ----------
    N, CI, H, W : int
        Batch, input channels, height and width of the input.
    CO, KH, KW : int
        Output channels and kernel height/width.
    strides, padding, dilation : tuple of int
        Conv2d geometry parameters forwarded to topi.

    Returns
    -------
    s, [data, kernel, bias, res]
        The schedule and the I/O tensors, as expected by autotvm.
    """
    # Shapes are pre-packed into VTA's tensorized layout:
    # NCHW -> (N//BATCH, C//BLOCK, H, W, BATCH, BLOCK).
    data_shape = (N // env.BATCH, CI // env.BLOCK_IN, H, W, env.BATCH, env.BLOCK_IN)
    kernel_shape = (CO // env.BLOCK_OUT, CI // env.BLOCK_IN, KH, KW, env.BLOCK_OUT, env.BLOCK_IN)
    bias_shape = (N // env.BATCH, CO // env.BLOCK_OUT, 1, 1, env.BATCH, env.BLOCK_OUT)
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    bias = te.placeholder(bias_shape, name="bias", dtype=env.acc_dtype)
    with tvm.target.vta():
        res = topi.nn.conv2d(
            input=data,
            filter=kernel,
            padding=padding,
            strides=strides,
            dilation=dilation,
            layout="NCHW%dn%dc" % (env.BATCH, env.BLOCK_IN),
            out_dtype=env.acc_dtype,
        )
        # Requantization: shift down the wide accumulator, add bias, clip to
        # the output range and cast to the narrow output dtype.
        res = topi.right_shift(res, env.WGT_WIDTH)
        res = topi.add(res, bias)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
    # Use the VTA-specific schedule when targeting the accelerator,
    # otherwise fall back to a default CPU schedule.
    if tvm.target.Target.current().device_name == "vta":
        s = topi.generic.schedule_conv2d_nchw([res])
    else:
        s = te.create_schedule([res.op])
    return s, [data, kernel, bias, res]
# Entry point: random-search tune every ResNet-18 conv2d workload against a
# remote VTA device registered with an AutoTVM RPC tracker.
if __name__ == "__main__":
    # Logging config (for printing tuning log to the screen)
    logging.basicConfig()
    # logging.getLogger('autotvm').setLevel(logging.DEBUG)
    # Tuning log files
    log_file = "%s.conv2d.log" % (env.TARGET)
    # create tmp log file
    tmp_log_file = log_file + ".tmp"
    if os.path.exists(log_file):
        os.remove(log_file)
    # Get tracker info from env
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    if not tracker_host or not tracker_port:
        print("Set your AutoTVM tracker node host and port variables to run the autotuner")
        exit()
    for idx, (wl_name, wl) in enumerate(resnet_wkls):
        prefix = "[Task %2d/%2d] " % (idx, len(resnet_wkls))
        # Read in workload parameters
        N = wl.batch
        CI = wl.in_filter
        H = wl.height
        W = wl.width
        CO = wl.out_filter
        KH = wl.hkernel
        KW = wl.wkernel
        strides = (wl.hstride, wl.wstride)
        padding = (wl.hpad, wl.wpad)
        dilation = (1, 1)
        # Create task
        task = autotvm.task.create(
            conv2d,
            args=(N, CI, H, W, CO, KH, KW, strides, padding, dilation),
            target=tvm.target.vta(),
            target_host=env.target_host,
            template_key="direct",
        )
        print(task.config_space)
        # Tune: build locally, measure on the tracker-registered device.
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        )
        # Run Tuner
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space), prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )
    # Pick best records to a cache file
    autotvm.record.pick_best(tmp_log_file, log_file)
    os.remove(tmp_log_file)
| https://github.com/zk-ml/tachikoma |
vta/scripts/tune_conv2d_transpose.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tuning a single conv2d transpose operator"""
from collections import namedtuple
import logging
import os
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
import vta
import vta.testing
# Get batch info from env
env = vta.get_env()
# Workload descriptor for a conv2d_transpose layer: batch size, spatial
# input size, channel counts, kernel size, padding, stride, and output padding.
Workload = namedtuple(
    "Conv2DTransposeWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
        "o_hpad",
        "o_wpad",
    ],
)
# DCGAN workloads
dcgan_wkls = [
    # dcgan
    ("DCGAN.CT1", Workload(env.BATCH, 4, 4, 1024, 512, 4, 4, 1, 1, 2, 2, 0, 0)),
    ("DCGAN.CT2", Workload(env.BATCH, 8, 8, 512, 256, 4, 4, 1, 1, 2, 2, 0, 0)),
    ("DCGAN.CT3", Workload(env.BATCH, 16, 16, 256, 128, 4, 4, 1, 1, 2, 2, 0, 0)),
]
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Clip ``x`` into ``[a_min, a_max]`` as two elementwise stages.

    Unlike topi's one-stage clip, the upper and lower bounds are applied
    in separate ``te.compute`` stages.
    """
    hi = tvm.tir.const(a_max, x.dtype)
    lo = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *idx: tvm.te.min(x(*idx), hi), name="clipA")
    return te.compute(capped.shape, lambda *idx: tvm.te.max(capped(*idx), lo), name="clipB")
def conv2d_transpose(N, CI, H, W, CO, KH, KW, strides, padding, opadding):
    """AutoTVM template: packed conv2d_transpose + shift/clip/cast on VTA.

    All channel/batch sizes must be divisible by the VTA tiling factors
    (env.BATCH, env.BLOCK_IN, env.BLOCK_OUT); tensors use VTA's packed
    layout.  Returns the schedule and the tensor argument list.
    """
    data_shape = (N // env.BATCH, CI // env.BLOCK_IN, H, W, env.BATCH, env.BLOCK_IN)
    kernel_shape = (CO // env.BLOCK_OUT, CI // env.BLOCK_IN, KH, KW, env.BLOCK_OUT, env.BLOCK_IN)
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    with tvm.target.vta():
        res = topi.nn.conv2d_transpose_nchw(
            Input=data,
            Filter=kernel,
            strides=strides,
            padding=padding,
            out_dtype=env.acc_dtype,
            output_padding=opadding,
        )
        # Requantize: shift out the weight width, clip to the output range,
        # then narrow to the VTA output dtype.
        res = topi.right_shift(res, env.WGT_WIDTH)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
    # Use the VTA-specific schedule only when actually compiling for VTA.
    if tvm.target.Target.current().device_name == "vta":
        s = topi.generic.schedule_conv2d_transpose_nchw([res])
    else:
        s = te.create_schedule([res.op])
    return s, [data, kernel, res]
if __name__ == "__main__":
    # Entry point: autotune every DCGAN conv2d_transpose workload on VTA
    # via the AutoTVM RPC tracker.
    # Logging config (for printing tuning log to the screen)
    logging.basicConfig()
    # logging.getLogger('autotvm').setLevel(logging.DEBUG)
    # Tuning log files
    log_file = "%s.conv2d_transpose.log" % (env.TARGET)
    # create tmp log file
    tmp_log_file = log_file + ".tmp"
    if os.path.exists(log_file):
        os.remove(log_file)
    # Get tracker info from env
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    if not tracker_host or not tracker_port:
        print("Set your AutoTVM tracker node host and port variables to run the autotuner")
        exit()
    for idx, (wl_name, wl) in enumerate(dcgan_wkls):
        prefix = "[Task %2d/%2d] " % (idx, len(dcgan_wkls))
        # Read in workload parameters
        N = wl.batch
        H = wl.height
        W = wl.width
        CI = wl.in_filter
        CO = wl.out_filter
        KH = wl.hkernel
        KW = wl.wkernel
        strides = (wl.hstride, wl.wstride)
        padding = (wl.hpad, wl.wpad)
        opadding = (wl.o_hpad, wl.o_wpad)
        # Create task
        # NOTE(review): template_key reflects the legacy autotvm task API —
        # confirm against the installed TVM version.
        task = autotvm.task.create(
            conv2d_transpose,
            args=(N, CI, H, W, CO, KH, KW, strides, padding, opadding),
            target=tvm.target.Target(tvm.target.vta(), host=env.target_host),
            template_key="direct",
        )
        print(task.config_space)
        # Tune
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        )
        # Run Tuner: exhaustive random search over the whole config space.
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space), prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )
        # Pick best records to a cache file
        autotvm.record.pick_best(tmp_log_file, log_file)
        os.remove(tmp_log_file)
| https://github.com/zk-ml/tachikoma |
vta/scripts/tune_dense.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tuning a single dense operator"""
from collections import namedtuple
import logging
import os
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
import vta
import vta.testing
env = vta.get_env()
# Dense workload descriptor: (batch, input features, output features).
Workload = namedtuple("DenseWorkload", ["batch", "in_filter", "out_filter"])
# LSTM-style dense layers to tune.
dense_wkls = [
    ("lstm.dense.1", Workload(1, 256, 128)),
    ("lstm.dense.4", Workload(4, 256, 128)),
]
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Clip ``x`` into ``[a_min, a_max]`` as two elementwise stages.

    Unlike topi's one-stage clip, the upper and lower bounds are applied
    in separate ``te.compute`` stages.
    """
    hi = tvm.tir.const(a_max, x.dtype)
    lo = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *idx: tvm.te.min(x(*idx), hi), name="clipA")
    return te.compute(capped.shape, lambda *idx: tvm.te.max(capped(*idx), lo), name="clipB")
def dense(N, CI, CO):
    """AutoTVM template: packed dense (matmul) + shift/clip/cast on VTA.

    N, CI, CO are the batch, input-feature, and output-feature sizes; they
    must be divisible by env.BATCH / env.BLOCK_IN / env.BLOCK_OUT.
    Returns the schedule and the tensor argument list.
    """
    data_shape = (N // env.BATCH, CI // env.BLOCK_IN, env.BATCH, env.BLOCK_IN)
    kernel_shape = (CO // env.BLOCK_OUT, CI // env.BLOCK_IN, env.BLOCK_OUT, env.BLOCK_IN)
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    with tvm.target.vta():
        res = topi.nn.dense(data, kernel, None, "int32")
        # NOTE(review): the shift/clip/dtype constants (8, 127, "int8") are
        # hard-coded here, while sibling templates derive them from
        # env.WGT_WIDTH / env.OUT_WIDTH — confirm they match the active
        # VTA configuration.
        res = topi.right_shift(res, 8)
        res = my_clip(res, 0, 127)
        res = topi.cast(res, "int8")
    # Use the VTA-specific schedule only when actually compiling for VTA.
    if tvm.target.Target.current().device_name == "vta":
        s = topi.generic.schedule_dense([res])
    else:
        s = te.create_schedule([res.op])
    return s, [data, kernel, res]
if __name__ == "__main__":
    # Entry point: autotune every dense workload on VTA via the AutoTVM
    # RPC tracker, writing the best schedule records to a log file.
    # Logging config (for printing tuning log to the screen)
    logging.basicConfig()
    # logging.getLogger('autotvm').setLevel(logging.DEBUG)
    # Tuning log files
    log_file = "%s.dense.log" % (env.TARGET)
    # create tmp log file
    tmp_log_file = log_file + ".tmp"
    if os.path.exists(log_file):
        os.remove(log_file)
    # Get tracker info from env.
    # Fix: these locals were misspelled "tracket_*"; renamed to
    # "tracker_*" for consistency with the sibling tuning scripts.
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    if not tracker_host or not tracker_port:
        print("Set your AutoTVM tracker node host and port variables to run the autotuner")
        exit()
    for idx, (wl_name, wl) in enumerate(dense_wkls):
        prefix = "[Task %2d/%2d] " % (idx, len(dense_wkls))
        # Workload parameters
        N = wl.batch
        CI = wl.in_filter
        CO = wl.out_filter
        # NOTE(review): target_host/template_key reflect the legacy autotvm
        # task API — confirm against the installed TVM version.
        task = autotvm.task.create(
            dense,
            args=(N, CI, CO),
            target=tvm.target.vta(),
            target_host=env.target_host,
            template_key="direct",
        )
        print(task.config_space)
        # Tune
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        )
        # Run Tuner: exhaustive random search over the whole config space.
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space), prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )
        # Pick best records to a cache file
        autotvm.record.pick_best(tmp_log_file, log_file)
        os.remove(tmp_log_file)
| https://github.com/zk-ml/tachikoma |
vta/scripts/tune_group_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tuning a single group conv2d operator"""
from collections import namedtuple
import logging
import os
import tvm
from tvm import te
from tvm import autotvm
from tvm import topi
import vta
import vta.testing
env = vta.get_env()
# Grouped conv2d workload descriptor: batch, spatial size, channel counts,
# group count, kernel size, padding, and stride.
Workload = namedtuple(
    "GroupConv2DWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "groups",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
    ],
)
# Mobilenet (grouped variant) workloads
mobilenet_wkls = [
    ("mobilenet.D1", Workload(env.BATCH, 112, 112, 32, 32, 2, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D2", Workload(env.BATCH, 112, 112, 64, 64, 4, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D3", Workload(env.BATCH, 56, 56, 128, 128, 8, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D4", Workload(env.BATCH, 56, 56, 128, 128, 8, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D5", Workload(env.BATCH, 28, 28, 256, 256, 16, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D6", Workload(env.BATCH, 28, 28, 256, 256, 16, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D7", Workload(env.BATCH, 14, 14, 512, 512, 32, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D8", Workload(env.BATCH, 14, 14, 512, 512, 32, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D9", Workload(env.BATCH, 7, 7, 1024, 1024, 64, 3, 3, 1, 1, 1, 1)),
]
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Clip ``x`` into ``[a_min, a_max]`` as two elementwise stages.

    Unlike topi's one-stage clip, the upper and lower bounds are applied
    in separate ``te.compute`` stages.
    """
    hi = tvm.tir.const(a_max, x.dtype)
    lo = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *idx: tvm.te.min(x(*idx), hi), name="clipA")
    return te.compute(capped.shape, lambda *idx: tvm.te.max(capped(*idx), lo), name="clipB")
def group_conv2d(N, CI, H, W, CO, KH, KW, strides, padding, dilation, group):
    """AutoTVM template: packed grouped conv2d + shift/bias/clip/cast on VTA.

    All channel/batch sizes must be divisible by the VTA tiling factors;
    ``group`` is the number of convolution groups.  Returns the schedule
    and the tensor argument list.
    """
    # Bug fix: the body referenced the undefined name `groups` (the
    # parameter is `group`), which silently resolved to the module-level
    # `groups` global set by the tuning loop — so the template could use a
    # different group count than the one it was created with.  Bind it
    # locally; the signature is kept unchanged for compatibility.
    groups = group
    CI_G = CI // groups
    data_shape = (N // env.BATCH, CI // env.BLOCK_IN, H, W, env.BATCH, env.BLOCK_IN)
    kernel_shape = (CO // env.BLOCK_OUT, CI_G // env.BLOCK_IN, KH, KW, env.BLOCK_OUT, env.BLOCK_IN)
    bias_shape = (N // env.BATCH, CO // env.BLOCK_OUT, 1, 1, env.BATCH, env.BLOCK_OUT)
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    bias = te.placeholder(bias_shape, name="bias", dtype=env.acc_dtype)
    with tvm.target.vta():
        res = topi.nn.group_conv2d_nchw(
            data, kernel, strides, padding, dilation, groups, env.acc_dtype
        )
        # Requantize: shift, add bias, clip to output range, narrow dtype.
        res = topi.right_shift(res, env.WGT_WIDTH)
        res = topi.add(res, bias)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
    # Use the VTA-specific schedule only when actually compiling for VTA.
    if tvm.target.Target.current().device_name == "vta":
        s = topi.generic.schedule_group_conv2d_nchw([res])
    else:
        s = te.create_schedule([res.op])
    return s, [data, kernel, bias, res]
if __name__ == "__main__":
    # Entry point: autotune every grouped-mobilenet conv2d workload on VTA
    # via the AutoTVM RPC tracker.
    # Logging config (for printing tuning log to the screen)
    logging.basicConfig()
    # Tuning log files
    log_file = "%s.group_conv2d.log" % (env.TARGET)
    # create tmp log file
    tmp_log_file = log_file + ".tmp"
    if os.path.exists(log_file):
        os.remove(log_file)
    # Get tracker info from env
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    if not tracker_host or not tracker_port:
        print("Set your AutoTVM tracker node host and port variables to run the autotuner")
        exit()
    for idx, (wl_name, wl) in enumerate(mobilenet_wkls):
        prefix = "[Task %2d/%2d] " % (idx, len(mobilenet_wkls))
        # Read in workload parameters
        N = wl.batch
        CI = wl.in_filter
        H = wl.height
        W = wl.width
        CO = wl.out_filter
        KH = wl.hkernel
        KW = wl.wkernel
        strides = (wl.hstride, wl.wstride)
        padding = (wl.hpad, wl.wpad)
        dilation = (1, 1)
        groups = wl.groups
        # Create task
        # NOTE(review): target_host/template_key reflect the legacy autotvm
        # task API — confirm against the installed TVM version.
        task = autotvm.task.create(
            group_conv2d,
            args=(N, CI, H, W, CO, KH, KW, strides, padding, dilation, groups),
            target=tvm.target.vta(),
            target_host=env.target_host,
            template_key="direct",
        )
        print(task.config_space)
        # Tune
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        )
        # Run Tuner: exhaustive random search over the whole config space.
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space), prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )
        # Pick best records to a cache file
        autotvm.record.pick_best(tmp_log_file, log_file)
        os.remove(tmp_log_file)
| https://github.com/zk-ml/tachikoma |
vta/scripts/tune_resnet.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Perform ResNet autoTVM tuning on VTA using Relay."""
import argparse, os, time
from mxnet.gluon.model_zoo import vision
import numpy as np
from PIL import Image
from tvm import topi
import tvm
from tvm import te
from tvm import rpc, autotvm, relay
from tvm.autotvm.measure.measure_methods import request_remote
from tvm.autotvm.tuner import XGBTuner, GATuner, RandomTuner, GridSearchTuner
from tvm.contrib import graph_executor, utils, download
from tvm.contrib.debugger import debug_executor
import vta
from vta.testing import simulator
from vta.top import graph_pack
from tvm.autotvm.task import extract_from_program
def parse_arguments():
    """Parse the command-line options for the ResNet-on-VTA tuning script.

    Returns the ``argparse.Namespace`` of parsed options.
    """
    # Table of (flag, add_argument kwargs); order determines --help order.
    specs = [
        (
            "--model",
            dict(
                type=str,
                default="resnet18_v1",
                choices=["resnet18_v1"],
                help="Input model name.",
            ),
        ),
        (
            "--start-name",
            dict(
                type=str,
                default="nn.max_pool2d",
                help="The name of the node where packing starts",
            ),
        ),
        (
            "--stop-name",
            dict(
                type=str,
                default="nn.global_avg_pool2d",
                help="The name of the node where packing stops",
            ),
        ),
        (
            "--debug-profile",
            dict(action="store_true", help="Show layer-wise time cost profiling results"),
        ),
        (
            "--device",
            dict(default="vta", choices=["vta", "arm_cpu"], help="Select device target"),
        ),
        (
            "--measurements",
            dict(type=int, default=1, help="Number of measurements during AutoTVM search"),
        ),
        ("--tuner", dict(type=str, default="random", help="AutoTVM search strategy")),
        (
            "--log-filename",
            dict(type=str, default="resnet-18.log", help="AutoTVM log file name"),
        ),
    ]
    cli = argparse.ArgumentParser(description="Train a model for image classification.")
    for flag, kwargs in specs:
        cli.add_argument(flag, **kwargs)
    return cli.parse_args()
def register_vta_tuning_tasks():
    """Register VTA-aware AutoTVM task templates.

    Overrides the stock "topi_nn_conv2d" / "topi_nn_dense" templates so
    that task extraction builds the quantized shift/clip/cast epilogue and
    picks the VTA schedule when compiling for the accelerator.  Called once
    before task extraction; its only effect is the registration side effect.
    """
    from tvm.autotvm.task.topi_integration import TaskExtractEnv, deserialize_args
    @tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
    def my_clip(x, a_min, a_max):
        """Unlike topi's current clip, put min and max into two stages."""
        const_min = tvm.tir.const(a_min, x.dtype)
        const_max = tvm.tir.const(a_max, x.dtype)
        x = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), const_max), name="clipA")
        x = te.compute(x.shape, lambda *i: tvm.te.max(x(*i), const_min), name="clipB")
        return x
    # init autotvm env to register VTA operator
    TaskExtractEnv()
    @autotvm.task.register("topi_nn_conv2d", override=True)
    def _topi_nn_conv2d(*args, **kwargs):
        # Template body: conv2d followed by the quantization epilogue.
        assert not kwargs, "Do not support kwargs in template function call"
        args = deserialize_args(args)
        A, W = args[:2]
        with tvm.target.vta():
            res = topi.nn.conv2d(*args, **kwargs)
            res = topi.right_shift(res, 8)
            res = my_clip(res, 0, 127)
            res = topi.cast(res, "int8")
        if tvm.target.Target.current().device_name == "vta":
            s = topi.generic.schedule_conv2d_nchw([res])
        else:
            s = te.create_schedule([res.op])
        return s, [A, W, res]
    @autotvm.task.register("topi_nn_dense", override=True)
    def _topi_nn_dense(*args, **kwargs):
        # Template body: dense followed by the quantization epilogue.
        assert not kwargs, "Do not support kwargs in template function call"
        args = deserialize_args(args)
        A, W = args[:2]
        with tvm.target.vta():
            res = topi.nn.dense(*args, **kwargs)
            res = topi.right_shift(res, 8)
            res = my_clip(res, 0, 127)
            res = topi.cast(res, "int8")
        if tvm.target.Target.current().device_name == "vta":
            s = topi.generic.schedule_dense([res])
        else:
            s = te.create_schedule([res.op])
        return s, [A, W, res]
def compile_network(opt, env, target):
    """Import, quantize, and (for VTA) graph-pack the model into Relay.

    Parameters: ``opt`` are the parsed CLI options, ``env`` the VTA
    environment, ``target`` the compilation target.  Returns the
    transformed Relay program and its parameters.
    """
    # Populate the shape and data type dictionary
    dtype_dict = {"data": "float32"}
    shape_dict = {"data": (env.BATCH, 3, 224, 224)}
    # Get off the shelf gluon model, and convert to relay
    gluon_model = vision.get_model(opt.model, pretrained=True)
    mod, params = relay.frontend.from_mxnet(gluon_model, shape_dict)
    # Update shape and type dictionary
    shape_dict.update({k: v.shape for k, v in params.items()})
    dtype_dict.update({k: str(v.dtype) for k, v in params.items()})
    # Perform quantization in Relay
    # Note: We set opt_level to 3 in order to fold batch norm
    with tvm.transform.PassContext(opt_level=3):
        with relay.quantize.qconfig(global_scale=8.0, skip_conv_layers=[0]):
            relay_prog = relay.quantize.quantize(mod["main"], params=params)
    # Perform graph packing and constant folding for VTA target
    if target.device_name == "vta":
        assert env.BLOCK_IN == env.BLOCK_OUT
        relay_prog = graph_pack(
            relay_prog,
            env.BATCH,
            env.BLOCK_OUT,
            env.WGT_WIDTH,
            start_name=opt.start_name,
            stop_name=opt.stop_name,
        )
    return relay_prog, params
def tune_tasks(
    tasks,
    measure_option,
    tuner="xgb",
    n_trial=1000,
    early_stopping=None,
    log_filename="tuning.log",
    use_transfer_learning=True,
    try_winograd=True,
):
    """Tune every extracted task and distill the best records.

    Intermediate measurements accumulate in ``log_filename + ".tmp"``;
    after all tasks finish, only the best record per workload is kept in
    ``log_filename``.
    """
    # Start from a clean temporary log.
    tmp_log_file = log_filename + ".tmp"
    if os.path.exists(tmp_log_file):
        os.remove(tmp_log_file)
    # Search strategies keyed by name; factories keep construction lazy so
    # only the selected tuner is ever instantiated.
    tuner_factories = {
        "xgb": lambda t: XGBTuner(t, loss_type="rank"),
        "xgb-rank": lambda t: XGBTuner(t, loss_type="rank"),
        "ga": lambda t: GATuner(t, pop_size=50),
        "random": lambda t: RandomTuner(t),
        "gridsearch": lambda t: GridSearchTuner(t),
    }
    for i, tsk in enumerate(reversed(tasks)):
        prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))
        if tuner not in tuner_factories:
            raise ValueError("Invalid tuner: " + tuner)
        tuner_obj = tuner_factories[tuner](tsk)
        # Warm-start from earlier tasks' measurements when requested.
        if use_transfer_learning and os.path.isfile(tmp_log_file):
            tuner_obj.load_history(autotvm.record.load_from_file(tmp_log_file))
        # Never ask for more trials than the config space contains.
        budget = min(n_trial, len(tsk.config_space))
        tuner_obj.tune(
            budget,
            early_stopping=early_stopping,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(budget, prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )
    # pick best records to a cache file
    autotvm.record.pick_best(tmp_log_file, log_filename)
    os.remove(tmp_log_file)
if __name__ == "__main__":
    # Entry point: program the FPGA (or start the simulator), extract
    # conv2d tasks from the quantized ResNet, autotune them, then compile
    # and time the network with the best schedules.
    opt = parse_arguments()
    # Make sure that TVM was compiled with RPC=1
    assert tvm.runtime.enabled("rpc")
    # Read in VTA environment
    env = vta.get_env()
    # Get remote from fleet node
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    if not tracker_host or not tracker_port:
        print("Set your AutoTVM tracker node host and port variables to run the autotuner")
        exit()
    # Get remote
    if env.TARGET != "sim":
        # Measure build start time
        reconfig_start = time.time()
        # Get remote from fleet node
        remote = autotvm.measure.request_remote(
            env.TARGET, tracker_host, int(tracker_port), timeout=10000
        )
        # Reconfigure the JIT runtime and FPGA.
        # You can program the FPGA with your own custom bitstream
        # by passing the path to the bitstream file instead of None.
        vta.reconfig_runtime(remote)
        vta.program_fpga(remote, bitstream=None)
        # Report on reconfiguration time
        reconfig_time = time.time() - reconfig_start
        print("Reconfigured FPGA and RPC runtime in {0:.2f}s!".format(reconfig_time))
    # In simulation mode, host the RPC server locally.
    else:
        remote = rpc.LocalSession()
    # VTA target and execution context
    target = env.target if opt.device == "vta" else env.target_vta_cpu
    ctx = remote.ext_dev(0) if opt.device == "vta" else remote.cpu(0)
    # Compile Relay program
    print("Initial compile...")
    relay_prog, params = compile_network(opt, env, target)
    # Register VTA tuning tasks
    register_vta_tuning_tasks()
    # Perform task extraction on Relay program
    print("Extracting tasks...")
    tasks = extract_from_program(
        func=relay_prog,
        params=params,
        ops=(relay.op.get("nn.conv2d"),),
        target=tvm.target.Target(target, host=env.target_host),
    )
    # Perform Autotuning
    print("Tuning...")
    tuning_opt = {
        "log_filename": opt.log_filename,
        "tuner": opt.tuner,
        "n_trial": 1e9,
        "early_stopping": None,
        "measure_option": autotvm.measure_option(
            builder=autotvm.LocalBuilder(build_func=vta.vta_autotvm_build_func),
            runner=autotvm.RPCRunner(
                env.TARGET,
                tracker_host,
                # NOTE(review): tracker_port is an env-var string here,
                # while the sibling scripts pass int(tracker_port) —
                # confirm RPCRunner accepts a string port.
                tracker_port,
                number=4,
                min_repeat_ms=150,
                repeat=opt.measurements,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        ),
    }
    tune_tasks(tasks, **tuning_opt)
    # Compile kernels with history best records
    with autotvm.tophub.context(target, extra_files=[opt.log_filename]):
        # Compile network
        print("Compiling network with best tuning parameters...")
        if target.device_name != "vta":
            with tvm.transform.PassContext(opt_level=3, disabled_pass={"AlterOpLayout"}):
                graph, lib, params = relay.build(
                    relay_prog,
                    target=tvm.target.Target(target, host=env.target_host),
                    params=params,
                )
        else:
            with vta.build_config(opt_level=3, disabled_pass={"AlterOpLayout"}):
                graph, lib, params = relay.build(
                    relay_prog,
                    target=tvm.target.Target(target, host=env.target_host),
                    params=params,
                )
        # Export library
        temp = utils.tempdir()
        lib.save(temp.relpath("graphlib.o"))
        remote.upload(temp.relpath("graphlib.o"))
        lib = remote.load_module("graphlib.o")
        # If detailed runtime info is needed build with debug runtime
        if opt.debug_profile:
            m = debug_executor.create(graph, lib, ctx)
        else:
            m = graph_executor.create(graph, lib, ctx)
        # Set the network parameters and synthetic input
        image = tvm.nd.array((np.random.uniform(size=(1, 3, 224, 224))).astype("float32"))
        m.set_input(**params)
        m.set_input("data", image)
        # Perform inference
        timer = m.module.time_evaluator("run", ctx, number=4, repeat=opt.measurements)
        tcost = timer()
        prof_res = np.array(tcost.results) * 1000  # convert to millisecond
        print(
            "Mean inference time (std dev): %.2f ms (%.2f ms)"
            % (np.mean(prof_res), np.std(prof_res))
        )
        # Display profile information
        if opt.debug_profile:
            m.run()
| https://github.com/zk-ml/tachikoma |
vta/tests/python/de10nano/test_program_rpc.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys, os
import tvm
from tvm import rpc
from vta import get_bitstream_path, download_bitstream, program_fpga, reconfig_runtime
# RPC address of the target board; override via VTA_RPC_HOST / VTA_RPC_PORT.
host = os.environ.get("VTA_RPC_HOST", "de10nano")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))
def program_rpc_bitstream(path=None):
    """Program the FPGA on the RPC server.

    Parameters
    ----------
    path : str, optional
        Path to the bitstream file; None selects the default bitstream.
    """
    assert tvm.runtime.enabled("rpc")
    remote = rpc.connect(host, port)
    program_fpga(remote, path)
def reconfig_rpc_runtime():
    """Reconfig the RPC server runtime over a fresh RPC connection."""
    assert tvm.runtime.enabled("rpc")
    remote = rpc.connect(host, port)
    reconfig_runtime(remote)
# Optional CLI argument: path to a custom bitstream to program.
bitstream = sys.argv[1] if len(sys.argv) == 2 else None
program_rpc_bitstream(bitstream)
reconfig_rpc_runtime()
| https://github.com/zk-ml/tachikoma |
vta/tests/python/integration/test_benchmark_gemm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import tvm.testing
from tvm import te
import numpy as np
from tvm.contrib import utils
import vta.testing
from vta.testing import simulator
def test_gemm():
def run_gemm_packed(env, remote, batch_size, channel, block):
data_shape = (batch_size // env.BATCH, channel // env.BLOCK_IN, env.BATCH, env.BLOCK_IN)
weight_shape = (
channel // env.BLOCK_OUT,
channel // env.BLOCK_IN,
env.BLOCK_OUT,
env.BLOCK_IN,
)
res_shape = (batch_size // env.BATCH, channel // env.BLOCK_OUT, env.BATCH, env.BLOCK_OUT)
# To compute number of ops, use a x2 factor for FMA
num_ops = 2 * channel * channel * batch_size
ko = te.reduce_axis((0, channel // env.BLOCK_IN), name="ko")
ki = te.reduce_axis((0, env.BLOCK_IN), name="ki")
data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
weight = te.placeholder(weight_shape, name="weight", dtype=env.wgt_dtype)
data_buf = te.compute(data_shape, lambda *i: data(*i), "data_buf")
weight_buf = te.compute(weight_shape, lambda *i: weight(*i), "weight_buf")
res_gem = te.compute(
res_shape,
lambda bo, co, bi, ci: te.sum(
data_buf[bo, ko, bi, ki].astype(env.acc_dtype)
* weight_buf[co, ko, ci, ki].astype(env.acc_dtype),
axis=[ko, ki],
),
name="res_gem",
)
res_shf = te.compute(res_shape, lambda *i: res_gem(*i) >> 8, name="res_shf")
res_max = te.compute(res_shape, lambda *i: tvm.te.max(res_shf(*i), 0), "res_max") # relu
res_min = te.compute(
res_shape, lambda *i: tvm.te.min(res_max(*i), (1 << (env.INP_WIDTH - 1)) - 1), "res_min"
) # relu
res = te.compute(res_shape, lambda *i: res_min(*i).astype(env.inp_dtype), name="res")
def verify(s):
mod = vta.build(
s,
[data, weight, res],
tvm.target.Target("ext_dev", host=env.target_host),
name="gemm",
)
temp = utils.tempdir()
mod.save(temp.relpath("gemm.o"))
remote.upload(temp.relpath("gemm.o"))
f = remote.load_module("gemm.o")
# verify
dev = remote.ext_dev(0)
# Data in original format
data_orig = np.random.randint(-128, 128, size=(batch_size, channel)).astype(data.dtype)
weight_orig = np.random.randint(-128, 128, size=(channel, channel)).astype(weight.dtype)
data_packed = data_orig.reshape(
batch_size // env.BATCH, env.BATCH, channel // env.BLOCK_IN, env.BLOCK_IN
).transpose((0, 2, 1, 3))
weight_packed = weight_orig.reshape(
channel // env.BLOCK_OUT, env.BLOCK_OUT, channel // env.BLOCK_IN, env.BLOCK_IN
).transpose((0, 2, 1, 3))
res_np = np.zeros(res_shape).astype(res.dtype)
data_arr = tvm.nd.array(data_packed, dev)
weight_arr = tvm.nd.array(weight_packed, dev)
res_arr = tvm.nd.array(res_np, dev)
res_ref = np.zeros(res_shape).astype(env.acc_dtype)
for b in range(batch_size // env.BATCH):
for i in range(channel // env.BLOCK_OUT):
for j in range(channel // env.BLOCK_IN):
res_ref[b, i, :] += np.dot(
data_packed[b, j, :].astype(env.acc_dtype),
weight_packed[i, j].T.astype(env.acc_dtype),
)
res_ref = np.right_shift(res_ref, 8)
res_ref = np.clip(res_ref, 0, (1 << (env.INP_WIDTH - 1)) - 1).astype(res.dtype)
time_f = f.time_evaluator("gemm", dev, number=20)
if env.TARGET in ["sim", "tsim"]:
simulator.clear_stats()
cost = time_f(data_arr, weight_arr, res_arr)
if env.TARGET in ["sim", "tsim"]:
stats = simulator.stats()
print("Execution statistics:")
for k, v in stats.items():
print("\t{:<16}: {:>16}".format(k, v))
res_unpack = res_arr.numpy().reshape(
batch_size // env.BATCH, channel // env.BLOCK_OUT, env.BATCH, env.BLOCK_OUT
)
return cost
def run_schedule(load_inp, load_wgt, gemm, alu, store_out, print_ir):
s = te.create_schedule(res.op)
s[data_buf].set_scope(env.inp_scope)
s[weight_buf].set_scope(env.wgt_scope)
s[res_gem].set_scope(env.acc_scope)
s[res_shf].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)
if block:
bblock = block // env.BATCH
iblock = block // env.BLOCK_IN
oblock = block // env.BLOCK_OUT
xbo, xco, xbi, xci = s[res].op.axis
xb1, xco1, xb2, xco2 = s[res].tile(xbo, xco, bblock, oblock)
store_pt = xb2
s[res_gem].compute_at(s[res], xco1)
s[res_shf].compute_at(s[res], xco1)
s[res_min].compute_at(s[res], xco1)
s[res_max].compute_at(s[res], xco1)
xbo, xco, xbi, xci = s[res_gem].op.axis
# Compute one line at a time
ko1, ko2 = s[res_gem].split(ko, iblock)
s[res_gem].reorder(ko1, ko2, xbo, xco, xbi, xci, ki)
s[data_buf].compute_at(s[res_gem], ko1)
s[weight_buf].compute_at(s[res_gem], ko1)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(store_pt, store_out)
else:
xbo, xco, xbi, xci = s[res_gem].op.axis
s[res_gem].reorder(ko, xbo, xco, xbi, xci, ki)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(s[res].op.axis[0], store_out)
if print_ir:
print(tvm.lower(s, [data, weight, res], simple_mode=True))
return verify(s)
def gemm_normal(print_ir):
    """End-to-end GEMM benchmark: all stages use real VTA intrinsics.

    Prints the measured time per op and the achieved GOPS.
    (Fixed: removed the unused local ``mock = env.mock`` — this variant
    mocks nothing out.)
    """
    print("----- GEMM GOPS End-to-End Test-------")
    def run_test(header, print_ir):
        # Real DMA loads/stores, real GEMM and ALU stages (nothing mocked).
        cost = run_schedule(
            env.dma_copy,
            env.dma_copy,
            env.gemm,
            env.alu,
            env.dma_copy,
            print_ir,
        )
        gops = (num_ops / cost.mean) / float(10**9)
        print(header)
        print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
    with vta.build_config():
        run_test("NORMAL", print_ir)
def gemm_unittest(print_ir):
    """Profile only the GEMM stage; DMA and ALU stages are mocked out."""
    mock = env.mock
    print("----- GEMM Unit Test-------")
    def _measure(header, print_ir):
        # Only env.gemm is real; every other stage is a mock no-op.
        cost = run_schedule(
            mock.dma_copy, mock.dma_copy, env.gemm, mock.alu, mock.dma_copy, print_ir
        )
        gops = num_ops / cost.mean / 1e9
        print(header)
        print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
    with vta.build_config():
        _measure("NORMAL", print_ir)
def alu_unittest(print_ir):
    """Profile only the ALU stage; DMA and GEMM stages are mocked out."""
    mock = env.mock
    print("----- ALU Unit Test-------")
    def _measure(header, print_ir):
        # Only env.alu is real; every other stage is a mock no-op.
        cost = run_schedule(
            mock.dma_copy, mock.dma_copy, mock.gemm, env.alu, mock.dma_copy, print_ir
        )
        gops = num_ops / cost.mean / 1e9
        print(header)
        print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
    with vta.build_config():
        _measure("NORMAL", print_ir)
    print("")
def load_inp_unittest(print_ir):
    """Profile only the input-load DMA stage; every other stage is mocked."""
    mock = env.mock
    print("----- LoadInp Unit Test-------")
    def _measure(header, print_ir):
        # Only the input DMA uses the real intrinsic.
        cost = run_schedule(
            env.dma_copy, mock.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir
        )
        gops = num_ops / cost.mean / 1e9
        bandwidth = batch_size * channel * env.INP_WIDTH / cost.mean / 1e9
        print(header)
        print(
            "\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits"
            % (cost.mean, gops, bandwidth)
        )
    with vta.build_config():
        _measure("NORMAL", print_ir)
    print("")
def load_wgt_unittest(print_ir):
    """Profile only the weight-load DMA stage; every other stage is mocked."""
    mock = env.mock
    print("----- LoadWgt Unit Test-------")
    def _measure(header, print_ir):
        # Only the weight DMA uses the real intrinsic.
        cost = run_schedule(
            mock.dma_copy, env.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir
        )
        gops = num_ops / cost.mean / 1e9
        bandwidth = channel * channel * env.WGT_WIDTH / cost.mean / 1e9
        print(header)
        print(
            "\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits"
            % (cost.mean, gops, bandwidth)
        )
    with vta.build_config():
        _measure("NORMAL", print_ir)
    print("")
def store_out_unittest(print_ir):
    """Profile only the result-store DMA stage; every other stage is mocked."""
    mock = env.mock
    print("----- StoreOut Unit Test-------")
    def _measure(header, print_ir):
        # Only the output DMA uses the real intrinsic.
        cost = run_schedule(
            mock.dma_copy, mock.dma_copy, mock.gemm, mock.alu, env.dma_copy, print_ir
        )
        gops = num_ops / cost.mean / 1e9
        bandwidth = batch_size * channel * env.OUT_WIDTH / cost.mean / 1e9
        print(header)
        print(
            "\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits"
            % (cost.mean, gops, bandwidth)
        )
    with vta.build_config():
        _measure("NORMAL", print_ir)
    print("")
# Run the end-to-end benchmark plus the GEMM and ALU unit tests.
# NOTE(review): load_inp/load_wgt/store_out unittests are defined above but
# never invoked here — confirm whether that is intentional before removing.
gemm_normal(False)
gemm_unittest(False)
alu_unittest(False)
# Entry point handed to the VTA test harness.
# NOTE(review): `run_gemm_packed` and `test_gemm` are defined outside this
# view — verify their signatures against these calls.
def _run(env, remote):
    print("========GEMM 128=========")
    run_gemm_packed(env, remote, 128, 128, 128)
vta.testing.run(_run)
if __name__ == "__main__":
    test_gemm()
| https://github.com/zk-ml/tachikoma |
vta/tests/python/integration/test_benchmark_topi_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Testing topi conv2d operator for VTA"""
import json
import os
import pytest
import numpy as np
from collections import namedtuple
import tvm
from tvm import te
from tvm import relay
from tvm import autotvm
from tvm.contrib import utils
from tvm.contrib.pickle_memoize import memoize
from tvm import topi
import tvm.topi.testing
import vta
from vta import program_fpga, reconfig_runtime
import vta.testing
from vta.testing import simulator
# Conv2d workload descriptor: NCHW layer shape plus kernel/pad/stride params.
Workload = namedtuple(
    "Conv2DWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
    ],
)
# Get batch info from env
env = vta.get_env()
# ResNet18 workloads
resnet_wkls = [
    # Workloads of resnet18 on imagenet
    # ('resnet-18.C1', Workload(env.BATCH, 224, 224, 3, 64, 7, 7, 3, 3, 2, 2)),
    # NOTE(review): C1 is left disabled — presumably because its 3 input
    # channels are smaller than env.BLOCK_IN; confirm before re-enabling.
    ("resnet-18.C2", Workload(env.BATCH, 56, 56, 64, 64, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C3", Workload(env.BATCH, 56, 56, 64, 128, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C4", Workload(env.BATCH, 56, 56, 64, 128, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C5", Workload(env.BATCH, 28, 28, 128, 128, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C6", Workload(env.BATCH, 28, 28, 128, 256, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C7", Workload(env.BATCH, 28, 28, 128, 256, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C8", Workload(env.BATCH, 14, 14, 256, 256, 3, 3, 1, 1, 1, 1)),
    ("resnet-18.C9", Workload(env.BATCH, 14, 14, 256, 512, 3, 3, 1, 1, 2, 2)),
    ("resnet-18.C10", Workload(env.BATCH, 14, 14, 256, 512, 1, 1, 0, 0, 2, 2)),
    ("resnet-18.C11", Workload(env.BATCH, 7, 7, 512, 512, 3, 3, 1, 1, 1, 1)),
]
# FIXME: we need a custom clip operator to circumvent a pattern detection limitation
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Two-stage clip: cap at ``a_max`` first (clipA), then floor at ``a_min`` (clipB).

    Unlike topi's one-shot clip, each bound is its own te.compute stage so
    the pattern matcher sees a single min/max op per stage.
    """
    upper = tvm.tir.const(a_max, x.dtype)
    lower = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), upper), name="clipA")
    floored = te.compute(capped.shape, lambda *i: tvm.te.max(capped(*i), lower), name="clipB")
    return floored
def run_conv2d(env, remote, wl, target, check_correctness=True, print_ir=False, samples=4):
    """Compile one conv2d workload, run it over RPC, and benchmark/verify it.

    Parameters
    ----------
    env : VTA environment providing hardware parameters (BATCH, BLOCK_IN,
        BLOCK_OUT, dtypes, TARGET, ...).
    remote : RPC session used to upload and execute the compiled module.
    wl : Conv2DWorkload namedtuple describing the layer.
    target : tvm.target.Target whose ``keys`` must contain "arm_cpu" or
        "vta" — otherwise ``data_pack`` is never assigned and a NameError
        is raised below.
    check_correctness : bool, compare the device result against a numpy
        reference when True.
    print_ir : bool, dump the lowered schedule for debugging.
    samples : int, number of timed runs for the time evaluator.

    Returns
    -------
    (correct, cost, stats) : correctness flag, time-evaluator result, and
        simulator statistics (empty dict outside sim/tsim targets).
    """
    # Workload assertions
    assert wl.hpad == wl.wpad
    # Perform packing only if we are targeting the accelerator
    if "arm_cpu" in target.keys:
        data_pack = False
        layout = "NCHW"
        conv2d_fcompute = topi.arm_cpu.conv2d_nchw_spatial_pack
        conv2d_fschedule = topi.arm_cpu.schedule_conv2d_nchw_spatial_pack
    elif "vta" in target.keys:
        data_pack = True
        layout = "NCHW%dn%dc" % (env.BATCH, env.BLOCK_IN)
        conv2d_fcompute = vta.top.conv2d_packed
        conv2d_fschedule = vta.top.schedule_conv2d_packed
    # Derive shapes depending upon packing
    a_shape = (wl.batch, wl.in_filter, wl.height, wl.width)
    w_shape = (wl.out_filter, wl.in_filter, wl.hkernel, wl.wkernel)
    b_shape = (wl.batch, wl.out_filter, 1, 1)
    if data_pack:
        # Packed 6-D layouts: the BATCH/BLOCK factors become the two
        # innermost axes, matching VTA's tensor core shape.
        data_shape = (
            wl.batch // env.BATCH,
            wl.in_filter // env.BLOCK_IN,
            wl.height,
            wl.width,
            env.BATCH,
            env.BLOCK_IN,
        )
        kernel_shape = (
            wl.out_filter // env.BLOCK_OUT,
            wl.in_filter // env.BLOCK_IN,
            wl.hkernel,
            wl.wkernel,
            env.BLOCK_OUT,
            env.BLOCK_IN,
        )
        bias_shape = (
            wl.batch // env.BATCH,
            wl.out_filter // env.BLOCK_OUT,
            1,
            1,
            env.BATCH,
            env.BLOCK_OUT,
        )
    else:
        data_shape = a_shape
        kernel_shape = w_shape
        bias_shape = b_shape
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    bias = te.placeholder(bias_shape, name="bias", dtype=env.acc_dtype)
    padding = relay.nn.get_pad_tuple2d((wl.hpad, wl.wpad))
    # Define base computation schedule
    with target:
        if data_pack:
            res = conv2d_fcompute(
                data, kernel, (wl.hstride, wl.wstride), padding, (1, 1), layout, env.acc_dtype
            )
        else:
            res = conv2d_fcompute(
                data, kernel, (wl.hstride, wl.wstride), padding, (1, 1), env.acc_dtype
            )
        # Requantize: shift down, add bias, clip to OUT_WIDTH range, cast.
        res = topi.right_shift(res, 8)
        res = topi.add(res, bias)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
        # Derive base schedule
        s = conv2d_fschedule([res])
        if print_ir:
            print(vta.lower(s, [data, kernel, bias, res], simple_mode=True))
    # Derive number of ops
    fout_height = (wl.height + 2 * wl.hpad - wl.hkernel) // wl.hstride + 1
    fout_width = (wl.width + 2 * wl.wpad - wl.wkernel) // wl.wstride + 1
    num_ops = (
        2
        * wl.batch
        * fout_height
        * fout_width
        * wl.hkernel
        * wl.wkernel
        * wl.out_filter
        * wl.in_filter
    )
    # @memoize("vta.tests.test_benchmark_topi.conv2d.verify_nhwc")
    def get_ref_data():
        # Generate random integer tensors in each dtype's range and compute
        # the numpy conv2d reference in the accumulator dtype.
        # derive min max for act, wgt, and bias types (max non inclusive)
        a_min, a_max = 0 - (1 << (env.INP_WIDTH - 1)), (1 << (env.INP_WIDTH - 1))
        w_min, w_max = 0 - (1 << (env.WGT_WIDTH - 1)), (1 << (env.WGT_WIDTH - 1))
        # Precedence note: `0 - 1 << k` parses as `(0 - 1) << k`, i.e. -(1 << k).
        b_min, b_max = 0 - 1 << (env.INP_WIDTH + env.WGT_WIDTH - 2), 1 << (
            env.INP_WIDTH + env.WGT_WIDTH - 2
        )
        a_np = np.random.randint(a_min, a_max, size=a_shape).astype(data.dtype)
        w_np = np.random.randint(w_min, w_max, size=w_shape).astype(kernel.dtype)
        b_np = np.random.randint(b_min, b_max, size=b_shape).astype(env.acc_dtype)
        r_np = tvm.topi.testing.conv2d_nchw_python(
            a_np.astype(env.acc_dtype),
            w_np.astype(env.acc_dtype),
            (wl.hstride, wl.wstride),
            wl.hpad,
        ).astype(env.acc_dtype)
        return a_np, w_np, b_np, r_np
    # Data in original format
    data_np, kernel_np, bias_np, res_ref = get_ref_data()
    if data_pack:
        # Repack the NCHW reference inputs into VTA's 6-D blocked layouts.
        data_np = data_np.reshape(
            wl.batch // env.BATCH,
            env.BATCH,
            wl.in_filter // env.BLOCK_IN,
            env.BLOCK_IN,
            wl.height,
            wl.width,
        ).transpose((0, 2, 4, 5, 1, 3))
        kernel_np = kernel_np.reshape(
            wl.out_filter // env.BLOCK_OUT,
            env.BLOCK_OUT,
            wl.in_filter // env.BLOCK_IN,
            env.BLOCK_IN,
            wl.hkernel,
            wl.wkernel,
        ).transpose((0, 2, 4, 5, 1, 3))
        bias_np = bias_np.reshape(
            wl.batch // env.BATCH, wl.out_filter // env.BLOCK_OUT, 1, 1, env.BATCH, env.BLOCK_OUT
        )
    # Build
    if "vta" in target.keys:
        with vta.build_config(disabled_pass={"tir.CommonSubexprElimTIR"}):
            mod = vta.build(
                s,
                [data, kernel, bias, res],
                target=tvm.target.Target(target, host=env.target_host),
                name="conv2d",
            )
    else:
        mod = tvm.build(
            s,
            [data, kernel, bias, res],
            target=tvm.target.Target(target, host=env.target_host),
            name="conv2d",
        )
    # Ship the compiled module to the remote device and bind input arrays.
    temp = utils.tempdir()
    mod.save(temp.relpath("conv2d.o"))
    remote.upload(temp.relpath("conv2d.o"))
    f = remote.load_module("conv2d.o")
    dev = remote.device(str(target))
    res_np = np.zeros(topi.utils.get_const_tuple(res.shape)).astype(res.dtype)
    data_arr = tvm.nd.array(data_np, dev)
    kernel_arr = tvm.nd.array(kernel_np, dev)
    bias_arr = tvm.nd.array(bias_np, dev)
    res_arr = tvm.nd.array(res_np, dev)
    time_f = f.time_evaluator("conv2d", dev, number=samples)
    # In vta sim mode, collect simulator runtime statistics
    stats = {}
    cost = None
    if env.TARGET in ["sim", "tsim"]:
        # Check if we're in local RPC mode (allows us to rebuild the
        # runtime on the fly when varying the VTA designs)
        local_rpc = int(os.environ.get("VTA_LOCAL_SIM_RPC", "0"))
        if local_rpc:
            if env.TARGET == "sim":
                remote.get_function("vta.simulator.profiler_clear")()
            else:
                remote.get_function("vta.tsim.profiler_clear")()
            cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
            if env.TARGET == "sim":
                stats = json.loads(remote.get_function("vta.simulator.profiler_status")())
            else:
                stats = json.loads(remote.get_function("vta.tsim.profiler_status")())
        else:
            simulator.clear_stats()
            cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
            stats = simulator.stats()
    else:
        cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
    # Check correctness
    correct = False
    if check_correctness:
        res_orig = res_arr.numpy()
        if data_pack:
            # Unpack the 6-D device result back to NCHW for comparison.
            res_orig = res_orig.transpose((0, 4, 1, 5, 2, 3)).reshape(
                wl.batch, wl.out_filter, fout_height, fout_width
            )
            bias_np = bias_np.transpose((0, 4, 1, 5, 2, 3)).reshape(wl.batch, wl.out_filter, 1, 1)
        # Apply the same shift/bias/clip/cast pipeline to the reference.
        res_ref = res_ref >> env.WGT_WIDTH
        res_ref += bias_np
        res_ref = np.clip(res_ref, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res_ref = res_ref.astype(env.out_dtype)
        correct = np.allclose(res_orig, res_ref)
    gops = (num_ops / cost.mean) / float(10**9)
    status = "PASSED" if correct else "FAILED"
    if "arm_cpu" in target.keys:
        device = "CPU"
    elif "vta" in target.keys:
        device = "VTA"
    print("%s CONV2D TEST %s: Time cost = %g sec/op, %g GOPS" % (device, status, cost.mean, gops))
    return correct, cost, stats
@pytest.mark.parametrize("device", ["vta", "arm_cpu"])
def test_conv2d(device):
    """Run every ResNet-18 conv2d workload on the requested device."""
    def _run(env, remote):
        if device == "vta":
            target = env.target
            # On real hardware (not sim/tsim/intelfocl), program the FPGA
            # and reconfigure the runtime over RPC first.
            if env.TARGET not in ["sim", "tsim", "intelfocl"]:
                assert tvm.runtime.enabled("rpc")
                program_fpga(remote, bitstream=None)
                reconfig_runtime(remote)
        elif device == "arm_cpu":
            target = env.target_vta_cpu
        with autotvm.tophub.context(target):  # load pre-tuned schedule parameters
            for _, wl in resnet_wkls:
                print(wl)
                run_conv2d(env, remote, wl, target)
    vta.testing.run(_run)
if __name__ == "__main__":
    test_conv2d(device="arm_cpu")
    test_conv2d(device="vta")
| https://github.com/zk-ml/tachikoma |
vta/tests/python/integration/test_benchmark_topi_conv2d_transpose.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Testing topi conv2d_transpose operator for VTA"""
import json
import os
import pytest
import numpy as np
from collections import namedtuple
import tvm
from tvm import te
from tvm import relay
from tvm import autotvm
from tvm.contrib import utils
from tvm.contrib.pickle_memoize import memoize
from tvm import topi
import tvm.topi.testing
import vta
from vta import program_fpga, reconfig_runtime
import vta.testing
from vta.testing import simulator
# Transposed-conv2d workload descriptor: conv2d fields plus output padding.
Workload = namedtuple(
    "Conv2DTransposeWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
        "o_hpad",
        "o_wpad",
    ],
)
# Get batch info from env
env = vta.get_env()
# DCGAN workloads
dcgan_wklds = [
    # dcgan
    ("DCGAN.CT1", Workload(env.BATCH, 4, 4, 1024, 512, 4, 4, 1, 1, 2, 2, 0, 0)),
    ("DCGAN.CT2", Workload(env.BATCH, 8, 8, 512, 256, 4, 4, 1, 1, 2, 2, 0, 0)),
    ("DCGAN.CT3", Workload(env.BATCH, 16, 16, 256, 128, 4, 4, 1, 1, 2, 2, 0, 0)),
]
# FIXME: we need a custom clip operator to circumvent a pattern detection limitation
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Two-stage clip: cap at ``a_max`` first (clipA), then floor at ``a_min`` (clipB).

    Unlike topi's one-shot clip, each bound is its own te.compute stage so
    the pattern matcher sees a single min/max op per stage.
    """
    upper = tvm.tir.const(a_max, x.dtype)
    lower = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), upper), name="clipA")
    floored = te.compute(capped.shape, lambda *i: tvm.te.max(capped(*i), lower), name="clipB")
    return floored
# Helper function to get factors
def _find_factors(n):
factors = []
for f in range(1, n + 1):
if n % f == 0:
factors.append(f)
return factors
def run_conv2d_transpose(
    env, remote, wl, target, check_correctness=True, print_ir=False, samples=4
):
    """Compile one transposed-conv2d workload, run it over RPC, and benchmark it.

    Mirrors ``run_conv2d``: picks the packed VTA or arm_cpu implementation
    from ``target.keys``, builds and uploads the module, times it for
    ``samples`` runs, optionally checks against a numpy reference, and
    returns ``(correct, cost, stats)``.

    NOTE(review): ``target.keys`` must contain "arm_cpu" or "vta";
    otherwise ``data_pack`` is never assigned and a NameError follows.
    """
    # Workload assertions
    assert wl.hpad == wl.wpad
    # Perform packing only if we are targeting the accelerator
    if "arm_cpu" in target.keys:
        data_pack = False
        layout = "NCHW"
        fcompute = topi.arm_cpu.conv2d_transpose_nchw
        fschedule = topi.arm_cpu.schedule_conv2d_transpose_nchw
    elif "vta" in target.keys:
        data_pack = True
        layout = "NCHW%dn%dc" % (env.BATCH, env.BLOCK_IN)
        fcompute = vta.top.conv2d_transpose_packed
        fschedule = vta.top.schedule_conv2d_transpose_packed
    # Derive shapes depending upon packing
    a_shape = (wl.batch, wl.in_filter, wl.height, wl.width)
    # Transposed conv kernels are stored IOHW (input channels first).
    w_shape = (wl.in_filter, wl.out_filter, wl.hkernel, wl.wkernel)
    if data_pack:
        data_shape = (
            wl.batch // env.BATCH,
            wl.in_filter // env.BLOCK_IN,
            wl.height,
            wl.width,
            env.BATCH,
            env.BLOCK_IN,
        )
        kernel_shape = (
            wl.out_filter // env.BLOCK_OUT,
            wl.in_filter // env.BLOCK_IN,
            wl.hkernel,
            wl.wkernel,
            env.BLOCK_OUT,
            env.BLOCK_IN,
        )
    else:
        data_shape = a_shape
        kernel_shape = w_shape
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    padding = relay.nn.get_pad_tuple2d((wl.hpad, wl.wpad))
    # Define base computation schedule
    with target:
        res = fcompute(
            data, kernel, (wl.hstride, wl.wstride), padding, env.acc_dtype, (wl.o_hpad, wl.o_wpad)
        )
        # Requantize: shift down, clip to OUT_WIDTH range, cast.
        res = topi.right_shift(res, env.WGT_WIDTH)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
        # Derive base schedule
        s = fschedule([res])
        if print_ir:
            print(vta.lower(s, [data, kernel, res], simple_mode=True))
    # Derive number of ops
    fout_height = (wl.height - 1) * wl.hstride - 2 * wl.hpad + wl.hkernel + wl.o_hpad
    fout_width = (wl.width - 1) * wl.wstride - 2 * wl.wpad + wl.wkernel + wl.o_wpad
    num_ops = (
        2
        * wl.batch
        * fout_height
        * fout_width
        * wl.hkernel
        * wl.wkernel
        * wl.out_filter
        * wl.in_filter
    )
    # @memoize("vta.tests.test_benchmark_topi.conv2d.verify_nhwc")
    def get_ref_data():
        # Random integer inputs in-range, numpy reference in acc dtype.
        # derive min max for act and wgt types (max non inclusive)
        a_min, a_max = 0 - (1 << (env.INP_WIDTH - 1)), (1 << (env.INP_WIDTH - 1))
        w_min, w_max = 0 - (1 << (env.WGT_WIDTH - 1)), (1 << (env.WGT_WIDTH - 1))
        a_np = np.random.randint(a_min, a_max, size=a_shape).astype(data.dtype)
        w_np = np.random.randint(
            w_min, w_max, size=(wl.in_filter, wl.out_filter, wl.hkernel, wl.wkernel)
        ).astype(kernel.dtype)
        r_np = tvm.topi.testing.conv2d_transpose_nchw_python(
            a_np.astype(env.acc_dtype),
            w_np.astype(env.acc_dtype),
            (wl.hstride, wl.wstride),
            wl.hpad,
            (wl.o_hpad, wl.o_wpad),
        ).astype(env.acc_dtype)
        return a_np, w_np, r_np
    # Data in original format
    data_np, kernel_np, res_ref = get_ref_data()
    if data_pack:
        # Repack inputs into VTA's 6-D blocked layouts.
        data_np = data_np.reshape(
            wl.batch // env.BATCH,
            env.BATCH,
            wl.in_filter // env.BLOCK_IN,
            env.BLOCK_IN,
            wl.height,
            wl.width,
        ).transpose((0, 2, 4, 5, 1, 3))
        kernel_np = kernel_np.reshape(
            wl.in_filter // env.BLOCK_IN,
            env.BLOCK_IN,
            wl.out_filter // env.BLOCK_OUT,
            env.BLOCK_OUT,
            wl.hkernel,
            wl.wkernel,
        ).transpose((2, 0, 4, 5, 3, 1))
        # Spatially flip the kernel so the packed op can run it as a conv.
        kernel_np = np.flip(kernel_np, 2)
        kernel_np = np.flip(kernel_np, 3)
    # Build
    if "vta" in target.keys:
        with vta.build_config(disabled_pass={"tir.CommonSubexprElimTIR"}):
            mod = vta.build(
                s,
                [data, kernel, res],
                target=target,
                target_host=env.target_host,
                name="conv2d_transpose",
            )
    else:
        mod = tvm.build(
            s,
            [data, kernel, res],
            target=target,
            target_host=env.target_host,
            name="conv2d_transpose",
        )
    # Ship module to the remote device and bind input arrays.
    temp = utils.tempdir()
    mod.save(temp.relpath("conv2d_transpose.o"))
    remote.upload(temp.relpath("conv2d_transpose.o"))
    f = remote.load_module("conv2d_transpose.o")
    dev = remote.device(str(target))
    res_np = np.zeros(topi.utils.get_const_tuple(res.shape)).astype(res.dtype)
    data_arr = tvm.nd.array(data_np, dev)
    kernel_arr = tvm.nd.array(kernel_np, dev)
    res_arr = tvm.nd.array(res_np, dev)
    time_f = f.time_evaluator("conv2d_transpose", dev, number=samples)
    # In vta sim mode, collect simulator runtime statistics
    stats = {}
    cost = None
    if env.TARGET in ["sim", "tsim"]:
        # Check if we're in local RPC mode (allows us to rebuild the
        # runtime on the fly when varying the VTA designs)
        local_rpc = int(os.environ.get("VTA_LOCAL_SIM_RPC", "0"))
        if local_rpc:
            if env.TARGET == "sim":
                remote.get_function("vta.simulator.profiler_clear")()
            else:
                remote.get_function("vta.tsim.profiler_clear")()
            cost = time_f(data_arr, kernel_arr, res_arr)
            if env.TARGET == "sim":
                stats = json.loads(remote.get_function("vta.simulator.profiler_status")())
            else:
                stats = json.loads(remote.get_function("vta.tsim.profiler_status")())
        else:
            simulator.clear_stats()
            cost = time_f(data_arr, kernel_arr, res_arr)
            stats = simulator.stats()
    else:
        cost = time_f(data_arr, kernel_arr, res_arr)
    # Check correctness
    correct = False
    if check_correctness:
        res_orig = res_arr.numpy()
        if data_pack:
            # Unpack the 6-D device result back to NCHW for comparison.
            res_orig = res_orig.transpose((0, 4, 1, 5, 2, 3)).reshape(
                wl.batch, wl.out_filter, fout_height, fout_width
            )
        # Apply the same shift/clip/cast pipeline to the reference.
        res_ref = res_ref >> env.WGT_WIDTH
        res_ref = np.clip(res_ref, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res_ref = res_ref.astype(env.out_dtype)
        correct = np.allclose(res_orig, res_ref)
    gops = (num_ops / cost.mean) / float(10**9)
    status = "PASSED" if correct else "FAILED"
    if "arm_cpu" in target.keys:
        device = "CPU"
    elif "vta" in target.keys:
        device = "VTA"
    print("%s CONV2D TEST %s: Time cost = %g sec/op, %g GOPS" % (device, status, cost.mean, gops))
    return correct, cost, stats
@pytest.mark.parametrize("device", ["vta", "arm_cpu"])
def test_conv2d_transpose(device):
    """Run every DCGAN transposed-conv2d workload on the requested device."""
    def _run(env, remote):
        if device == "vta":
            target = env.target
            # On real hardware (not sim/tsim), program the FPGA and
            # reconfigure the runtime over RPC first.
            if env.TARGET not in ["sim", "tsim"]:
                assert tvm.runtime.enabled("rpc")
                program_fpga(remote, bitstream=None)
                reconfig_runtime(remote)
        elif device == "arm_cpu":
            target = env.target_vta_cpu
        with autotvm.tophub.context(target):  # load pre-tuned schedule parameters
            for _, wl in dcgan_wklds:
                print(wl)
                run_conv2d_transpose(env, remote, wl, target)
    vta.testing.run(_run)
if __name__ == "__main__":
    test_conv2d_transpose(device="arm_cpu")
    test_conv2d_transpose(device="vta")
| https://github.com/zk-ml/tachikoma |
vta/tests/python/integration/test_benchmark_topi_dense.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Testing topi gemm operator for VTA"""
import os
import json
from collections import namedtuple
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm.contrib import utils
from tvm.contrib.pickle_memoize import memoize
from tvm import topi
import tvm.topi.testing
import vta
from vta import program_fpga, reconfig_runtime
import vta.testing
from vta.testing import simulator
# FIXME: we need a custom clip operator to circumvent a pattern detection limitation
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Two-stage clip: cap at ``a_max`` first (clipA), then floor at ``a_min`` (clipB).

    Unlike topi's one-shot clip, each bound is its own te.compute stage so
    the pattern matcher sees a single min/max op per stage.
    """
    upper = tvm.tir.const(a_max, x.dtype)
    lower = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), upper), name="clipA")
    floored = te.compute(capped.shape, lambda *i: tvm.te.max(capped(*i), lower), name="clipB")
    return floored
def run_gemm(
    env,
    remote,
    target,
    batch_size,
    in_feat,
    out_feat,
    check_correctness=True,
    print_ir=True,
    samples=4,
):
    """Compile a (batch_size x in_feat) @ (out_feat x in_feat)^T dense layer,
    run it over RPC, and benchmark/verify it.

    Chooses the packed VTA implementation or the x86 ``dense_nopack``
    implementation from ``target.keys``, builds the requantized dense op
    (right-shift by 8, clip, cast), times it for ``samples`` runs, and
    returns ``(correct, cost, stats)``.

    NOTE(review): ``target.keys`` must contain "arm_cpu" or "vta";
    otherwise ``data_pack`` is never assigned and a NameError follows.
    """
    # Perform packing only if we are targeting the accelerator
    if "arm_cpu" in target.keys:
        data_pack = False
    elif "vta" in target.keys:
        data_pack = True
    # Derive shapes depending upon packing
    a_shape = (batch_size, in_feat)
    w_shape = (out_feat, in_feat)
    if data_pack:
        # 4-D blocked layouts with BATCH/BLOCK factors innermost.
        data_shape = (batch_size // env.BATCH, in_feat // env.BLOCK_IN, env.BATCH, env.BLOCK_IN)
        kernel_shape = (
            out_feat // env.BLOCK_OUT,
            in_feat // env.BLOCK_IN,
            env.BLOCK_OUT,
            env.BLOCK_IN,
        )
        fcompute = vta.top.dense_packed
        fschedule = vta.top.schedule_dense_packed
    else:
        data_shape = a_shape
        kernel_shape = w_shape
        fcompute = topi.x86.dense_nopack
        fschedule = topi.x86.schedule_dense_nopack
    data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
    kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
    # Define base computation schedule
    with target:
        res = fcompute(data, kernel, None, env.acc_dtype)
        # Requantize: shift down, clip to OUT_WIDTH range, cast.
        res = topi.right_shift(res, 8)
        res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res = topi.cast(res, env.out_dtype)
        # Derive base schedule
        s = fschedule([res])
        if print_ir:
            print(vta.lower(s, [data, kernel, res], simple_mode=True))
    # Derive number of ops
    num_ops = 2 * batch_size * in_feat * out_feat
    # @memoize("vta.tests.test_benchmark_topi.dense.verify")
    def get_ref_data():
        # Random integer inputs in-range; reference is a plain matmul
        # against the transposed weight matrix.
        # derive min max for act, wgt types (max non inclusive)
        a_min, a_max = 0 - (1 << (env.INP_WIDTH - 1)), (1 << (env.INP_WIDTH - 1))
        w_min, w_max = 0 - (1 << (env.WGT_WIDTH - 1)), (1 << (env.WGT_WIDTH - 1))
        a_np = np.random.randint(a_min, a_max, size=a_shape).astype(data.dtype)
        w_np = np.random.randint(w_min, w_max, size=w_shape).astype(kernel.dtype)
        r_np = np.dot(a_np.astype(env.acc_dtype), w_np.T.astype(env.acc_dtype)).astype(
            env.acc_dtype
        )
        return a_np, w_np, r_np
    # Data in original format
    data_np, kernel_np, res_ref = get_ref_data()
    if data_pack:
        # Repack inputs into VTA's 4-D blocked layouts.
        data_np = data_np.reshape(
            batch_size // env.BATCH, env.BATCH, in_feat // env.BLOCK_IN, env.BLOCK_IN
        ).transpose((0, 2, 1, 3))
        kernel_np = kernel_np.reshape(
            out_feat // env.BLOCK_OUT, env.BLOCK_OUT, in_feat // env.BLOCK_IN, env.BLOCK_IN
        ).transpose((0, 2, 1, 3))
    # Build
    if "vta" in target.keys:
        mod = vta.build(
            s,
            [data, kernel, res],
            target=tvm.target.Target(target, host=env.target_host),
            name="dense",
        )
    else:
        mod = tvm.build(
            s,
            [data, kernel, res],
            target=tvm.target.Target(target, host=env.target_host),
            name="dense",
        )
    # Ship module to the remote device and bind input arrays.
    temp = utils.tempdir()
    mod.save(temp.relpath("dense.o"))
    remote.upload(temp.relpath("dense.o"))
    f = remote.load_module("dense.o")
    dev = remote.device(str(target))
    res_np = np.zeros(topi.utils.get_const_tuple(res.shape)).astype(res.dtype)
    data_arr = tvm.nd.array(data_np, dev)
    kernel_arr = tvm.nd.array(kernel_np, dev)
    res_arr = tvm.nd.array(res_np, dev)
    time_f = f.time_evaluator("dense", dev, number=samples)
    # In vta sim mode, collect simulator runtime statistics
    stats = {}
    cost = None
    if env.TARGET in ["sim", "tsim"]:
        # Check if we're in local RPC mode (allows us to rebuild the
        # runtime on the fly when varying the VTA designs)
        local_rpc = int(os.environ.get("VTA_LOCAL_SIM_RPC", "0"))
        if local_rpc:
            if env.TARGET == "sim":
                remote.get_function("vta.simulator.profiler_clear")()
            else:
                remote.get_function("vta.tsim.profiler_clear")()
            cost = time_f(data_arr, kernel_arr, res_arr)
            if env.TARGET == "sim":
                stats = json.loads(remote.get_function("vta.simulator.profiler_status")())
            else:
                stats = json.loads(remote.get_function("vta.tsim.profiler_status")())
        else:
            simulator.clear_stats()
            cost = time_f(data_arr, kernel_arr, res_arr)
            stats = simulator.stats()
    else:
        cost = time_f(data_arr, kernel_arr, res_arr)
    # Check correctness
    correct = False
    if check_correctness:
        res_orig = res_arr.numpy()
        if data_pack:
            res_orig = res_orig.reshape(batch_size, out_feat)
        # Apply the same shift/clip/cast pipeline to the reference.
        res_ref = res_ref >> 8
        res_ref = np.clip(res_ref, 0, (1 << env.OUT_WIDTH - 1) - 1)
        res_ref = res_ref.astype(env.out_dtype)
        correct = np.allclose(res_orig, res_ref)
    gops = (num_ops / cost.mean) / float(10**9)
    status = "PASSED" if correct else "FAILED"
    if "arm_cpu" in target.keys:
        device = "CPU"
    elif "vta" in target.keys:
        device = "VTA"
    print("%s DENSE TEST %s: Time cost = %g sec/op, %g GOPS" % (device, status, cost.mean, gops))
    return correct, cost, stats
def test_gemm(device="vta", batch=128, in_feat=128, out_feat=128):
    """Benchmark a single dense/GEMM workload on the requested device."""
    def _run(env, remote):
        if device == "vta":
            target = env.target
            # On real hardware (not sim/tsim), program the FPGA and
            # reconfigure the runtime over RPC first.
            if env.TARGET not in ["sim", "tsim"]:
                assert tvm.runtime.enabled("rpc")
                program_fpga(remote, bitstream=None)
                reconfig_runtime(remote)
        elif device == "arm_cpu":
            target = env.target_vta_cpu
        with autotvm.tophub.context(target):  # load pre-tuned schedule parameters
            run_gemm(env, remote, target, batch, in_feat, out_feat)
    vta.testing.run(_run)
if __name__ == "__main__":
    test_gemm("vta", 16, 512, 1008)
| https://github.com/zk-ml/tachikoma |
vta/tests/python/integration/test_benchmark_topi_group_conv2d.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Testing topi group conv2d operator for VTA"""
import json
import os
import pytest
import numpy as np
from collections import namedtuple
import tvm
from tvm import te
from tvm import relay
from tvm import autotvm
from tvm.contrib import utils
from tvm import topi
import tvm.topi.testing
import vta
from vta import program_fpga, reconfig_runtime
import vta.testing
from vta.testing import simulator
# Grouped-conv2d workload descriptor: conv2d fields plus a "groups" count.
Workload = namedtuple(
    "GroupConv2DWorkload",
    [
        "batch",
        "height",
        "width",
        "in_filter",
        "out_filter",
        "groups",
        "hkernel",
        "wkernel",
        "hpad",
        "wpad",
        "hstride",
        "wstride",
    ],
)
# Get batch info from env
env = vta.get_env()
# Mobilenet (grouped variant) workloads
mobilenet_wkls = [
    ("mobilenet.D1", Workload(env.BATCH, 112, 112, 32, 32, 2, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D2", Workload(env.BATCH, 112, 112, 64, 64, 4, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D3", Workload(env.BATCH, 56, 56, 128, 128, 8, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D4", Workload(env.BATCH, 56, 56, 128, 128, 8, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D5", Workload(env.BATCH, 28, 28, 256, 256, 16, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D6", Workload(env.BATCH, 28, 28, 256, 256, 16, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D7", Workload(env.BATCH, 14, 14, 512, 512, 32, 3, 3, 1, 1, 1, 1)),
    ("mobilenet.D8", Workload(env.BATCH, 14, 14, 512, 512, 32, 3, 3, 1, 1, 2, 2)),
    ("mobilenet.D9", Workload(env.BATCH, 7, 7, 1024, 1024, 64, 3, 3, 1, 1, 1, 1)),
]
# FIXME: we need a custom clip operator to circumvent a pattern detection limitation
@tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
def my_clip(x, a_min, a_max):
    """Two-stage clip: cap at ``a_max`` first (clipA), then floor at ``a_min`` (clipB).

    Unlike topi's one-shot clip, each bound is its own te.compute stage so
    the pattern matcher sees a single min/max op per stage.
    """
    upper = tvm.tir.const(a_max, x.dtype)
    lower = tvm.tir.const(a_min, x.dtype)
    capped = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), upper), name="clipA")
    floored = te.compute(capped.shape, lambda *i: tvm.te.max(capped(*i), lower), name="clipB")
    return floored
def run_group_conv2d(env, remote, wl, target, check_correctness=True, print_ir=False, samples=4):
# Workload assertions
assert wl.hpad == wl.wpad
# Perform packing only if we are targeting the accelerator
if "arm_cpu" in target.keys:
data_pack = False
layout = "NCHW"
fcompute = topi.nn.group_conv2d_nchw
fschedule = topi.generic.schedule_group_conv2d_nchw
elif "vta" in target.keys:
data_pack = True
layout = "NCHW%dn%dc" % (env.BATCH, env.BLOCK_IN)
fcompute = vta.top.group_conv2d_packed
fschedule = vta.top.schedule_group_conv2d_packed
# Derive shapes depending upon packing
CI_G = wl.in_filter // wl.groups
a_shape = (wl.batch, wl.in_filter, wl.height, wl.width)
w_shape = (wl.out_filter, CI_G, wl.hkernel, wl.wkernel)
b_shape = (wl.batch, wl.out_filter, 1, 1)
if data_pack:
data_shape = (
wl.batch // env.BATCH,
wl.in_filter // env.BLOCK_IN,
wl.height,
wl.width,
env.BATCH,
env.BLOCK_IN,
)
kernel_shape = (
wl.out_filter // env.BLOCK_OUT,
CI_G // env.BLOCK_IN,
wl.hkernel,
wl.wkernel,
env.BLOCK_OUT,
env.BLOCK_IN,
)
bias_shape = (
wl.batch // env.BATCH,
wl.out_filter // env.BLOCK_OUT,
1,
1,
env.BATCH,
env.BLOCK_OUT,
)
else:
data_shape = a_shape
kernel_shape = w_shape
bias_shape = b_shape
data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)
bias = te.placeholder(bias_shape, name="bias", dtype=env.acc_dtype)
padding = relay.nn.get_pad_tuple2d((wl.hpad, wl.wpad))
# Define base computation schedule
with target:
res = fcompute(
data, kernel, (wl.hstride, wl.wstride), padding, (1, 1), wl.groups, env.acc_dtype
)
res = topi.right_shift(res, 8)
res = topi.add(res, bias)
res = my_clip(res, 0, (1 << env.OUT_WIDTH - 1) - 1)
res = topi.cast(res, env.out_dtype)
# Derive base schedule
s = fschedule([res])
if print_ir:
print(vta.lower(s, [data, kernel, bias, res], simple_mode=True))
# Derive number of ops
fout_height = (wl.height + 2 * wl.hpad - wl.hkernel) // wl.hstride + 1
fout_width = (wl.width + 2 * wl.wpad - wl.wkernel) // wl.wstride + 1
num_ops = (
2
* wl.batch
* fout_height
* fout_width
* wl.hkernel
* wl.wkernel
* wl.out_filter
* wl.in_filter
// wl.groups
)
def get_ref_data():
# derive min max for act, wgt, and bias types (max non inclusive)
a_min, a_max = 0 - (1 << (env.INP_WIDTH - 1)), (1 << (env.INP_WIDTH - 1))
w_min, w_max = 0 - (1 << (env.WGT_WIDTH - 1)), (1 << (env.WGT_WIDTH - 1))
b_min, b_max = 0 - 1 << (env.INP_WIDTH + env.WGT_WIDTH - 2), 1 << (
env.INP_WIDTH + env.WGT_WIDTH - 2
)
a_np = np.random.randint(a_min, a_max, size=a_shape).astype(data.dtype)
w_np = np.random.randint(w_min, w_max, size=w_shape).astype(kernel.dtype)
b_np = np.random.randint(b_min, b_max, size=b_shape).astype(env.acc_dtype)
r_np = tvm.topi.testing.conv2d_nchw_python(
a_np.astype(env.acc_dtype),
w_np.astype(env.acc_dtype),
(wl.hstride, wl.wstride),
wl.hpad,
wl.groups,
).astype(env.acc_dtype)
return a_np, w_np, b_np, r_np
# Data in original format
data_np, kernel_np, bias_np, res_ref = get_ref_data()
if data_pack:
data_np = data_np.reshape(
wl.batch // env.BATCH,
env.BATCH,
wl.in_filter // env.BLOCK_IN,
env.BLOCK_IN,
wl.height,
wl.width,
).transpose((0, 2, 4, 5, 1, 3))
kernel_np = kernel_np.reshape(
wl.out_filter // env.BLOCK_OUT,
env.BLOCK_OUT,
CI_G // env.BLOCK_IN,
env.BLOCK_IN,
wl.hkernel,
wl.wkernel,
).transpose((0, 2, 4, 5, 1, 3))
bias_np = bias_np.reshape(
wl.batch // env.BATCH, wl.out_filter // env.BLOCK_OUT, 1, 1, env.BATCH, env.BLOCK_OUT
)
# Build
if "vta" in target.keys:
with vta.build_config(disabled_pass={"tir.CommonSubexprElimTIR"}):
mod = vta.build(
s,
[data, kernel, bias, res],
target=tvm.target.Target(target, host=env.target_host),
name="conv2d",
)
else:
mod = tvm.build(
s,
[data, kernel, bias, res],
target=tvm.target.Target(target, host=env.target_host),
name="conv2d",
)
temp = utils.tempdir()
mod.save(temp.relpath("conv2d.o"))
remote.upload(temp.relpath("conv2d.o"))
f = remote.load_module("conv2d.o")
dev = remote.device(str(target))
res_np = np.zeros(topi.utils.get_const_tuple(res.shape)).astype(res.dtype)
data_arr = tvm.nd.array(data_np, dev)
kernel_arr = tvm.nd.array(kernel_np, dev)
bias_arr = tvm.nd.array(bias_np, dev)
res_arr = tvm.nd.array(res_np, dev)
time_f = f.time_evaluator("conv2d", dev, number=samples)
# In vta sim mode, collect simulator runtime statistics
stats = {}
cost = None
if env.TARGET in ["sim", "tsim"]:
# Check if we're in local RPC mode (allows us to rebuild the
# runtime on the fly when varying the VTA designs)
local_rpc = int(os.environ.get("VTA_LOCAL_SIM_RPC", "0"))
if local_rpc:
if env.TARGET == "sim":
remote.get_function("vta.simulator.profiler_clear")()
else:
remote.get_function("vta.tsim.profiler_clear")()
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
if env.TARGET == "sim":
stats = json.loads(remote.get_function("vta.simulator.profiler_status")())
else:
stats = json.loads(remote.get_function("vta.tsim.profiler_status")())
else:
simulator.clear_stats()
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
stats = simulator.stats()
else:
cost = time_f(data_arr, kernel_arr, bias_arr, res_arr)
# Check correctness
correct = False
if check_correctness:
res_orig = res_arr.numpy()
if data_pack:
res_orig = res_orig.transpose((0, 4, 1, 5, 2, 3)).reshape(
wl.batch, wl.out_filter, fout_height, fout_width
)
bias_np = bias_np.transpose((0, 4, 1, 5, 2, 3)).reshape(wl.batch, wl.out_filter, 1, 1)
res_ref = res_ref >> env.WGT_WIDTH
res_ref += bias_np
res_ref = np.clip(res_ref, 0, (1 << env.OUT_WIDTH - 1) - 1)
res_ref = res_ref.astype(env.out_dtype)
correct = np.allclose(res_orig, res_ref)
gops = (num_ops / cost.mean) / float(10**9)
status = "PASSED" if correct else "FAILED"
if "arm_cpu" in target.keys:
device = "CPU"
elif "vta" in target.keys:
device = "VTA"
print(
"%s GROUP CONV2D TEST %s: Time cost = %g sec/op, %g GOPS"
% (device, status, cost.mean, gops)
)
return correct, cost, stats
@pytest.mark.parametrize("device", ["vta", "arm_cpu"])
def test_conv2d(device):
    """Run the mobilenet group-conv2d workloads on the requested device."""

    def _run(env, remote):
        # Pick the compilation target; for real FPGA targets (not the
        # simulators) the device must be (re)programmed over RPC first.
        if device == "vta":
            target = env.target
            if env.TARGET not in ["sim", "tsim"]:
                assert tvm.runtime.enabled("rpc")
                program_fpga(remote, bitstream=None)
                reconfig_runtime(remote)
        elif device == "arm_cpu":
            target = env.target_vta_cpu
        # load pre-tuned schedule parameters
        with autotvm.tophub.context(target):
            for _, workload in mobilenet_wkls:
                print(workload)
                run_group_conv2d(env, remote, workload, target)

    vta.testing.run(_run)
if __name__ == "__main__":
    # Exercise both the CPU reference path and the VTA accelerator path.
    test_conv2d(device="arm_cpu")
    test_conv2d(device="vta")
| https://github.com/zk-ml/tachikoma |
vta/tests/python/pynq/test_program_rpc.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import tvm
from tvm import te
from tvm import rpc
from vta import get_bitstream_path, download_bitstream, program_fpga, reconfig_runtime
host = os.environ.get("VTA_RPC_HOST", "pynq")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))
def program_rpc_bitstream(path=None):
    """Program the FPGA on the RPC server

    Parameters
    ----------
    path : path to bitstream (optional)
        When ``None`` the default bitstream for the current config is used.
    """
    assert tvm.runtime.enabled("rpc")
    # Open a fresh RPC session and flash the bitstream through it.
    session = rpc.connect(host, port)
    program_fpga(session, path)
def reconfig_rpc_runtime():
    """Rebuild/reconfigure the VTA runtime on the RPC server."""
    assert tvm.runtime.enabled("rpc")
    # A fresh RPC session is opened for the reconfiguration call.
    reconfig_runtime(rpc.connect(host, port))
# Script entry: program the FPGA and reconfigure the runtime when this
# module is executed (these run at import time as well — NOTE(review):
# confirm that is intentional, there is no __main__ guard here).
program_rpc_bitstream()
reconfig_rpc_runtime()
| https://github.com/zk-ml/tachikoma |
vta/tests/python/unittest/test_environment.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import vta
def test_env():
    """The mock environment must replace the ALU intrinsic with a no-op."""
    mock_env = vta.get_env().mock
    assert mock_env.alu == "skip_alu"
def test_env_scope():
    """A nested ``vta.Environment`` context overrides the config inside the
    ``with`` block and restores the previous environment on exit."""
    env = vta.get_env()
    # Copy the config dict before editing it: the original code mutated
    # env.cfg_dict in place, leaking TARGET="xyz" into the active
    # environment's own config as a test side effect.
    cfg = dict(env.cfg_dict)
    cfg["TARGET"] = "xyz"
    with vta.Environment(cfg):
        assert vta.get_env().TARGET == "xyz"
    # Outside the context the original environment is active again.
    assert vta.get_env().TARGET == env.TARGET
if __name__ == "__main__":
    # Run both environment tests when executed as a script.
    test_env()
    test_env_scope()
| https://github.com/zk-ml/tachikoma |
vta/tests/python/unittest/test_vta_insn.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit test VTA's instructions """
import tvm
from tvm import te
import numpy as np
from tvm import topi
from tvm.contrib import utils
import vta
import vta.testing
from vta.testing import simulator
np.random.seed(0xDEADB)
def test_save_load_out():
    """Test save/store output command.

    Round-trips a random tensor DRAM -> accumulator SRAM -> DRAM through a
    no-op ALU stage and checks the data survives unchanged.
    """

    def _run(env, remote):
        n = 6
        # Accumulator-typed input in the packed (n, n, BATCH, BLOCK_OUT) layout.
        x = te.placeholder((n, n, env.BATCH, env.BLOCK_OUT), name="x", dtype=env.acc_dtype)
        # Stage the input into the on-chip buffer.
        x_buf = te.compute((n, n, env.BATCH, env.BLOCK_OUT), lambda *i: x(*i), "x_buf")
        # insert no-op that won't be optimized away
        y_buf = te.compute((n, n, env.BATCH, env.BLOCK_OUT), lambda *i: x_buf(*i) >> 0, "y_buf")
        # Narrow back to the input dtype before the store to DRAM.
        y = te.compute(
            (n, n, env.BATCH, env.BLOCK_OUT), lambda *i: y_buf(*i).astype(env.inp_dtype), "y"
        )
        # schedule: place buffers in the accumulator scope and tag the DMA
        # transfers / ALU stage with the VTA pragmas.
        s = te.create_schedule(y.op)
        s[x_buf].set_scope(env.acc_scope)
        s[x_buf].pragma(x_buf.op.axis[0], env.dma_copy)
        s[y_buf].set_scope(env.acc_scope)
        s[y_buf].pragma(y_buf.op.axis[0], env.alu)
        s[y].pragma(y.op.axis[0], env.dma_copy)
        # verification
        with vta.build_config():
            m = vta.build(s, [x, y], tvm.target.Target("ext_dev", host=env.target_host))
        if not remote:
            return
        # Ship the compiled module to the remote device and load it back.
        temp = utils.tempdir()
        m.save(temp.relpath("load_act.o"))
        remote.upload(temp.relpath("load_act.o"))
        f = remote.load_module("load_act.o")
        # verify
        dev = remote.ext_dev(0)
        x_np = np.random.randint(1, 10, size=(n, n, env.BATCH, env.BLOCK_OUT)).astype(x.dtype)
        y_np = x_np.astype(y.dtype)
        x_nd = tvm.nd.array(x_np, dev)
        y_nd = tvm.nd.empty(y_np.shape, device=dev, dtype=y_np.dtype)
        if env.TARGET in ["sim", "tsim"]:
            simulator.clear_stats()
        f(x_nd, y_nd)
        # The load/store round trip must be lossless.
        np.testing.assert_equal(y_np, y_nd.numpy())
        if env.TARGET in ["sim", "tsim"]:
            sim_stats = simulator.stats()
            print("Save load execution statistics:")
            for k, v in sim_stats.items():
                print("\t{:<16}: {:>16}".format(k, v))

    vta.testing.run(_run)
def test_padded_load():
    """Test padded load.

    Loads a tensor with zero padding inserted by the DMA engine and checks
    the result against a NumPy reference with the input embedded at the
    padded offset.
    """

    def _run(env, remote):
        def check_padded_load(pad_before, pad_after, test_name=None):
            # declare
            n = 3
            m = 5
            x = te.placeholder((n, m, env.BATCH, env.BLOCK_OUT), name="x", dtype=env.acc_dtype)
            # Zero-pad while loading into the on-chip buffer.
            x_buf = topi.nn.pad(x, pad_before, pad_after, name="y")
            # insert no-op that won't be optimized away
            y_buf = te.compute(
                (
                    n + pad_before[0] + pad_after[0],
                    m + pad_before[1] + pad_after[1],
                    env.BATCH,
                    env.BLOCK_OUT,
                ),
                lambda *i: x_buf(*i) >> 0,
                "y_buf",
            )
            y = te.compute(
                (
                    n + pad_before[0] + pad_after[0],
                    m + pad_before[1] + pad_after[1],
                    env.BATCH,
                    env.BLOCK_OUT,
                ),
                lambda *i: y_buf(*i).astype(env.inp_dtype),
                "y",
            )
            # schedule
            s = te.create_schedule(y.op)
            s[x_buf].set_scope(env.acc_scope)
            s[x_buf].pragma(x_buf.op.axis[0], env.dma_copy)
            s[y_buf].set_scope(env.acc_scope)
            s[y_buf].pragma(y_buf.op.axis[0], env.alu)
            s[y].pragma(y.op.axis[0], env.dma_copy)
            # build
            with vta.build_config():
                mod = vta.build(s, [x, y], tvm.target.Target("ext_dev", host=env.target_host))
            if not remote:
                return
            temp = utils.tempdir()
            mod.save(temp.relpath("padded_load.o"))
            remote.upload(temp.relpath("padded_load.o"))
            f = remote.load_module("padded_load.o")
            # verify
            dev = remote.ext_dev(0)
            x_np = np.random.randint(0, 10, size=(n, m, env.BATCH, env.BLOCK_OUT)).astype(x.dtype)
            # Reference: the input embedded into an all-zero tensor at the
            # padded offset.
            y_np = np.zeros(
                (
                    n + pad_before[0] + pad_after[0],
                    m + pad_before[1] + pad_after[1],
                    env.BATCH,
                    env.BLOCK_OUT,
                )
            ).astype(y.dtype)
            y_np[pad_before[0] : pad_before[0] + n, pad_before[1] : pad_before[1] + m, :] = x_np
            x_nd = tvm.nd.array(x_np, dev)
            y_nd = tvm.nd.empty(y_np.shape, device=dev, dtype=y_np.dtype)
            if env.TARGET in ["sim", "tsim"]:
                simulator.clear_stats()
            f(x_nd, y_nd)
            np.testing.assert_equal(y_np, y_nd.numpy())
            if env.TARGET in ["sim", "tsim"]:
                sim_stats = simulator.stats()
                print("Padded {} load execution statistics:".format(test_name))
                for k, v in sim_stats.items():
                    print("\t{:<16}: {:>16}".format(k, v))

        # Pad each spatial edge individually, then all four together.
        check_padded_load([2, 0, 0, 0], [0, 0, 0, 0], test_name="Y0")
        check_padded_load([0, 2, 0, 0], [0, 0, 0, 0], test_name="Y1")
        check_padded_load([0, 0, 0, 0], [2, 0, 0, 0], test_name="X0")
        check_padded_load([0, 0, 0, 0], [0, 2, 0, 0], test_name="X1")
        check_padded_load([1, 1, 0, 0], [1, 1, 0, 0], test_name="all")

    vta.testing.run(_run)
def test_gemm():
    """Test GEMM.

    Builds a packed matrix multiply followed by shift/relu/clip stages,
    tensorizes it onto the VTA GEMM intrinsic, and verifies against a
    NumPy reference.  Two schedules are exercised: a default one and one
    using virtual threading (smt).
    """

    def _run(env, remote):
        # declare
        o = 4
        n = 1
        m = 4
        x = te.placeholder((o, n, env.BATCH, env.BLOCK_IN), name="x", dtype=env.inp_dtype)
        w = te.placeholder((m, n, env.BLOCK_OUT, env.BLOCK_IN), name="w", dtype=env.wgt_dtype)
        # Staging copies into the on-chip input/weight buffers.
        x_buf = te.compute((o, n, env.BATCH, env.BLOCK_IN), lambda *i: x(*i), "x_buf")
        w_buf = te.compute((m, n, env.BLOCK_OUT, env.BLOCK_IN), lambda *i: w(*i), "w_buf")
        ko = te.reduce_axis((0, n), name="ko")
        ki = te.reduce_axis((0, env.BLOCK_IN), name="ki")
        # Matrix multiply accumulated in the wide accumulator dtype.
        y_gem = te.compute(
            (o, m, env.BATCH, env.BLOCK_OUT),
            lambda bo, co, bi, ci: te.sum(
                x_buf[bo, ko, bi, ki].astype(env.acc_dtype)
                * w_buf[co, ko, ci, ki].astype(env.acc_dtype),
                axis=[ko, ki],
            ),
            name="y_gem",
        )
        # Requantization shift, then clamp into the signed input range.
        y_shf = te.compute(
            (o, m, env.BATCH, env.BLOCK_OUT), lambda *i: y_gem(*i) >> 8, name="y_shf"
        )
        y_max = te.compute(
            (o, m, env.BATCH, env.BLOCK_OUT), lambda *i: tvm.te.max(y_shf(*i), 0), "y_max"
        )  # relu
        y_min = te.compute(
            (o, m, env.BATCH, env.BLOCK_OUT),
            lambda *i: tvm.te.min(y_max(*i), (1 << (env.INP_WIDTH - 1)) - 1),
            "y_min",
        )  # relu
        y = te.compute(
            (o, m, env.BATCH, env.BLOCK_OUT), lambda *i: y_min(*i).astype(env.inp_dtype), name="y"
        )
        if not remote:
            return

        def verify(s, name=None):
            # Build with the CSE pass disabled as otherwise it would complicate the test
            with vta.build_config(disabled_pass={"tir.CommonSubexprElimTIR"}):
                mod = vta.build(s, [x, w, y], tvm.target.Target("ext_dev", host=env.target_host))
            temp = utils.tempdir()
            mod.save(temp.relpath("gemm.o"))
            remote.upload(temp.relpath("gemm.o"))
            f = remote.load_module("gemm.o")
            # verify
            dev = remote.ext_dev(0)
            x_np = np.random.randint(-128, 128, size=(o, n, env.BATCH, env.BLOCK_IN)).astype(
                x.dtype
            )
            w_np = np.random.randint(-128, 128, size=(m, n, env.BLOCK_OUT, env.BLOCK_IN)).astype(
                w.dtype
            )
            y_np = np.zeros((o, m, env.BATCH, env.BLOCK_OUT)).astype(y.dtype)
            x_nd = tvm.nd.array(x_np, dev)
            w_nd = tvm.nd.array(w_np, dev)
            y_nd = tvm.nd.array(y_np, dev)
            # NumPy reference: accumulate in the wide dtype, then apply the
            # same shift + clip pipeline as the device computation.
            y_np = y_np.astype(env.acc_dtype)
            for b in range(o):
                for i in range(m):
                    for j in range(n):
                        y_np[b, i, :] += np.dot(
                            x_np[b, j, :].astype(env.acc_dtype), w_np[i, j].T.astype(env.acc_dtype)
                        )
            y_np = np.right_shift(y_np, 8)
            y_np = np.clip(y_np, 0, (1 << (env.INP_WIDTH - 1)) - 1).astype(y.dtype)
            if env.TARGET in ["sim", "tsim"]:
                simulator.clear_stats()
            f(x_nd, w_nd, y_nd)
            np.testing.assert_equal(y_np, y_nd.numpy())
            if env.TARGET in ["sim", "tsim"]:
                sim_stats = simulator.stats()
                print("GEMM schedule:{} execution statistics:".format(name))
                for k, v in sim_stats.items():
                    print("\t{:<16}: {:>16}".format(k, v))

        def test_schedule1():
            # default schedule with no smt
            s = te.create_schedule(y.op)
            # set the scope of the SRAM buffers
            s[x_buf].set_scope(env.inp_scope)
            s[w_buf].set_scope(env.wgt_scope)
            s[y_gem].set_scope(env.acc_scope)
            s[y_shf].set_scope(env.acc_scope)
            s[y_max].set_scope(env.acc_scope)
            s[y_min].set_scope(env.acc_scope)
            # set pragmas for DMA transfer and ALU ops
            s[x_buf].compute_at(s[y_gem], ko)
            s[x_buf].pragma(s[x_buf].op.axis[0], env.dma_copy)
            s[w_buf].compute_at(s[y_gem], ko)
            s[w_buf].pragma(s[w_buf].op.axis[0], env.dma_copy)
            s[y_shf].pragma(s[y_shf].op.axis[0], env.alu)
            s[y_max].pragma(s[y_max].op.axis[0], env.alu)
            s[y_min].pragma(s[y_min].op.axis[0], env.alu)
            s[y].pragma(s[y].op.axis[0], env.dma_copy)
            # tensorization: move the outer reduction first so the inner
            # (batch, block) body matches the GEMM intrinsic.
            s[y_gem].reorder(
                ko,
                s[y_gem].op.axis[0],
                s[y_gem].op.axis[1],
                s[y_gem].op.axis[2],
                s[y_gem].op.axis[3],
                ki,
            )
            s[y_gem].tensorize(s[y_gem].op.axis[2], env.gemm)
            verify(s, name="default")

        def test_smt():
            # test smt schedule: split the outer batch axis across two
            # virtual threads ("cthread") to overlap compute and transfer.
            s = te.create_schedule(y.op)
            s[x_buf].set_scope(env.inp_scope)
            s[w_buf].set_scope(env.wgt_scope)
            s[y_gem].set_scope(env.acc_scope)
            s[y_shf].set_scope(env.acc_scope)
            s[y_max].set_scope(env.acc_scope)
            s[y_min].set_scope(env.acc_scope)
            abo, aco, abi, aci = s[y].op.axis
            abo1, abo2 = s[y].split(abo, nparts=2)
            s[y].bind(abo1, te.thread_axis("cthread"))
            s[y_gem].compute_at(s[y], abo1)
            s[y_shf].compute_at(s[y], abo1)
            s[y_max].compute_at(s[y], abo1)
            s[y_min].compute_at(s[y], abo1)
            s[y_gem].reorder(
                ko,
                s[y_gem].op.axis[0],
                s[y_gem].op.axis[1],
                s[y_gem].op.axis[2],
                s[y_gem].op.axis[3],
                ki,
            )
            s[y_gem].tensorize(s[y_gem].op.axis[2], env.gemm)
            s[y_shf].pragma(s[y_shf].op.axis[0], env.alu)
            s[y_max].pragma(s[y_max].op.axis[0], env.alu)
            s[y_min].pragma(s[y_min].op.axis[0], env.alu)
            s[x_buf].compute_at(s[y_gem], ko)
            s[x_buf].pragma(s[x_buf].op.axis[0], env.dma_copy)
            s[w_buf].compute_at(s[y_gem], ko)
            s[w_buf].pragma(s[w_buf].op.axis[0], env.dma_copy)
            s[y].pragma(abo2, env.dma_copy)
            verify(s, name="smt")

        test_schedule1()
        test_smt()

    vta.testing.run(_run)
def test_alu():
    """Exercise the VTA ALU instructions (immediate and tensor-tensor forms)
    and compare each against its NumPy equivalent."""

    def _run(env, remote):
        def check_alu(tvm_op, np_op=None, use_imm=False, test_name=None):
            """Test ALU"""
            m = 8
            n = 8
            imm = np.random.randint(1, 5)
            # compute
            a = te.placeholder((m, n, env.BATCH, env.BLOCK_OUT), name="a", dtype=env.acc_dtype)
            a_buf = te.compute(
                (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: a(*i), "a_buf"
            )  # DRAM->SRAM
            if use_imm:
                # Tensor-immediate variant of the ALU op.
                res_buf = te.compute(
                    (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: tvm_op(a_buf(*i), imm), "res_buf"
                )  # compute
            else:
                # Tensor-tensor variant needs a second staged operand.
                b = te.placeholder((m, n, env.BATCH, env.BLOCK_OUT), name="b", dtype=env.acc_dtype)
                b_buf = te.compute(
                    (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: b(*i), "b_buf"
                )  # DRAM->SRAM
                res_buf = te.compute(
                    (m, n, env.BATCH, env.BLOCK_OUT),
                    lambda *i: tvm_op(a_buf(*i), b_buf(*i)),
                    "res_buf",
                )  # compute5B
            res = te.compute(
                (m, n, env.BATCH, env.BLOCK_OUT),
                lambda *i: res_buf(*i).astype(env.inp_dtype),
                "res",
            )  # SRAM->DRAM
            # schedule
            s = te.create_schedule(res.op)
            s[a_buf].set_scope(env.acc_scope)  # SRAM
            s[a_buf].pragma(a_buf.op.axis[0], env.dma_copy)  # DRAM->SRAM
            s[res_buf].set_scope(env.acc_scope)  # SRAM
            s[res_buf].pragma(res_buf.op.axis[0], env.alu)  # compute
            s[res].pragma(res.op.axis[0], env.dma_copy)  # SRAM->DRAM
            if not use_imm:
                s[b_buf].set_scope(env.acc_scope)  # SRAM
                s[b_buf].pragma(b_buf.op.axis[0], env.dma_copy)  # DRAM->SRAM
            if not remote:
                return
            # build
            with vta.build_config():
                if use_imm:
                    mod = vta.build(s, [a, res], tvm.target.Target("ext_dev", host=env.target_host))
                else:
                    mod = vta.build(
                        s, [a, b, res], tvm.target.Target("ext_dev", host=env.target_host)
                    )
            temp = utils.tempdir()
            mod.save(temp.relpath("load_act.o"))
            remote.upload(temp.relpath("load_act.o"))
            f = remote.load_module("load_act.o")
            # verify
            dev = remote.ext_dev(0)
            a_np = np.random.randint(-16, 16, size=(m, n, env.BATCH, env.BLOCK_OUT)).astype(a.dtype)
            # np_op is the NumPy reference; when absent, tvm_op itself is
            # applied to the ndarray (works for plain +, <<, >> lambdas).
            if use_imm:
                res_np = np_op(a_np, imm) if np_op else tvm_op(a_np, imm)
            else:
                b_np = np.random.randint(-16, 16, size=(m, n, env.BATCH, env.BLOCK_OUT)).astype(
                    b.dtype
                )
                res_np = np_op(a_np, b_np) if np_op else tvm_op(a_np, b_np)
            res_np = res_np.astype(res.dtype)
            a_nd = tvm.nd.array(a_np, dev)
            res_nd = tvm.nd.array(np.zeros((m, n, env.BATCH, env.BLOCK_OUT)).astype(res.dtype), dev)
            if env.TARGET in ["sim", "tsim"]:
                simulator.clear_stats()
            if use_imm:
                f(a_nd, res_nd)
            else:
                b_nd = tvm.nd.array(b_np, dev)
                f(a_nd, b_nd, res_nd)
            np.testing.assert_equal(res_np, res_nd.numpy())
            if env.TARGET in ["sim", "tsim"]:
                sim_stats = simulator.stats()
                print("ALU {} execution statistics:".format(test_name))
                for k, v in sim_stats.items():
                    print("\t{:<16}: {:>16}".format(k, v))

        # Shift, max and add in both immediate and tensor-tensor forms.
        check_alu(lambda x, y: x << y, np.left_shift, use_imm=True, test_name="SHL")
        check_alu(tvm.te.max, np.maximum, use_imm=True, test_name="MAX")
        check_alu(tvm.te.max, np.maximum, test_name="MAX")
        check_alu(lambda x, y: x + y, use_imm=True, test_name="ADD")
        check_alu(lambda x, y: x + y, test_name="ADD")
        check_alu(lambda x, y: x >> y, np.right_shift, use_imm=True, test_name="SHR")

    vta.testing.run(_run)
def test_relu():
    """Test RELU on ALU.

    Implements relu as max(x, 0) followed by min(x, int8 max) on the ALU
    and checks against ``np.clip``.
    """

    def _run(env, remote):
        m = 8
        n = 10
        # compute
        a = te.placeholder((m, n, env.BATCH, env.BLOCK_OUT), name="a", dtype=env.acc_dtype)
        a_buf = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: a(*i), "a_buf"
        )  # DRAM->SRAM
        # NOTE(review): the te.compute name strings below look shuffled
        # (the max stage is named "res_buf", min is "max_buf", output is
        # "min_buf") — harmless, but confirm before relying on stage names.
        max_buf = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: tvm.te.max(a_buf(*i), 0), "res_buf"
        )  # relu
        min_buf = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT),
            lambda *i: tvm.te.min(max_buf(*i), (1 << (env.INP_WIDTH - 1)) - 1),
            "max_buf",
        )  # relu
        res = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT),
            lambda *i: min_buf(*i).astype(env.inp_dtype),
            "min_buf",
        )  # SRAM->DRAM
        # schedule
        s = te.create_schedule(res.op)
        s[a_buf].set_scope(env.acc_scope)  # SRAM
        s[a_buf].pragma(a_buf.op.axis[0], env.dma_copy)  # DRAM->SRAM
        s[max_buf].set_scope(env.acc_scope)  # SRAM
        s[min_buf].set_scope(env.acc_scope)  # SRAM
        s[max_buf].pragma(max_buf.op.axis[0], env.alu)  # compute
        s[min_buf].pragma(min_buf.op.axis[0], env.alu)  # compute
        s[res].pragma(res.op.axis[0], env.dma_copy)  # SRAM->DRAM
        # build
        with vta.build_config():
            mod = vta.build(s, [a, res], tvm.target.Target("ext_dev", host=env.target_host))
        if not remote:
            return
        temp = utils.tempdir()
        mod.save(temp.relpath("load_act.o"))
        remote.upload(temp.relpath("load_act.o"))
        f = remote.load_module("load_act.o")
        # verify
        dev = remote.ext_dev(0)
        a_np = np.random.randint(-256, 256, size=(m, n, env.BATCH, env.BLOCK_OUT)).astype(a.dtype)
        # Reference: clip into [0, int8 max].
        res_np = np.clip(a_np, 0, (1 << (env.INP_WIDTH - 1)) - 1).astype(res.dtype)
        a_nd = tvm.nd.array(a_np, dev)
        res_nd = tvm.nd.array(np.zeros((m, n, env.BATCH, env.BLOCK_OUT)).astype(res.dtype), dev)
        if env.TARGET in ["sim", "tsim"]:
            simulator.clear_stats()
        f(a_nd, res_nd)
        np.testing.assert_equal(res_np, res_nd.numpy())
        if env.TARGET in ["sim", "tsim"]:
            sim_stats = simulator.stats()
            print("Relu execution statistics:")
            for k, v in sim_stats.items():
                print("\t{:<16}: {:>16}".format(k, v))

    vta.testing.run(_run)
def test_shift_and_scale():
    """Test shift and scale on ALU.

    Adds an immediate offset then right-shifts by an immediate scale, and
    compares against the equivalent NumPy expression.
    """

    def _run(env, remote):
        m = 2
        n = 8
        imm_shift = np.random.randint(0, 8)
        imm_scale = np.random.randint(1, 5)
        # compute
        a = te.placeholder((m, n, env.BATCH, env.BLOCK_OUT), name="a", dtype=env.acc_dtype)
        a_buf = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: a(*i), "a_buf"
        )  # DRAM->SRAM
        res_shift = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: a_buf(*i) + imm_shift, "res_shift"
        )  # compute
        res_scale = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: res_shift(*i) >> imm_scale, "res_scale"
        )  # compute
        res = te.compute(
            (m, n, env.BATCH, env.BLOCK_OUT), lambda *i: res_scale(*i).astype(env.inp_dtype), "res"
        )  # SRAM->DRAM
        # schedule
        s = te.create_schedule(res.op)
        s[a_buf].set_scope(env.acc_scope)  # SRAM
        s[res_shift].set_scope(env.acc_scope)  # SRAM
        s[res_scale].set_scope(env.acc_scope)  # SRAM
        s[a_buf].pragma(a_buf.op.axis[0], env.dma_copy)  # DRAM->SRAM
        s[res_shift].pragma(res_shift.op.axis[0], env.alu)  # compute
        s[res_scale].pragma(res_scale.op.axis[0], env.alu)  # compute
        s[res].pragma(res.op.axis[0], env.dma_copy)  # SRAM->DRAM
        # build
        # NOTE(review): unlike the other tests in this file this builds
        # without a vta.build_config() context — confirm that is intentional.
        mod = vta.build(s, [a, res], tvm.target.Target("ext_dev", host=env.target_host))
        if not remote:
            return
        temp = utils.tempdir()
        mod.save(temp.relpath("load_act.o"))
        remote.upload(temp.relpath("load_act.o"))
        f = remote.load_module("load_act.o")
        # verify
        dev = remote.ext_dev(0)
        a_np = np.random.randint(-10, 10, size=(m, n, env.BATCH, env.BLOCK_OUT)).astype(a.dtype)
        res_np = np.right_shift((a_np + imm_shift), imm_scale)
        res_np = res_np.astype(res.dtype)
        a_nd = tvm.nd.array(a_np, dev)
        res_nd = tvm.nd.array(np.zeros((m, n, env.BATCH, env.BLOCK_OUT)).astype(res.dtype), dev)
        if env.TARGET in ["sim", "tsim"]:
            simulator.clear_stats()
        f(a_nd, res_nd)
        np.testing.assert_equal(res_np, res_nd.numpy())
        if env.TARGET in ["sim", "tsim"]:
            sim_stats = simulator.stats()
            print("Shift and scale execution statistics:")
            for k, v in sim_stats.items():
                print("\t{:<16}: {:>16}".format(k, v))

    vta.testing.run(_run)
def test_runtime_array():
    """Round-trip a random int8 tensor through VTA device memory."""

    def _run(env, remote):
        size = 100
        dev = remote.ext_dev(0)
        host_data = np.random.randint(
            1, 10, size=(size, size, env.BATCH, env.BLOCK_OUT)
        ).astype("int8")
        # Copy to the device and read back; the data must be unchanged.
        dev_array = tvm.nd.array(host_data, dev)
        np.testing.assert_equal(host_data, dev_array.numpy())

    vta.testing.run(_run)
if __name__ == "__main__":
    # Run every instruction-level test standalone: array round trip first,
    # then load/store, padded load, GEMM and the ALU variants.
    test_runtime_array()
    test_save_load_out()
    test_padded_load()
    test_gemm()
    test_alu()
    test_relu()
    test_shift_and_scale()
| https://github.com/zk-ml/tachikoma |
vta/tutorials/autotvm/tune_alu_vta.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Auto-tuning a ALU fused op on VTA
---------------------------------
"""
import os
from mxnet.gluon.model_zoo import vision
import numpy as np
from PIL import Image
from tvm import topi
import tvm
from tvm import te
from tvm import rpc, autotvm, relay
from tvm.contrib import download
from tvm.autotvm.measure.measure_methods import request_remote
from tvm.autotvm.tuner import XGBTuner, GATuner, RandomTuner, GridSearchTuner
from tvm.autotvm import record
import vta
from vta.testing import simulator
from vta.top import graph_pack
import copy
#################################################################
# Compile network
# ---------------
# Perform vta-specific compilation with Relay from a Gluon model
def compile_network(env, target, model, start_pack, stop_pack):
    """Compile a Gluon model into a quantized (and, for VTA, graph-packed)
    Relay program.

    Parameters
    ----------
    env : vta.Environment
        The active VTA environment (provides BATCH/BLOCK sizes).
    target : tvm.target.Target
        Compilation target; packing is applied only when its device is vta.
    model : str
        Gluon model-zoo model name.
    start_pack, stop_pack : str
        Op names delimiting the region offloaded to VTA.

    Returns
    -------
    (relay_prog, params) : the Relay function and its parameter dict.
    """
    # Populate the shape and data type dictionary
    dtype_dict = {"data": "float32"}
    shape_dict = {"data": (env.BATCH, 3, 224, 224)}
    # Get off the shelf gluon model, and convert to relay
    gluon_model = vision.get_model(model, pretrained=True)
    mod, params = relay.frontend.from_mxnet(gluon_model, shape_dict)
    # Update shape and type dictionary
    shape_dict.update({k: v.shape for k, v in params.items()})
    dtype_dict.update({k: str(v.dtype) for k, v in params.items()})
    # Perform quantization in Relay
    # Note: We set opt_level to 3 in order to fold batch norm
    with relay.build_config(opt_level=3):
        with relay.quantize.qconfig(global_scale=8.0, skip_conv_layers=[0]):
            mod = relay.quantize.quantize(mod, params=params)
    # Perform graph packing and constant folding for VTA target
    if target.device_name == "vta":
        assert env.BLOCK_IN == env.BLOCK_OUT
        relay_prog = graph_pack(
            mod["main"],
            env.BATCH,
            env.BLOCK_OUT,
            env.WGT_WIDTH,
            start_name=start_pack,
            stop_name=stop_pack,
        )
    else:
        # Fix: the original raised NameError here (relay_prog unbound) for
        # non-VTA targets; CPU targets need no packing.
        relay_prog = mod["main"]
    return relay_prog, params
###########################################
# Set Tuning Options
# ------------------
# Before tuning, we should apply some configurations.
# Here we use an Pynq-Z1 board as an example.
# Tracker host and port can be set by your environment
# Tracker host and port can be set by your environment
tracker_host = os.environ.get("TVM_TRACKER_HOST", "0.0.0.0")
tracker_port = int(os.environ.get("TVM_TRACKER_PORT", 9190))
# Load VTA parameters from the vta/config/vta_config.json file
env = vta.get_env()
# This target is used for cross compilation. You can query it by :code:`gcc -v` on your device.
# Set ``device=arm_cpu`` to run inference on the CPU
# or ``device=vta`` to run inference on the FPGA.
device = "vta"
target = env.target if device == "vta" else env.target_vta_cpu
# Name of Gluon model to compile
# The ``start_pack`` and ``stop_pack`` labels indicate where
# to start and end the graph packing relay pass: in other words
# where to start and finish offloading to VTA.
network = "resnet50_v2"
start_pack = "nn.max_pool2d"
stop_pack = "nn.global_avg_pool2d"
# Tuning options: random tuner, up to 1000 trials per task, measured
# remotely through the RPC tracker registered under env.TARGET.
log_file = "%s.alu.%s.log" % (device, network)
tuning_option = {
    "log_filename": log_file,
    "tuner": "random",
    "n_trial": 1000,
    "early_stopping": None,
    "measure_option": autotvm.measure_option(
        builder=autotvm.LocalBuilder(n_parallel=1),
        runner=autotvm.RPCRunner(
            env.TARGET,
            host=tracker_host,
            port=tracker_port,
            number=5,
            timeout=60,
            # check_correctness=True, # TODO: re-enable when check_correctness works again.
        ),
    ),
}
def log_to_file(file_out, protocol="json"):
    """Log the tuning records into file.

    The rows of the log are stored in the format of autotvm.record.encode.
    For a task whose lhs equals its rhs, an extra record with rhs = [] is
    appended so both forms can be looked up later.

    Parameters
    ----------
    file_out : str
        The file to log to.
    protocol: str, optional
        The log protocol. Can be 'json' or 'pickle'

    Returns
    -------
    callback : callable
        Callback function to do the logging.
    """

    def _callback(_, inputs, results):
        with open(file_out, "a") as out_file:
            for inp, result in zip(inputs, results):
                out_file.write(record.encode(inp, result, protocol) + "\n")
                # we only consider task with same lhs and rhs
                if inp.task.args[0] == inp.task.args[1]:
                    new_args = list(inp.task.args)
                    new_args[1] = (new_args[0][0], (), new_args[0][2])
                    duplicate = copy.deepcopy(inp)
                    duplicate.task.args = tuple(new_args)
                    out_file.write(record.encode(duplicate, result, protocol) + "\n")

    return _callback
def tune_tasks(
    tasks,
    measure_option,
    tuner="xgb",
    n_trial=10,
    early_stopping=None,
    log_filename="tuning.log",
    use_transfer_learning=True,
):
    """Tune every task in ``tasks``, keeping only the best records.

    Intermediate records accumulate in ``log_filename + ".tmp"``; at the end
    the best entry per workload is distilled into ``log_filename``.
    """
    # create tmp log file
    tmp_log_file = log_filename + ".tmp"
    if os.path.exists(tmp_log_file):
        os.remove(tmp_log_file)
    num_tasks = len(tasks)
    for i, tsk in enumerate(reversed(tasks)):
        prefix = "[Task %2d/%2d] " % (i + 1, num_tasks)
        # Instantiate the requested tuner for this task.
        if tuner in ("xgb", "xgb-rank"):
            tuner_obj = XGBTuner(tsk, loss_type="rank")
        elif tuner == "xgb_knob":
            tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob")
        elif tuner == "ga":
            tuner_obj = GATuner(tsk, pop_size=50)
        elif tuner == "random":
            tuner_obj = RandomTuner(tsk)
        elif tuner == "gridsearch":
            tuner_obj = GridSearchTuner(tsk)
        else:
            raise ValueError("Invalid tuner: " + tuner)
        # Warm-start from records gathered earlier in this run.
        if use_transfer_learning and os.path.isfile(tmp_log_file):
            tuner_obj.load_history(autotvm.record.load_from_file(tmp_log_file))
        # do tuning
        trials = min(n_trial, len(tsk.config_space))
        tuner_obj.tune(
            n_trial=trials,
            early_stopping=early_stopping,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(trials, prefix=prefix),
                log_to_file(tmp_log_file),
            ],
        )
    # pick best records to a cache file
    autotvm.record.pick_best(tmp_log_file, log_filename)
    os.remove(tmp_log_file)
########################################################################
# Register VTA-specific tuning tasks
def register_vta_tuning_tasks():
    """Register the VTA-specific autotvm templates ("add.vta" and
    "multiply.vta") used for ALU op tuning."""
    from tvm.autotvm.task import TaskExtractEnv

    @tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
    def my_clip(x, a_min, a_max):
        """Unlike topi's current clip, put min and max into two stages."""
        const_min = tvm.tir.const(a_min, x.dtype)
        const_max = tvm.tir.const(a_max, x.dtype)
        x = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), const_max), name="clipA")
        x = te.compute(x.shape, lambda *i: tvm.te.max(x(*i), const_min), name="clipB")
        return x

    # init autotvm env to register VTA operator
    TaskExtractEnv()

    @autotvm.template("add.vta")
    def _topi_add(*args, **kwargs):
        assert not kwargs, "Do not support kwargs in template function call"
        A, B = args[:2]
        # Compute packed add + clip-to-int8 under the VTA target.
        with tvm.target.vta():
            res = vta.top.op.add_packed(*args, **kwargs)
            res = my_clip(res, 0, 127)
            res = topi.cast(res, "int8")
        # Use the VTA schedule on device, a plain TE schedule otherwise.
        if tvm.target.Target.current().device_name == "vta":
            s = vta.top.op.schedule_add_packed([res])
        else:
            s = te.create_schedule([res.op])
        return s, [A, B, res]

    @autotvm.template("multiply.vta")
    def _topi_multiply(*args, **kwargs):
        assert not kwargs, "Do not support kwargs in template function call"
        A, B = args[:2]
        # Same structure as _topi_add, for the packed multiply operator.
        with tvm.target.vta():
            res = vta.top.op.multiply_packed(*args, **kwargs)
            res = my_clip(res, 0, 127)
            res = topi.cast(res, "int8")
        if tvm.target.Target.current().device_name == "vta":
            s = vta.top.op.schedule_multiply_packed([res])
        else:
            s = te.create_schedule([res.op])
        return s, [A, B, res]
########################################################################
# Finally, we launch tuning jobs and evaluate the end-to-end performance.
def tune_and_evaluate(tuning_opt):
    """Extract ALU (add/multiply) tuning tasks from the network and tune them.

    Only runs when the VTA target is "intelfocl"; for any other target it
    prints a message and returns immediately.
    """
    if env.TARGET != "intelfocl":
        print("ALU only op only available for intelfocl target")
        return
    # Register VTA tuning tasks
    register_vta_tuning_tasks()
    # Perform task extraction on Relay program
    print("Extract tasks...")
    relay_prog, params = compile_network(env, target, network, start_pack, stop_pack)
    mod = tvm.IRModule.from_expr(relay_prog)
    tasks = autotvm.task.extract_from_program(
        mod,
        params=params,
        ops=(
            relay.op.get("add"),
            relay.op.get("multiply"),
        ),
        target=tvm.target.Target(target, host=env.target_host),
    )
    # filter out non-packed alu task: packed tensors carry extra inner
    # dimensions, so their shape tuple has more than 4 entries
    tasks = list(filter(lambda t: len(t.args[0][1]) > 4, tasks))
    # filter out float alu task
    tasks = list(filter(lambda t: t.args[0][2] != "float32", tasks))
    # Deduplicate the extracted ALU tasks by (name, args).
    tasks_set = {}
    print("Extracted {} alu tasks:".format(len(tasks)))
    for tsk in tasks:
        print("tsk = ", tsk)
        # If the second operand has an empty shape (presumably a scalar
        # broadcast — TODO confirm), substitute the first operand's
        # descriptor so the task signature is well-formed.
        if len(tsk.args[1][1]) == 0:
            args = list(tsk.args)
            args[1] = args[0]
            tsk.args = tuple(args)
        if (tsk.name, tsk.args) in tasks_set:
            print("task {} already exists".format(tsk))
        tasks_set[(tsk.name, tsk.args)] = tsk
    tasks = list(tasks_set.values())
    print("After merged, final #tasks={}, tasks = {}".format(len(tasks), tasks))
    # run tuning tasks
    print("Tuning...")
    tune_tasks(tasks, **tuning_opt)
# Run the tuning and evaluate the results
tune_and_evaluate(tuning_option)
| https://github.com/zk-ml/tachikoma |
vta/tutorials/autotvm/tune_relay_vta.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Auto-tuning a convolutional network on VTA
==========================================
**Author**: `Lianmin Zheng <https://github.com/merrymercy>`_, `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
Auto-tuning for a specific accelerator design is critical for getting the best
performance for any given operator. This tutorial showcases how to tune a
whole convolutional network on VTA.
The operator implementation for VTA in TVM is written in template form.
The template has many tunable knobs (tile factor, virtual threads, etc).
We will tune all convolution operators in the neural network. After tuning,
we produce a log file which stores the best schedule parameters for all tuned
operators. When the TVM compiler compiles these operators, it will query this
log file to get the best knob parameters.
"""
######################################################################
# Install dependencies
# --------------------
# To use the autotvm package in tvm, we need to install some extra dependencies.
# (change "3" to "2" if you use python2):
#
# .. code-block:: bash
#
# pip3 install --user psutil xgboost tornado mxnet requests "Pillow<7" cloudpickle
#
# To make TVM run faster during tuning, it is recommended to use cython
# as FFI of TVM. In the root directory of TVM, execute
# (change "3" to "2" if you use python2):
#
# .. code-block:: bash
#
# pip3 install --user cython
# sudo make cython3
#
# Now return to python code. Import packages.
import os
from mxnet.gluon.model_zoo import vision
import numpy as np
from PIL import Image
from tvm import topi
import tvm
from tvm import te
from tvm import rpc, autotvm, relay
from tvm.contrib import graph_executor, utils, download
from tvm.autotvm.measure.measure_methods import request_remote
from tvm.autotvm.tuner import XGBTuner, GATuner, RandomTuner, GridSearchTuner
import vta
from vta.testing import simulator
from vta.top import graph_pack
#################################################################
# Compile network
# ---------------
# Perform vta-specific compilation with Relay from a Gluon model
def compile_network(env, target, model, start_pack, stop_pack):
    """Compile a Gluon vision model to a quantized Relay function.

    Parameters
    ----------
    env : the VTA environment (provides BATCH, BLOCK_OUT, WGT_WIDTH, ...).
    target : tvm.target.Target; graph packing is applied only when its
        device_name is "vta".
    model : str, Gluon model-zoo model name.
    start_pack, stop_pack : str, operator names delimiting the region of
        the graph offloaded to VTA by ``graph_pack``.

    Returns
    -------
    (relay_prog, params) : the (possibly packed) Relay function and the
        quantized parameter dict.
    """
    # Populate the shape and data type dictionary
    dtype_dict = {"data": "float32"}
    shape_dict = {"data": (env.BATCH, 3, 224, 224)}
    # Get off the shelf gluon model, and convert to relay
    gluon_model = vision.get_model(model, pretrained=True)
    mod, params = relay.frontend.from_mxnet(gluon_model, shape_dict)
    # Update shape and type dictionary
    shape_dict.update({k: v.shape for k, v in params.items()})
    dtype_dict.update({k: str(v.dtype) for k, v in params.items()})
    # Perform quantization in Relay
    # Note: We set opt_level to 3 in order to fold batch norm
    with tvm.transform.PassContext(opt_level=3):
        with relay.quantize.qconfig(global_scale=8.0, skip_conv_layers=[0]):
            mod = relay.quantize.quantize(mod, params=params)
    # Perform graph packing and constant folding for VTA target
    if target.device_name == "vta":
        assert env.BLOCK_IN == env.BLOCK_OUT
        relay_prog = graph_pack(
            mod["main"],
            env.BATCH,
            env.BLOCK_OUT,
            env.WGT_WIDTH,
            start_name=start_pack,
            stop_name=stop_pack,
        )
    else:
        # Bug fix: previously `relay_prog` was only assigned on the VTA
        # branch, so compiling for a CPU target raised UnboundLocalError
        # at the return below. Mirror the sibling tutorial and return the
        # unpacked main function for non-VTA targets.
        relay_prog = mod["main"]
    return relay_prog, params
#################################################################
# Start RPC Tracker
# -----------------
# TVM uses an RPC session to communicate with Pynq boards.
# During tuning, the tuner will send the generated code to the board and
# measure the speed of code on the board.
#
# To scale up tuning, TVM uses an RPC Tracker to manage multiple devices.
# The RPC Tracker is a centralized controller node. We can register all devices to
# the tracker. For example, if we have 10 Pynq boards, we can register all of them
# to the tracker, and run 10 measurements in parallel, accelerating the tuning process.
#
# To start an RPC tracker, run this command on the host machine. The tracker is
# required during the whole tuning process, so we need to open a new terminal for
# this command:
#
# .. code-block:: bash
#
# python -m tvm.exec.rpc_tracker --host=0.0.0.0 --port=9190
#
# The expected output is:
#
# .. code-block:: bash
#
# INFO:RPCTracker:bind to 0.0.0.0:9190
#################################################################
# Register devices to RPC Tracker
# -----------------------------------
# Now we can register our devices to the tracker. The first step is to
# build the TVM runtime for the Pynq devices.
#
# Follow :ref:`vta-index`
# to build the TVM runtime on the device. Then register the device to the tracker with:
#
# .. code-block:: bash
#
# python -m tvm.exec.rpc_server --tracker=[HOST_IP]:9190 --key=pynq
#
# (replace :code:`[HOST_IP]` with the IP address of your host machine)
#
# After registering devices, we can confirm it by querying the rpc_tracker:
#
# .. code-block:: bash
#
# python -m tvm.exec.query_rpc_tracker --host=0.0.0.0 --port=9190
#
# For example, if we have 6 Pynq boards and 11 Raspberry Pi 3B,
# the output can be
#
# .. code-block:: bash
#
# Queue Status
# ----------------------------------
# key total free pending
# ----------------------------------
# pynq 6 6 0
# rpi3b 11 11 0
# ----------------------------------
#
# You can register multiple devices to the tracker to accelerate tuning.
###########################################
# Set Tuning Options
# ------------------
# Before tuning, we should apply some configurations.
# Here we use an Pynq-Z1 board as an example.
# Tracker host and port can be set by your environment
tracker_host = os.environ.get("TVM_TRACKER_HOST", "127.0.0.1")
tracker_port = int(os.environ.get("TVM_TRACKER_PORT", 9190))
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# This target is used for cross compilation. You can query it by :code:`gcc -v` on your device.
# Set ``device=arm_cpu`` to run inference on the CPU
# or ``device=vta`` to run inference on the FPGA.
device = "vta"
target = env.target if device == "vta" else env.target_vta_cpu
# Name of Gluon model to compile
# The ``start_pack`` and ``stop_pack`` labels indicate where
# to start and end the graph packing relay pass: in other words
# where to start and finish offloading to VTA.
network = "resnet18_v1"
start_pack = "nn.max_pool2d"
stop_pack = "nn.global_avg_pool2d"
# Tuning option
log_file = "%s.%s.log" % (device, network)
tuning_option = {
"log_filename": log_file,
"tuner": "random",
"n_trial": 1000,
"early_stopping": None,
"measure_option": autotvm.measure_option(
builder=autotvm.LocalBuilder(),
runner=autotvm.RPCRunner(
env.TARGET,
host=tracker_host,
port=tracker_port,
number=5,
timeout=60,
module_loader=vta.module_loader(),
# check_correctness=True, # TODO: re-enable when check_correctness works again.
),
),
}
####################################################################
#
# .. note:: How to set tuning options
#
# In general, the default values provided here work well.
# If you have enough time budget, you can set :code:`n_trial`, :code:`early_stopping`
# to larger values, makes the tuning run for longer.
# If your device is under-powered or your conv2d operators are large, consider
# setting a longer timeout.
#
###################################################################
# Begin Tuning
# ------------
# Now we can extract tuning tasks from the network and begin tuning.
# Here, we provide a simple utility function to tune a list of tasks.
# This function is just an initial implementation which tunes them in sequential order.
# We will introduce a more sophisticated tuning scheduler in the future.
#
# Given that the tuning will be done on Pynq FPGA boards, make sure that
# the ```TARGET`` entry in the ``vta_config.json`` file is set to ``pynq``.
# You can skip the implementation of this function for this tutorial.
def tune_tasks(
    tasks,
    measure_option,
    tuner="xgb",
    n_trial=1000,
    early_stopping=None,
    log_filename="tuning.log",
    use_transfer_learning=True,
):
    """Tune a list of AutoTVM tasks sequentially.

    All measurements are appended to a temporary log; once every task has
    been tuned, the best record per workload is distilled into
    ``log_filename`` and the temporary log is deleted.
    """
    # Work against a temporary log file; start from a clean slate.
    tmp_log_file = log_filename + ".tmp"
    if os.path.exists(tmp_log_file):
        os.remove(tmp_log_file)

    task_count = len(tasks)
    for idx, task in enumerate(reversed(tasks)):
        prefix = "[Task %2d/%2d] " % (idx + 1, task_count)

        # Instantiate the requested tuner.
        if tuner in ("xgb", "xgb-rank"):
            tuner_obj = XGBTuner(task, loss_type="rank")
        elif tuner == "xgb_knob":
            tuner_obj = XGBTuner(task, loss_type="rank", feature_type="knob")
        elif tuner == "ga":
            tuner_obj = GATuner(task, pop_size=50)
        elif tuner == "random":
            tuner_obj = RandomTuner(task)
        elif tuner == "gridsearch":
            tuner_obj = GridSearchTuner(task)
        else:
            raise ValueError("Invalid tuner: " + tuner)

        # Seed the tuner with earlier measurements when transfer learning
        # is enabled and a previous log exists.
        if use_transfer_learning and os.path.isfile(tmp_log_file):
            tuner_obj.load_history(autotvm.record.load_from_file(tmp_log_file))

        # Never request more trials than the config space contains.
        budget = min(n_trial, len(task.config_space))
        tuner_obj.tune(
            n_trial=budget,
            early_stopping=early_stopping,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(budget, prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )

    # pick best records to a cache file
    autotvm.record.pick_best(tmp_log_file, log_filename)
    os.remove(tmp_log_file)
########################################################################
# Register VTA-specific tuning tasks
def register_vta_tuning_tasks():
    """Register the VTA conv2d AutoTVM template ("conv2d_packed.vta") so
    that task extraction can discover it."""
    from tvm.autotvm.task import TaskExtractEnv

    @tvm.te.tag_scope(tag=topi.tag.ELEMWISE)
    def my_clip(x, a_min, a_max):
        """Unlike topi's current clip, put min and max into two stages."""
        const_min = tvm.tir.const(a_min, x.dtype)
        const_max = tvm.tir.const(a_max, x.dtype)
        # Two separate compute stages (clipA, clipB) rather than one fused
        # clip, so each stage maps to a single elementwise op.
        x = te.compute(x.shape, lambda *i: tvm.te.min(x(*i), const_max), name="clipA")
        x = te.compute(x.shape, lambda *i: tvm.te.max(x(*i), const_min), name="clipB")
        return x

    # init autotvm env to register VTA operator
    TaskExtractEnv()

    @autotvm.template("conv2d_packed.vta")
    def _topi_nn_conv2d(*args, **kwargs):
        """AutoTVM template: packed conv2d, requantized (>>8, clip, cast) to int8."""
        assert not kwargs, "Do not support kwargs in template function call"
        A, W = args[:2]
        with tvm.target.vta():
            res = vta.top.conv2d_packed(*args, **kwargs)
            res = topi.right_shift(res, 8)
            res = my_clip(res, 0, 127)
            res = topi.cast(res, "int8")
        # Use the VTA-specific schedule only when compiling for the VTA
        # device; otherwise fall back to a default TE schedule.
        if tvm.target.Target.current().device_name == "vta":
            s = vta.top.schedule_conv2d_packed([res])
        else:
            s = te.create_schedule([res.op])
        return s, [A, W, res]
########################################################################
# Finally, we launch tuning jobs and evaluate the end-to-end performance.
def tune_and_evaluate(tuning_opt):
    """Extract conv2d tuning tasks, tune them, and evaluate end-to-end.

    Note: this tutorial version returns early (before tuning) so it can run
    on the docs build server; comment out the `return` to tune for real.
    """
    # Register VTA tuning tasks
    register_vta_tuning_tasks()
    # Perform task extraction on Relay program
    print("Extract tasks...")
    relay_prog, params = compile_network(env, target, network, start_pack, stop_pack)
    mod = tvm.IRModule.from_expr(relay_prog)
    tasks = autotvm.task.extract_from_program(
        mod,
        params=params,
        ops=(relay.op.get("nn.conv2d"),),
        target=target,
        target_host=env.target_host,
    )
    # filter out non-packed conv2d task: packed tensors carry extra inner
    # dimensions, so their shape tuple has more than 4 entries
    tasks = list(filter(lambda t: len(t.args[0][1]) > 4 and "conv" in t.name, tasks))
    # We should have extracted 10 convolution tasks
    assert len(tasks) == 10
    print("Extracted {} conv2d tasks:".format(len(tasks)))
    # Print each workload as (batch, H, W, in_ch, out_ch, kh, kw, ph, pw, sh, sw),
    # unpacking the packed layout: outer dims * inner (block) dims.
    for tsk in tasks:
        inp = tsk.args[0][1]   # packed input shape descriptor
        wgt = tsk.args[1][1]   # packed weight shape descriptor
        batch = inp[0] * inp[4]
        in_filter = inp[1] * inp[5]
        out_filter = wgt[0] * wgt[4]
        height, width = inp[2], inp[3]
        hkernel, wkernel = wgt[2], wgt[3]
        hstride, wstride = tsk.args[2][0], tsk.args[2][1]
        hpad, wpad = tsk.args[3][0], tsk.args[3][1]
        print(
            "({}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {})".format(
                batch,
                height,
                width,
                in_filter,
                out_filter,
                hkernel,
                wkernel,
                hpad,
                wpad,
                hstride,
                wstride,
            )
        )
    # We do not run the tuning in our webpage server since it takes too long.
    # Comment the following line to run it by yourself.
    return
    # run tuning tasks
    print("Tuning...")
    tune_tasks(tasks, **tuning_opt)
    # evaluate with tuning history
    if env.TARGET != "sim":
        # Get remote from fleet node
        remote = autotvm.measure.request_remote(
            env.TARGET, tracker_host, tracker_port, timeout=10000
        )
        # Reconfigure the JIT runtime and FPGA.
        vta.reconfig_runtime(remote)
        vta.program_fpga(remote, bitstream=None)
    else:
        # In simulation mode, host the RPC server locally.
        remote = rpc.LocalSession()
    # compile kernels with history best records
    with autotvm.tophub.context(target, extra_files=[log_file]):
        # Compile network
        print("Compile...")
        if target.device_name != "vta":
            with tvm.transform.PassContext(opt_level=3, disabled_pass={"AlterOpLayout"}):
                lib = relay.build(
                    relay_prog, target=target, params=params, target_host=env.target_host
                )
        else:
            # VTA targets need the VTA-specific build config.
            with vta.build_config(opt_level=3, disabled_pass={"AlterOpLayout"}):
                lib = relay.build(
                    relay_prog, target=target, params=params, target_host=env.target_host
                )
        # Export library and ship it to the remote device over RPC
        print("Upload...")
        temp = utils.tempdir()
        lib.export_library(temp.relpath("graphlib.tar"))
        remote.upload(temp.relpath("graphlib.tar"))
        lib = remote.load_module("graphlib.tar")
        # Generate the graph executor
        ctx = remote.ext_dev(0) if device == "vta" else remote.cpu(0)
        m = graph_executor.GraphModule(lib["default"](ctx))
        # upload parameters to device (random input; only timing is measured)
        image = tvm.nd.array((np.random.uniform(size=(1, 3, 224, 224))).astype("float32"))
        m.set_input("data", image)
        # evaluate
        print("Evaluate inference time cost...")
        timer = m.module.time_evaluator("run", ctx, number=1, repeat=10)
        tcost = timer()
        prof_res = np.array(tcost.results) * 1000  # convert to millisecond
        print(
            "Mean inference time (std dev): %.2f ms (%.2f ms)"
            % (np.mean(prof_res), np.std(prof_res))
        )
# Run the tuning and evaluate the results
tune_and_evaluate(tuning_option)
######################################################################
# Sample Output
# -------------
# The tuning needs to compile many programs and extract feature from them.
# So a high performance CPU is recommended.
# One sample output is listed below.
# It takes about 2 hours on a 16T CPU, and 6 Pynq boards.
#
# .. code-block:: bash
#
# Extract tasks...
# [Warning] Invalid shape during AutoTVM task creation
# Extracted 10 conv2d tasks:
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 16, 14, 14, 1, 16), 'int8'), ('TENSOR', (32, 16, 1, 1, 16, 16), 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 16, 14, 14, 1, 16, 'int8'), (32, 16, 1, 1, 16, 16, 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 8, 28, 28, 1, 16), 'int8'), ('TENSOR', (16, 8, 1, 1, 16, 16), 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 8, 28, 28, 1, 16, 'int8'), (16, 8, 1, 1, 16, 16, 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 4, 56, 56, 1, 16), 'int8'), ('TENSOR', (8, 4, 1, 1, 16, 16), 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 4, 56, 56, 1, 16, 'int8'), (8, 4, 1, 1, 16, 16, 'int8'), (2, 2), (0, 0), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 4, 56, 56, 1, 16), 'int8'), ('TENSOR', (4, 4, 3, 3, 16, 16), 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 4, 56, 56, 1, 16, 'int8'), (4, 4, 3, 3, 16, 16, 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 8, 28, 28, 1, 16), 'int8'), ('TENSOR', (8, 8, 3, 3, 16, 16), 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 8, 28, 28, 1, 16, 'int8'), (8, 8, 3, 3, 16, 16, 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 4, 56, 56, 1, 16), 'int8'), ('TENSOR', (8, 4, 3, 3, 16, 16), 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 4, 56, 56, 1, 16, 'int8'), (8, 4, 3, 3, 16, 16, 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 16, 14, 14, 1, 16), 'int8'), ('TENSOR', (16, 16, 3, 3, 16, 16), 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 16, 14, 14, 1, 16, 'int8'), (16, 16, 3, 3, 16, 16, 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 8, 28, 28, 1, 16), 'int8'), ('TENSOR', (16, 8, 3, 3, 16, 16), 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 8, 28, 28, 1, 16, 'int8'), (16, 8, 3, 3, 16, 16, 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 32, 7, 7, 1, 16), 'int8'), ('TENSOR', (32, 32, 3, 3, 16, 16), 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 32, 7, 7, 1, 16, 'int8'), (32, 32, 3, 3, 16, 16, 'int8'), (1, 1), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Task(func_name=topi_nn_conv2d, args=(('TENSOR', (1, 16, 14, 14, 1, 16), 'int8'), ('TENSOR', (32, 16, 3, 3, 16, 16), 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'), kwargs={}, workload=('conv2d', (1, 16, 14, 14, 1, 16, 'int8'), (32, 16, 3, 3, 16, 16, 'int8'), (2, 2), (1, 1), (1, 1), 'NCHW1n16c', 'int32'))
# Tuning...
# [Task 1/10] Current/Best: 0.72/ 23.24 GFLOPS | Progress: (480/1000) | 640.31 s Done.
# [Task 2/10] Current/Best: 0.00/ 27.69 GFLOPS | Progress: (576/1000) | 810.09 s Done.
# [Task 3/10] Current/Best: 0.00/ 22.97 GFLOPS | Progress: (1000/1000) | 1125.37 s Done.
# [Task 4/10] Current/Best: 0.00/ 31.26 GFLOPS | Progress: (1000/1000) | 1025.52 s Done.
# [Task 5/10] Current/Best: 0.00/ 15.15 GFLOPS | Progress: (1000/1000) | 1236.58 s Done.
# [Task 6/10] Current/Best: 0.00/ 22.74 GFLOPS | Progress: (1000/1000) | 906.60 s Done.
# [Task 7/10] Current/Best: 0.00/ 15.27 GFLOPS | Progress: (1000/1000) | 1056.25 s Done.
# [Task 8/10] Current/Best: 0.00/ 2.18 GFLOPS | Progress: (1000/1000) | 2275.29 s Done.
# [Task 9/10] Current/Best: 2.23/ 3.99 GFLOPS | Progress: (1000/1000) | 2527.25 s Done.
# [Task 10/10] Current/Best: 1.56/ 6.32 GFLOPS | Progress: (480/1000) | 1304.84 s Done.
# Compile...
# Upload...
# Evaluate inference time cost...
# Mean inference time (std dev): 621.79 ms (0.14 ms)
######################################################################
#
# .. note:: **Experiencing Difficulties?**
#
# The auto tuning module is error-prone. If you always see " 0.00/ 0.00 GFLOPS",
# then there must be something wrong.
#
# First, make sure you set the correct configuration of your device.
# Then, you can print debug information by adding these lines in the beginning
# of the script. It will print every measurement result, where you can find useful
# error messages.
#
# .. code-block:: python
#
# import logging
# logging.getLogger('autotvm').setLevel(logging.DEBUG)
#
# Finally, always feel free to ask our community for help on https://discuss.tvm.apache.org
| https://github.com/zk-ml/tachikoma |
vta/tutorials/frontend/deploy_classification.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Deploy Pretrained Vision Model from MxNet on VTA
================================================
**Author**: `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
This tutorial provides an end-to-end demo, on how to run ImageNet classification
inference onto the VTA accelerator design to perform ImageNet classification tasks.
It showcases Relay as a front end compiler that can perform quantization (VTA
only supports int8/32 inference) as well as graph packing (in order to enable
tensorization in the core) to massage the compute graph for the hardware target.
"""
######################################################################
# Install dependencies
# --------------------
# To use the autotvm package in tvm, we need to install some extra dependencies.
# (change "3" to "2" if you use python2):
#
# .. code-block:: bash
#
# pip3 install --user mxnet requests "Pillow<7"
#
# Now return to the python code. Import packages.
from __future__ import absolute_import, print_function
import argparse, json, os, requests, sys, time
from io import BytesIO
from os.path import join, isfile
from PIL import Image
from mxnet.gluon.model_zoo import vision
import numpy as np
from matplotlib import pyplot as plt
import tvm
from tvm import te
from tvm import rpc, autotvm, relay
from tvm.contrib import graph_executor, utils, download
from tvm.contrib.debugger import debug_executor
from tvm.relay import transform
import vta
from vta.testing import simulator
from vta.top import graph_pack
# Make sure that TVM was compiled with RPC=1
assert tvm.runtime.enabled("rpc")
######################################################################
# Define the platform and model targets
# -------------------------------------
# Execute on CPU vs. VTA, and define the model.
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# Set ``device=arm_cpu`` to run inference on the CPU
# or ``device=vta`` to run inference on the FPGA.
device = "vta"
target = env.target if device == "vta" else env.target_vta_cpu
# Dictionary lookup for when to start/end bit packing
pack_dict = {
"resnet18_v1": ["nn.max_pool2d", "nn.global_avg_pool2d"],
"resnet34_v1": ["nn.max_pool2d", "nn.global_avg_pool2d"],
"resnet18_v2": ["nn.max_pool2d", "nn.global_avg_pool2d"],
"resnet34_v2": ["nn.max_pool2d", "nn.global_avg_pool2d"],
"resnet50_v2": ["nn.max_pool2d", "nn.global_avg_pool2d"],
"resnet101_v2": ["nn.max_pool2d", "nn.global_avg_pool2d"],
}
# Name of Gluon model to compile
# The ``start_pack`` and ``stop_pack`` labels indicate where
# to start and end the graph packing relay pass: in other words
# where to start and finish offloading to VTA.
model = "resnet18_v1"
assert model in pack_dict
######################################################################
# Obtain an execution remote
# --------------------------
# When target is 'pynq', reconfigure FPGA and runtime.
# Otherwise, if target is 'sim', execute locally.
if env.TARGET not in ["sim", "tsim", "intelfocl"]:
# Get remote from tracker node if environment variable is set.
# To set up the tracker, you'll need to follow the "Auto-tuning
# a convolutional network for VTA" tutorial.
tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
# Otherwise if you have a device you want to program directly from
# the host, make sure you've set the variables below to the IP of
# your board.
device_host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
device_port = os.environ.get("VTA_RPC_PORT", "9091")
if not tracker_host or not tracker_port:
remote = rpc.connect(device_host, int(device_port))
else:
remote = autotvm.measure.request_remote(
env.TARGET, tracker_host, int(tracker_port), timeout=10000
)
# Reconfigure the JIT runtime and FPGA.
# You can program the FPGA with your own custom bitstream
# by passing the path to the bitstream file instead of None.
reconfig_start = time.time()
vta.reconfig_runtime(remote)
vta.program_fpga(remote, bitstream=None)
reconfig_time = time.time() - reconfig_start
print("Reconfigured FPGA and RPC runtime in {0:.2f}s!".format(reconfig_time))
# In simulation mode, host the RPC server locally.
else:
remote = rpc.LocalSession()
if env.TARGET in ["intelfocl"]:
# program intelfocl aocx
vta.program_fpga(remote, bitstream="vta.bitstream")
# Get execution context from remote
ctx = remote.ext_dev(0) if device == "vta" else remote.cpu(0)
######################################################################
# Build the inference graph executor
# ----------------------------------
# Grab vision model from Gluon model zoo and compile with Relay.
# The compilation steps are:
#
# 1. Front end translation from MxNet into Relay module.
# 2. Apply 8-bit quantization: here we skip the first conv layer,
# and dense layer which will both be executed in fp32 on the CPU.
# 3. Perform graph packing to alter the data layout for tensorization.
# 4. Perform constant folding to reduce number of operators (e.g. eliminate batch norm multiply).
# 5. Perform relay build to object file.
# 6. Load the object file onto remote (FPGA device).
# 7. Generate graph executor, `m`.
#
# Load pre-configured AutoTVM schedules
with autotvm.tophub.context(target):
# Populate the shape and data type dictionary for ImageNet classifier input
dtype_dict = {"data": "float32"}
shape_dict = {"data": (env.BATCH, 3, 224, 224)}
# Get off the shelf gluon model, and convert to relay
gluon_model = vision.get_model(model, pretrained=True)
# Measure build start time
build_start = time.time()
# Start front end compilation
mod, params = relay.frontend.from_mxnet(gluon_model, shape_dict)
# Update shape and type dictionary
shape_dict.update({k: v.shape for k, v in params.items()})
dtype_dict.update({k: str(v.dtype) for k, v in params.items()})
if target.device_name == "vta":
# Perform quantization in Relay
# Note: We set opt_level to 3 in order to fold batch norm
with tvm.transform.PassContext(opt_level=3):
with relay.quantize.qconfig(global_scale=8.0, skip_conv_layers=[0]):
mod = relay.quantize.quantize(mod, params=params)
# Perform graph packing and constant folding for VTA target
assert env.BLOCK_IN == env.BLOCK_OUT
# do device annotation if target is intelfocl or sim
relay_prog = graph_pack(
mod["main"],
env.BATCH,
env.BLOCK_OUT,
env.WGT_WIDTH,
start_name=pack_dict[model][0],
stop_name=pack_dict[model][1],
device_annot=(env.TARGET == "intelfocl"),
)
else:
relay_prog = mod["main"]
# Compile Relay program with AlterOpLayout disabled
if target.device_name != "vta":
with tvm.transform.PassContext(opt_level=3, disabled_pass={"AlterOpLayout"}):
graph, lib, params = relay.build(
relay_prog, target=tvm.target.Target(target, host=env.target_host), params=params
)
else:
if env.TARGET == "intelfocl":
# multiple targets to run both on cpu and vta
target = {"cpu": env.target_vta_cpu, "ext_dev": target}
with vta.build_config(
opt_level=3, disabled_pass={"AlterOpLayout", "tir.CommonSubexprElimTIR"}
):
graph, lib, params = relay.build(
relay_prog, target=tvm.target.Target(target, host=env.target_host), params=params
)
# Measure Relay build time
build_time = time.time() - build_start
print(model + " inference graph built in {0:.2f}s!".format(build_time))
# Send the inference library over to the remote RPC server
temp = utils.tempdir()
lib.export_library(temp.relpath("graphlib.tar"))
remote.upload(temp.relpath("graphlib.tar"))
lib = remote.load_module("graphlib.tar")
if env.TARGET == "intelfocl":
ctxes = [remote.ext_dev(0), remote.cpu(0)]
m = graph_executor.create(graph, lib, ctxes)
else:
# Graph runtime
m = graph_executor.create(graph, lib, ctx)
######################################################################
# Perform image classification inference
# --------------------------------------
# We run classification on an image sample from ImageNet
# We just need to download the categories files, `synset.txt`
# and an input test image.
# Download ImageNet categories
categ_url = "https://github.com/uwsampl/web-data/raw/main/vta/models/"
categ_fn = "synset.txt"
download.download(join(categ_url, categ_fn), categ_fn)
# SECURITY NOTE(review): `eval` executes arbitrary Python from the downloaded
# file — prefer `ast.literal_eval` for parsing a plain dict literal.
# The file handle opened here is also never explicitly closed.
synset = eval(open(categ_fn).read())
# Download test image
image_url = "https://homes.cs.washington.edu/~moreau/media/vta/cat.jpg"
image_fn = "cat.png"
download.download(image_url, image_fn)
# Prepare test image for inference
image = Image.open(image_fn).resize((224, 224))
plt.imshow(image)
plt.show()
image = np.array(image) - np.array([123.0, 117.0, 104.0])
image /= np.array([58.395, 57.12, 57.375])
image = image.transpose((2, 0, 1))
image = image[np.newaxis, :]
image = np.repeat(image, env.BATCH, axis=0)
# Set the network parameters and inputs
m.set_input(**params)
m.set_input("data", image)
# Perform inference and gather execution statistics
# More on: :py:method:`tvm.runtime.Module.time_evaluator`
num = 4 # number of times we run module for a single measurement
rep = 3 # number of measurements (we derive std dev from this)
timer = m.module.time_evaluator("run", ctx, number=num, repeat=rep)
if env.TARGET in ["sim", "tsim"]:
simulator.clear_stats()
timer()
sim_stats = simulator.stats()
print("\nExecution statistics:")
for k, v in sim_stats.items():
# Since we execute the workload many times, we need to normalize stats
# Note that there is always one warm up run
# Therefore we divide the overall stats by (num * rep + 1)
print("\t{:<16}: {:>16}".format(k, v // (num * rep + 1)))
else:
tcost = timer()
std = np.std(tcost.results) * 1000
mean = tcost.mean * 1000
print("\nPerformed inference in %.2fms (std = %.2f) for %d samples" % (mean, std, env.BATCH))
print("Average per sample inference time: %.2fms" % (mean / env.BATCH))
# Get classification results
tvm_output = m.get_output(0, tvm.nd.empty((env.BATCH, 1000), "float32", remote.cpu(0)))
for b in range(env.BATCH):
top_categories = np.argsort(tvm_output.numpy()[b])
# Report top-5 classification results
print("\n{} prediction for sample {}".format(model, b))
print("\t#1:", synset[top_categories[-1]])
print("\t#2:", synset[top_categories[-2]])
print("\t#3:", synset[top_categories[-3]])
print("\t#4:", synset[top_categories[-4]])
print("\t#5:", synset[top_categories[-5]])
# This just checks that one of the 5 top categories
# is one variety of cat; this is by no means an accurate
# assessment of how quantization affects classification
# accuracy but is meant to catch changes to the
# quantization pass that would affect accuracy in the CI.
cat_detected = False
for k in top_categories[-5:]:
if "cat" in synset[k]:
cat_detected = True
assert cat_detected
| https://github.com/zk-ml/tachikoma |
vta/tutorials/frontend/deploy_detection.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Deploy Pretrained Vision Detection Model from Darknet on VTA
============================================================
**Author**: `Hua Jiang <https://github.com/huajsj>`_
This tutorial provides an end-to-end demo, on how to run Darknet YoloV3-tiny
inference onto the VTA accelerator design to perform Image detection tasks.
It showcases Relay as a front end compiler that can perform quantization (VTA
only supports int8/32 inference) as well as graph packing (in order to enable
tensorization in the core) to massage the compute graph for the hardware target.
"""
######################################################################
# Install dependencies
# --------------------
# To use the autotvm package in tvm, we need to install some extra dependencies.
# (change "3" to "2" if you use python2):
#
# .. code-block:: bash
#
# pip3 install "Pillow<7"
#
# The YOLO-V3-tiny model with Darknet parsing depends on the CFFI and CV2
# libraries, so we need to install CFFI and CV2 before executing this script.
#
# .. code-block:: bash
#
# pip3 install cffi
# pip3 install opencv-python
#
# Now return to the python code. Import packages.
from __future__ import absolute_import, print_function
import sys
import os
import time
import matplotlib.pyplot as plt
import numpy as np
import tvm
import vta
from tvm import rpc, autotvm, relay
from tvm.relay.testing import yolo_detection, darknet
from tvm.relay.testing.darknet import __darknetffi__
from tvm.contrib import graph_executor, utils
from tvm.contrib.download import download_testdata
from vta.testing import simulator
from vta.top import graph_pack
# Make sure that TVM was compiled with RPC=1
# (RPC is required both for real FPGA boards and for the local simulator session).
assert tvm.runtime.enabled("rpc")
##############################################################################
# Download yolo net configure file, weight file, darknet library file based on
# Model Name
# ----------------------------------------------------------------------------
MODEL_NAME = "yolov3-tiny"
REPO_URL = "https://github.com/dmlc/web-data/blob/main/darknet/"
# Network configuration file (layer definitions); ?raw=true fetches the raw
# file from GitHub instead of the HTML page. Downloads are cached by
# download_testdata under the "darknet" module directory.
cfg_path = download_testdata(
    "https://github.com/pjreddie/darknet/blob/master/cfg/" + MODEL_NAME + ".cfg" + "?raw=true",
    MODEL_NAME + ".cfg",
    module="darknet",
)
# Pretrained weights for the same model.
weights_path = download_testdata(
    "https://pjreddie.com/media/files/" + MODEL_NAME + ".weights" + "?raw=true",
    MODEL_NAME + ".weights",
    module="darknet",
)
# The darknet shared library is prebuilt per host OS; only Linux and macOS
# binaries are provided, any other platform is rejected below.
if sys.platform in ["linux", "linux2"]:
    darknet_lib_path = download_testdata(
        REPO_URL + "lib/" + "libdarknet2.0.so" + "?raw=true", "libdarknet2.0.so", module="darknet"
    )
elif sys.platform == "darwin":
    darknet_lib_path = download_testdata(
        REPO_URL + "lib_osx/" + "libdarknet_mac2.0.so" + "?raw=true",
        "libdarknet_mac2.0.so",
        module="darknet",
    )
else:
    raise NotImplementedError("Darknet lib is not supported on {} platform".format(sys.platform))
##################################################
# Download yolo categories and illustration font.
# ------------------------------------------------
# COCO class names, one per line; index order matches the network's class ids.
coco_path = download_testdata(
    REPO_URL + "data/" + "coco.names" + "?raw=true", "coco.names", module="data"
)
# TrueType font used later to draw the detection labels on the image.
font_path = download_testdata(
    REPO_URL + "data/" + "arial.ttf" + "?raw=true", "arial.ttf", module="data"
)
with open(coco_path) as f:
    content = f.readlines()
# Strip trailing newlines to get the plain class-name list.
names = [x.strip() for x in content]
########################################
# Define the platform and model targets.
# --------------------------------------
# Execute on CPU vs. VTA, and define the model.
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# Set ``device=arm_cpu`` to run inference on the CPU
# or ``device=vta`` to run inference on the FPGA.
device = "vta"
target = env.target if device == "vta" else env.target_vta_cpu
# Per-model graph-packing boundaries:
# (start_name, stop_name, start_name_idx, stop_name_idx).
pack_dict = {
    "yolov3-tiny": ["nn.max_pool2d", "cast", 4, 186],
}
# Name of Darknet model to compile
# The ``start_pack`` and ``stop_pack`` labels indicate where
# to start and end the graph packing relay pass: in other words
# where to start and finish offloading to VTA.
# The number 4 indicates that the ``start_pack`` index is 4, and the
# number 186 indicates that the ``stop_pack`` index is 186; by using
# both the name and the index number we can locate the correct place
# to start/end when there are multiple ``nn.max_pool2d``
# or ``cast`` operators. print(mod.astext(show_meta_data=False)) can help
# to find operator name and index information.
assert MODEL_NAME in pack_dict
#############################
# Obtain an execution remote.
# ---------------------------
# When target is 'pynq' or other FPGA backend, reconfigure FPGA and runtime.
# Otherwise, if target is 'sim', execute locally.
if env.TARGET not in ["sim", "tsim"]:
    # Get remote from tracker node if environment variable is set.
    # To set up the tracker, you'll need to follow the "Auto-tuning
    # a convolutional network for VTA" tutorial.
    tracker_host = os.environ.get("TVM_TRACKER_HOST", None)
    tracker_port = os.environ.get("TVM_TRACKER_PORT", None)
    # Otherwise if you have a device you want to program directly from
    # the host, make sure you've set the variables below to the IP of
    # your board.
    device_host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
    device_port = os.environ.get("VTA_RPC_PORT", "9091")
    if not tracker_host or not tracker_port:
        # Direct RPC connection to the board.
        remote = rpc.connect(device_host, int(device_port))
    else:
        # Request a device matching the VTA target from the tracker.
        remote = autotvm.measure.request_remote(
            env.TARGET, tracker_host, int(tracker_port), timeout=10000
        )
    # Reconfigure the JIT runtime and FPGA.
    # You can program the FPGA with your own custom bitstream
    # by passing the path to the bitstream file instead of None.
    reconfig_start = time.time()
    vta.reconfig_runtime(remote)
    vta.program_fpga(remote, bitstream=None)
    reconfig_time = time.time() - reconfig_start
    print("Reconfigured FPGA and RPC runtime in {0:.2f}s!".format(reconfig_time))
# In simulation mode, host the RPC server locally.
else:
    remote = rpc.LocalSession()
# Get execution context from remote
ctx = remote.ext_dev(0) if device == "vta" else remote.cpu(0)
#####################################
# Build the inference graph executor.
# -----------------------------------
# Using Darknet library load downloaded vision model and compile with Relay.
# The compilation steps are:
#
# 1. Front end translation from Darknet into Relay module.
# 2. Apply 8-bit quantization: here we skip the first conv layer,
# and dense layer which will both be executed in fp32 on the CPU.
# 3. Perform graph packing to alter the data layout for tensorization.
# 4. Perform constant folding to reduce number of operators (e.g. eliminate batch norm multiply).
# 5. Perform relay build to object file.
# 6. Load the object file onto remote (FPGA device).
# 7. Generate graph executor, `m`.
# Load pre-configured AutoTVM schedules
with autotvm.tophub.context(target):
    # Load the darknet network description and weights through the CFFI bridge.
    net = __darknetffi__.dlopen(darknet_lib_path).load_network(
        cfg_path.encode("utf-8"), weights_path.encode("utf-8"), 0
    )
    # Input shape comes from the darknet net struct: (batch, channels, height, width).
    dshape = (env.BATCH, net.c, net.h, net.w)
    dtype = "float32"
    # Measure build start time
    build_start = time.time()
    # Start front end compilation
    mod, params = relay.frontend.from_darknet(net, dtype=dtype, shape=dshape)
    if target.device_name == "vta":
        # Perform quantization in Relay
        # Note: We set opt_level to 3 in order to fold batch norm
        with tvm.transform.PassContext(opt_level=3):
            with relay.quantize.qconfig(
                global_scale=23.0,
                skip_conv_layers=[0],
                store_lowbit_output=True,
                round_for_shift=True,
            ):
                mod = relay.quantize.quantize(mod, params=params)
            # Perform graph packing and constant folding for VTA target
            # using the (name, index) boundaries selected in pack_dict above.
            mod = graph_pack(
                mod["main"],
                env.BATCH,
                env.BLOCK_OUT,
                env.WGT_WIDTH,
                start_name=pack_dict[MODEL_NAME][0],
                stop_name=pack_dict[MODEL_NAME][1],
                start_name_idx=pack_dict[MODEL_NAME][2],
                stop_name_idx=pack_dict[MODEL_NAME][3],
            )
    else:
        mod = mod["main"]
    # Compile Relay program with AlterOpLayout disabled
    with vta.build_config(disabled_pass={"AlterOpLayout", "tir.CommonSubexprElimTIR"}):
        lib = relay.build(
            mod, target=tvm.target.Target(target, host=env.target_host), params=params
        )
    # Measure Relay build time
    build_time = time.time() - build_start
    print(MODEL_NAME + " inference graph built in {0:.2f}s!".format(build_time))
    # Send the inference library over to the remote RPC server
    temp = utils.tempdir()
    lib.export_library(temp.relpath("graphlib.tar"))
    remote.upload(temp.relpath("graphlib.tar"))
    lib = remote.load_module("graphlib.tar")
    # Graph executor
    m = graph_executor.GraphModule(lib["default"](ctx))
####################################
# Perform image detection inference.
# ----------------------------------
# We run detection on a downloaded image
# Download test image
# Network input height/width come from the darknet shape computed above.
[neth, netw] = dshape[2:]
test_image = "person.jpg"
img_url = REPO_URL + "data/" + test_image + "?raw=true"
img_path = download_testdata(img_url, test_image, "data")
# Reorder axes (1, 2, 0) for matplotlib display; darknet.load_image
# presumably returns a CHW array — TODO confirm against tvm.relay.testing.darknet.
data = darknet.load_image(img_path, neth, netw).transpose(1, 2, 0)
# Prepare test image for inference
plt.imshow(data)
plt.show()
# Back to CHW, add a batch axis, and replicate to fill env.BATCH samples.
data = data.transpose((2, 0, 1))
data = data[np.newaxis, :]
data = np.repeat(data, env.BATCH, axis=0)
# Set the network parameters and inputs
m.set_input("data", data)
# Perform inference and gather execution statistics
# More on: :py:method:`tvm.runtime.Module.time_evaluator`
num = 4  # number of times we run module for a single measurement
rep = 3  # number of measurements (we derive std dev from this)
timer = m.module.time_evaluator("run", ctx, number=num, repeat=rep)
# On the simulator, report hardware counters; on real hardware, wall-clock time.
if env.TARGET in ["sim", "tsim"]:
    simulator.clear_stats()
    timer()
    sim_stats = simulator.stats()
    print("\nExecution statistics:")
    for k, v in sim_stats.items():
        # Since we execute the workload many times, we need to normalize stats
        # Note that there is always one warm up run
        # Therefore we divide the overall stats by (num * rep + 1)
        print("\t{:<16}: {:>16}".format(k, v // (num * rep + 1)))
else:
    # Convert the measured seconds to milliseconds.
    tcost = timer()
    std = np.std(tcost.results) * 1000
    mean = tcost.mean * 1000
    print("\nPerformed inference in %.2fms (std = %.2f) for %d samples" % (mean, std, env.BATCH))
    print("Average per sample inference time: %.2fms" % (mean / env.BATCH))
# Get detection results from out
# Detection confidence threshold and non-maximum-suppression IoU threshold.
# (The original script first set thresh = 0.5 and then unconditionally
# overwrote it with 0.560 before first use; the dead assignment is removed.)
thresh = 0.560
nms_thresh = 0.45
tvm_out = []
# The network exposes two YOLO output heads; each head contributes four
# consecutive outputs: feature map, mask, biases, and layer attributes.
for i in range(2):
    layer_out = {}
    layer_out["type"] = "Yolo"
    # Get the yolo layer attributes (n, out_c, out_h, out_w, classes, total)
    layer_attr = m.get_output(i * 4 + 3).numpy()
    layer_out["biases"] = m.get_output(i * 4 + 2).numpy()
    layer_out["mask"] = m.get_output(i * 4 + 1).numpy()
    # Reshape the flat feature map into (n, out_c / n, out_h, out_w).
    out_shape = (layer_attr[0], layer_attr[1] // layer_attr[0], layer_attr[2], layer_attr[3])
    layer_out["output"] = m.get_output(i * 4).numpy().reshape(out_shape)
    layer_out["classes"] = layer_attr[4]
    tvm_out.append(layer_out)
# Show detection results
img = darknet.load_image_color(img_path)
_, im_h, im_w = img.shape
dets = yolo_detection.fill_network_boxes((netw, neth), (im_w, im_h), thresh, 1, tvm_out)
last_layer = net.layers[net.n - 1]
yolo_detection.do_nms_sort(dets, last_layer.classes, nms_thresh)
yolo_detection.draw_detections(font_path, img, dets, thresh, names, last_layer.classes)
# Reorder CHW -> HWC for matplotlib display.
plt.imshow(img.transpose(1, 2, 0))
plt.show()
| https://github.com/zk-ml/tachikoma |
vta/tutorials/matrix_multiply.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
.. _basic-mat-mult:
Simple Matrix Multiply
======================
**Author**: `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
In this tutorial, we will build on top of the :ref:`vta-get-started` tutorial
and introduce additional concepts required to implement matrix multiplication
on VTA with the TVM workflow.
"""
######################################################################
# RPC Setup
# ---------
# We start by programming the Pynq's FPGA and building its RPC runtime
# as we did in the VTA introductory tutorial.
from __future__ import absolute_import, print_function
import os
import tvm
from tvm import te
import vta
import numpy as np
from tvm import rpc
from tvm.contrib import utils
from vta.testing import simulator
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# We read the Pynq RPC host IP address and port number from the OS environment
host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))
# We configure both the bitstream and the runtime system on the Pynq
# to match the VTA configuration specified by the vta_config.json file.
# NOTE(review): any env.TARGET other than pynq/de10nano/sim/tsim leaves
# ``remote`` undefined and later code will fail with NameError — confirm intended.
if env.TARGET == "pynq" or env.TARGET == "de10nano":
    # Make sure that TVM was compiled with RPC=1
    assert tvm.runtime.enabled("rpc")
    remote = rpc.connect(host, port)
    # Reconfigure the JIT runtime
    vta.reconfig_runtime(remote)
    # Program the FPGA with a pre-compiled VTA bitstream.
    # You can program the FPGA with your own custom bitstream
    # by passing the path to the bitstream file instead of None.
    vta.program_fpga(remote, bitstream=None)
# In simulation mode, host the RPC server locally.
elif env.TARGET in ["sim", "tsim"]:
    remote = rpc.LocalSession()
######################################################################
# Computation Declaration
# -----------------------
# In this example we describe a simple matrix multiplication addition, which
# requires multiple computation stages, as shown in the dataflow diagram below.
# First we describe the input tensors :code:`A` and :code:`B` that are living
# in main memory.
# Second, we need to declare intermediate tensors :code:`A_buf` and
# :code:`B_buf`, which will live in VTA's on-chip buffers.
# Having this extra computational stage allows us to explicitly
# stage cached reads and writes.
# Third, we describe the matrix multiplication computation over
# :code:`A_buf` and :code:`B_buf` to produce the product matrix :code:`C_buf`.
# The last operation is a cast and copy back to DRAM, into results tensor
# :code:`C`.
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/gemm_dataflow.png
# :align: center
######################################################################
# Data Layout
# ~~~~~~~~~~~
# We describe the placeholder tensors :code:`A`, and :code:`B` in a tiled data
# format to match the data layout requirements imposed by the VTA tensor core.
######################################################################
# .. note::
#
# **Data Tiling**
#
# One source of complexity when targeting accelerators is to make sure
# that the data layout matches the layout imposed by the accelerator design.
# VTA is designed around a *tensor core* that performs, one matrix-matrix
# operation per cycle between an activation matrix and a weight matrix,
# adding the result matrix to an accumulator matrix, as shown in the
# figure below.
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/tensor_core.png
# :align: center
# :width: 480px
#
# The dimensions of that matrix-matrix multiplication are specified in
# the :code:`vta_config.json` configuration file.
# The activation matrix has a :code:`(BATCH, BLOCK_IN)` shape
# and the transposed weight matrix has a :code:`(BLOCK_OUT, BLOCK_IN)` shape,
# thus inferring that the resulting output matrix has a
# :code:`(BATCH, BLOCK_OUT)` shape.
# Consequently input and output tensors processed by VTA need to be
#   tiled according to these aforementioned dimensions.
#
# The diagram below shows the impact of data tiling on a matrix that is
# originally of shape (4, 8).
# Tiling by a (2, 2) tile shape ensures that data within each tile is
# contiguous.
# The resulting tiled tensor has a shape of (2, 4, 2, 2).
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/data_tiling.png
# :align: center
# :width: 480px
#
# We first define the variables :code:`m`, :code:`n`, :code:`o` to represent
# the shape of the matrix multiplication. These variables are multiplicative
# factors over the :code:`BLOCK_OUT`, :code:`BLOCK_IN`, and :code:`BATCH`
# tensor dimensions respectively.
# By default, the configuration file sets :code:`BATCH`, :code:`BLOCK_IN`, and
# :code:`BLOCK_OUT` to be 1, 16 and 16 respectively (:code:`BATCH` being set to
# 1 implies that our compute building block is vector-matrix multiply).
#
######################################################################
# .. note::
#
# **Data Types**
#
# It's important to not only match the inner-tile
# dimension of VTA's tensor core, but also to match the specific data types
# expected by VTA.
# VTA for now only supports fixed point data types, which integer width is
# specified in the :code:`vta_config.json` file by :code:`INP_WIDTH` and
# :code:`WGT_WIDTH` for the activations and weights data types respectively.
# In addition, the accumulator data type integer width is specified by
# :code:`ACC_WIDTH`.
#
# By default, the configuration file sets :code:`INP_WIDTH`
# and :code:`WGT_WIDTH` to 8.
# The accumulator width :code:`ACC_WIDTH` is set to 32, in order to avoid
# overflow during accumulation.
# As a result, :code:`env.inp_dtype` and :code:`env.wgt_dtype` are all
# narrow 8-bit integers, while :code:`env.acc_dtype` is a standard 32-bit
# integer.
# Output channel factor m - total 16x16=256 output channels
m = 16
# Input channel factor n - total 16x16=256 input channels
n = 16
# Batch factor o (we use single batch inference)
o = 1
# A placeholder tensor in tiled data format (o, n, BATCH, BLOCK_IN)
A = te.placeholder((o, n, env.BATCH, env.BLOCK_IN), name="A", dtype=env.inp_dtype)
# B placeholder tensor in tiled data format (m, n, BLOCK_OUT, BLOCK_IN)
B = te.placeholder((m, n, env.BLOCK_OUT, env.BLOCK_IN), name="B", dtype=env.wgt_dtype)
# A copy buffer: an identity compute stage that is later mapped to VTA's
# on-chip input SRAM during scheduling.
A_buf = te.compute((o, n, env.BATCH, env.BLOCK_IN), lambda *i: A(*i), "A_buf")
# B copy buffer: identity stage later mapped to VTA's on-chip weight SRAM.
B_buf = te.compute((m, n, env.BLOCK_OUT, env.BLOCK_IN), lambda *i: B(*i), "B_buf")
######################################################################
# Matrix Multiplication
# ~~~~~~~~~~~~~~~~~~~~~
# Now we're ready to describe the matrix multiplication result tensor :code:`C`,
# with another compute operation.
# The compute function takes the shape of the tensor, as well as a lambda
# function that describes the computation rule for each position of the tensor.
#
# In order to implement matrix multiplication, the lambda function needs to
# include a reduction formula over the input channel dimension axes.
# To create a reduction formula, we can declare a reduction axis using
# :code:`te.reduce_axis`, which takes in the range of reductions.
# :code:`te.sum` takes in the expression to be reduced as well as
# the reduction axes to compute the sum of value over all k in the declared
# ranges.
#
# Note that the reduction needs to be performed over 32-bit :code:`env.acc_dtype`
# accumulator data types.
#
# No computation happens during this phase, as we are only declaring how
# the computation should be done.
# Outer input feature reduction axis
ko = te.reduce_axis((0, n), name="ko")
# Inner input feature reduction axis
ki = te.reduce_axis((0, env.BLOCK_IN), name="ki")
# Describe the in-VTA matrix multiplication.
# Both operands are cast up to the wide accumulator dtype before the multiply
# so the reduction accumulates without overflow.
C_buf = te.compute(
    (o, m, env.BATCH, env.BLOCK_OUT),
    lambda bo, co, bi, ci: te.sum(
        A_buf[bo, ko, bi, ki].astype(env.acc_dtype) * B_buf[co, ko, ci, ki].astype(env.acc_dtype),
        axis=[ko, ki],
    ),
    name="C_buf",
)
######################################################################
# Casting the Results
# ~~~~~~~~~~~~~~~~~~~
# After the computation is done, we'll need to send the results computed by VTA
# back to main memory.
######################################################################
# .. note::
#
# **Memory Store Restrictions**
#
# One specificity of VTA is that it only supports DRAM stores in the narrow
# :code:`env.inp_dtype` data type format.
# This lets us reduce the data footprint for memory transfers, but also lets
# us quantize the wide accumulator data type down to a data format that
# matches the input activation data type.
# This means that in the context of neural network inference, the outputs
# of a given layer after activation can be consumed directly by the next
# layer.
#
# We perform one last typecast operation to the narrow
# input activation data format.
# Cast to output type, and send to main memory.
# VTA can only store the narrow input dtype back to DRAM, so the wide
# accumulator is quantized down before the copy.
C = te.compute(
    (o, m, env.BATCH, env.BLOCK_OUT), lambda *i: C_buf(*i).astype(env.inp_dtype), name="C"
)
######################################################################
# This concludes the computation declaration part of this tutorial.
######################################################################
# Scheduling the Computation
# --------------------------
# While the above lines describes the computation rule, we can obtain
# :code:`C` in many ways.
# TVM asks the user to provide an implementation of the computation called
# *schedule*.
#
# A schedule is a set of transformations to an original computation that
# transforms the implementation of the computation without affecting
# correctness.
# This simple VTA programming tutorial aims to demonstrate basic schedule
# transformations that will map the original schedule down to VTA hardware
# primitives.
######################################################################
# Default Schedule
# ~~~~~~~~~~~~~~~~
# After we construct the schedule, by default the schedule computes
# :code:`C` in the following way:
# Let's take a look at the generated schedule.
# te.create_schedule builds the default (naive) schedule for C's dataflow;
# tvm.lower prints the loop nest before any VTA-specific transforms.
s = te.create_schedule(C.op)
print(tvm.lower(s, [A, B, C], simple_mode=True))
######################################################################
# Although this schedule makes sense, it won't compile to VTA.
# In order to obtain correct code generation, we need to apply scheduling
# primitives and code annotation that will transform the schedule into
# one that can be directly lowered onto VTA hardware intrinsics.
# Those include:
#
# - DMA copy operations which will take globally-scoped tensors and copy
# those into locally-scoped tensors.
# - Tensor operations that will perform the matrix multiplication.
######################################################################
# Buffer Scopes
# ~~~~~~~~~~~~~
# First, we set the scope of the buffers to tell TVM that these buffers
# will be living in the VTA's on-chip SRAM caches.
# Below, we tell TVM that :code:`A_buf`, :code:`B_buf`, :code:`C_buf`
# will respectively live in VTA's on-chip input, weight and accumulator
# memory.
######################################################################
# .. note::
#
# **VTA's On-Chip SRAMs**
#
# VTA has three different memory scopes, each corresponding to different
# on-chip SRAM buffers.
#
# - :code:`env.inp_scope`: Input buffer, which is a read-only SRAM buffer
# that stores input matrices of shape :code:`(env.BATCH, env.BLOCK_IN)`
# of type :code:`env.inp_dtype`. The input buffer contains
# `2 ^ LOG_INP_BUFF_SIZE` matrix elements (as specified in the
# :code:`vta_config.json` file).
# - :code:`env.wgt_scope`: Weight buffer, which is a read-only SRAM buffer
# that stores weight matrices of shape :code:`(env.BLOCK_OUT, env.BLOCK_IN)`
# of type :code:`env.wgt_dtype`. The weight buffer contains
# `2 ^ LOG_WGT_BUFF_SIZE` matrix elements.
# - :code:`env.acc_scope`: Accumulator buffer, which is a read/write SRAM
# buffer that stores accumulator matrices of shape
# :code:`(env.BATCH, env.BLOCK_OUT)` of type :code:`env.acc_dtype`.
# The accumulator buffer is VTA's general purpose register file: it holds
# both intermediate results of convolutions and matrix multiplications
# as well as intermediate results of pooling, batch normalization, and
# activation layers. The accumulator buffer contains
# `2 ^ LOG_ACC_BUFF_SIZE` matrix elements.
# Set the intermediate tensor's scope to VTA's on-chip buffers
# (input, weight and accumulator SRAMs respectively).
s[A_buf].set_scope(env.inp_scope)
s[B_buf].set_scope(env.wgt_scope)
s[C_buf].set_scope(env.acc_scope)
######################################################################
# DMA Transfers
# ~~~~~~~~~~~~~
# We need to schedule DMA transfers to move data living in DRAM to
# and from the VTA on-chip buffers.
# This can be achieved using the :code:`compute_at` schedule primitive
# which nests the copying of the buffers into the computation loop
# that performs the matrix multiplication.
#
# We insert :code:`dma_copy` pragmas to indicate to the compiler
# that the copy operations will be performed in bulk via DMA,
# which is common in hardware accelerators.
# Finally, we print the temporary schedule to observe the effects of
# moving the copy operations into the matrix multiplication loop.
# Move buffer copy into matrix multiply loop:
# compute_at nests the on-chip loads under the outer reduction axis ko,
# so each input tile is fetched right before it is consumed.
s[A_buf].compute_at(s[C_buf], ko)
s[B_buf].compute_at(s[C_buf], ko)
# Tag the buffer copies with the DMA pragma to insert a DMA transfer
s[A_buf].pragma(s[A_buf].op.axis[0], env.dma_copy)
s[B_buf].pragma(s[B_buf].op.axis[0], env.dma_copy)
s[C].pragma(s[C].op.axis[0], env.dma_copy)
# Let's take a look at the transformed schedule
print(tvm.lower(s, [A, B, C], simple_mode=True))
######################################################################
# Tensorization
# ~~~~~~~~~~~~~
# The last step of the schedule transformation consists in applying
# *tensorization* to our schedule.
# Tensorization is analogous to vectorization, but extends the concept
# to a higher-dimensional unit of computation.
# Consequently, tensorization imposes data layout constraints as discussed
# earlier when declaring the data layout input placeholders.
# We've already arranged our tensors in a tiled format, so the next thing
# we need to perform is loop reordering to accommodate for tensorization.
#
# Here we choose to move the outermost reduction axis all the way out.
# This dictates that we first iterate over input channels, then batch
# dimensions, and finally output channels.
# Lastly, we apply the tensorization scheduling primitive :code:`tensorize`
# along the outer axis of the inner-most matrix matrix multiplication tensor
# block.
# We print the finalized schedule that is ready for code-generation
# by the VTA runtime JIT compiler.
# Move the outer reduction axis ko outermost, then map the inner
# (BATCH, BLOCK_OUT) tile onto VTA's GEMM hardware intrinsic.
s[C_buf].reorder(
    ko, s[C_buf].op.axis[0], s[C_buf].op.axis[1], s[C_buf].op.axis[2], s[C_buf].op.axis[3], ki
)
s[C_buf].tensorize(s[C_buf].op.axis[2], env.gemm)
# Let's take a look at the finalized schedule
print(vta.lower(s, [A, B, C], simple_mode=True))
######################################################################
# This concludes the scheduling portion of this tutorial.
######################################################################
# TVM Compilation
# ---------------
# After we have finished specifying the schedule, we can compile it
# into a TVM function.
# Build GEMM VTA kernel targeting the VTA extension device ("ext_dev").
my_gemm = vta.build(
    s, [A, B, C], tvm.target.Target("ext_dev", host=env.target_host), name="my_gemm"
)
# Write the compiled module into an object file.
temp = utils.tempdir()
my_gemm.save(temp.relpath("gemm.o"))
# Send the executable over RPC
remote.upload(temp.relpath("gemm.o"))
# Load the compiled module so it can be invoked as a callable below.
f = remote.load_module("gemm.o")
######################################################################
# Running the Function
# --------------------
# The compiled TVM function uses a concise C API and can be invoked from
# any programming language.
#
# TVM provides an array API in python to aid quick testing and prototyping.
# The array API is based on `DLPack <https://github.com/dmlc/dlpack>`_ standard.
#
# - We first create a remote context (for remote execution on the Pynq).
# - Then :code:`tvm.nd.array` formats the data accordingly.
# - :code:`f()` runs the actual computation.
# - :code:`numpy()` copies the result array back in a format that can be
# interpreted.
#
# Get the remote device context
ctx = remote.ext_dev(0)
# Initialize the A and B arrays randomly in the half-open int range [-128, 128)
# (np.random.randint excludes the upper bound).
A_orig = np.random.randint(-128, 128, size=(o * env.BATCH, n * env.BLOCK_IN)).astype(A.dtype)
B_orig = np.random.randint(-128, 128, size=(m * env.BLOCK_OUT, n * env.BLOCK_IN)).astype(B.dtype)
# Apply packing to the A and B arrays from a 2D to a 4D packed layout
A_packed = A_orig.reshape(o, env.BATCH, n, env.BLOCK_IN).transpose((0, 2, 1, 3))
B_packed = B_orig.reshape(m, env.BLOCK_OUT, n, env.BLOCK_IN).transpose((0, 2, 1, 3))
# Format the input/output arrays with tvm.nd.array to the DLPack standard
A_nd = tvm.nd.array(A_packed, ctx)
B_nd = tvm.nd.array(B_packed, ctx)
C_nd = tvm.nd.array(np.zeros((o, m, env.BATCH, env.BLOCK_OUT)).astype(C.dtype), ctx)
# Clear stats
if env.TARGET in ["sim", "tsim"]:
    simulator.clear_stats()
# Invoke the module to perform the computation
f(A_nd, B_nd, C_nd)
######################################################################
# Verifying Correctness
# ---------------------
# Compute the reference result with numpy and assert that the output of the
# matrix multiplication indeed is correct
# Compute reference result with numpy.
# B is stored as transposed weight tiles (BLOCK_OUT, BLOCK_IN), hence B_orig.T
# in the reference matmul; the result is then re-tiled to match C_nd's layout.
C_ref = np.dot(A_orig.astype(env.acc_dtype), B_orig.T.astype(env.acc_dtype)).astype(C.dtype)
C_ref = C_ref.reshape(o, env.BATCH, m, env.BLOCK_OUT).transpose((0, 2, 1, 3))
np.testing.assert_equal(C_ref, C_nd.numpy())
# Print stats (simulator targets only expose hardware counters)
if env.TARGET in ["sim", "tsim"]:
    sim_stats = simulator.stats()
    print("Execution statistics:")
    for k, v in sim_stats.items():
        print("\t{:<16}: {:>16}".format(k, v))
print("Successful matrix multiply test!")
######################################################################
# Summary
# -------
# This tutorial showcases the TVM workflow to implement a simple matrix
# multiplication example on VTA.
# The general workflow includes:
#
# - Programming the FPGA with the VTA bitstream over RPC.
# - Describing matrix multiplication via a series of computations.
# - Describing how we want to perform the computation using schedule primitives.
# - Compiling the function to the VTA target.
# - Running the compiled module and verifying it against a numpy implementation.
#
| https://github.com/zk-ml/tachikoma |
vta/tutorials/optimize/convolution_opt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
2D Convolution Optimization
===========================
**Author**: `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
This tutorial provides an overview on how to use TVM to map a 2D convolution
workload efficiently on the VTA design.
We recommend covering the :ref:`vta-mat-mult-opt` tutorial first.
2D convolution is dominant in most computer vision deep neural networks.
In this tutorial, we will demonstrate TVM schedule optimizations to map
2D convolution operators in NCHW layout onto VTA.
We also introduce the notion of latency hiding, which allows us to
maximize VTA's compute and memory resource utilization.
"""
######################################################################
# RPC Setup
# ---------
# We start by programming the Pynq's FPGA and building its RPC runtime.
from __future__ import absolute_import, print_function
import os
import tvm
import tvm.testing
from tvm import te
import vta
import numpy as np
from tvm import rpc
from tvm.contrib import utils
from vta.testing import simulator
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# We read the Pynq RPC host IP address and port number from the OS environment
host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))
# We configure both the bitstream and the runtime system on the Pynq
# to match the VTA configuration specified by the vta_config.json file.
if env.TARGET == "pynq":
# Make sure that TVM was compiled with RPC=1
assert tvm.runtime.enabled("rpc")
remote = rpc.connect(host, port)
# Reconfigure the JIT runtime
vta.reconfig_runtime(remote)
# Program the FPGA with a pre-compiled VTA bitstream.
# You can program the FPGA with your own custom bitstream
# by passing the path to the bitstream file instead of None.
vta.program_fpga(remote, bitstream=None)
# In simulation mode, host the RPC server locally.
elif env.TARGET in ["sim", "tsim"]:
remote = rpc.LocalSession()
######################################################################
# Computation Declaration
# -----------------------
# As a first step, we need to describe our 2D convolution computation
# in NCHW format.
#
# We define the 2D convolution shape by the batch size,
# spatial dimensions, input channels, output channels, kernel dimensions,
# padding dimensions, and stride dimensions.
#
# We pick the shape of the 9th convolutional layer of the ResNet-18
# architecture as our convolution workload parameters.
#
# We've added extra operators to the 2D convolution that apply
# shifting and clipping to the output in order to mimic a fixed-point
# convolution followed by a rectified linear activation.
# We describe the TVM dataflow graph of the 2D convolution layer below:
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/conv2d_dataflow.png
#      :align: center
#
# This computation is intentionally too large to fit onto VTA's on-chip
# buffers all at once. Therefore in the scheduling phase we'll
# rely on computation blocking strategies to break the computation down into
# manageable chunks.
#
# .. note::
#
#   *Spatial padding*
#
#   Note that we'll need to import the TOPI library to apply spatial padding
#   on the input feature map tensor.
#   Spatial padding facilitates blocking in the context of 2D convolutions
#   due to the fact that the same (x, y) spatial location of the input
#   feature map of any given layer is read more than once if the convolution
#   kernel window size is greater than one.
#   On CPUs, and GPUs, one way to increase efficiency of memory accesses
#   when parallelizing work is spatial packing, which requires data re-layout.
#   VTA load DMA engine can insert padding automatically so that the original
#   input feature map does not have to be re-packed in memory.
#
#   We show the effect of VTA's on the fly spatial padding when data is being
#   loaded from DRAM into VTA's SRAM, following a 2D strided and padded memory
#   read.
#
#   .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/padding.png
#        :align: center
#        :width: 480px

from tvm import topi

# 2D convolution layer dimensions taken from ResNet-18 architecture
# (9th convolutional layer)
batch_size = 1
height = 14
width = 14
in_channels = 256
out_channels = 256
kernel_h = 3
kernel_w = 3
pad_h = 1
pad_w = 1
stride_h = 1
stride_w = 1

# The workload must tile evenly onto the VTA tensor core shape.
assert batch_size % env.BATCH == 0
assert in_channels % env.BLOCK_IN == 0
assert out_channels % env.BLOCK_OUT == 0

# Input feature map: (N, IC, H, W, n, ic)
data_shape = (
    batch_size // env.BATCH,
    in_channels // env.BLOCK_IN,
    height,
    width,
    env.BATCH,
    env.BLOCK_IN,
)
# Kernel: (OC, IC, H, W, oc, ic)
kernel_shape = (
    out_channels // env.BLOCK_OUT,
    in_channels // env.BLOCK_IN,
    kernel_h,
    kernel_w,
    env.BLOCK_OUT,
    env.BLOCK_IN,
)
# Derive output feature map dimensions (standard conv output-size formula)
fout_height = (height + 2 * pad_h - kernel_h) // stride_h + 1
fout_width = (width + 2 * pad_w - kernel_w) // stride_w + 1
# Output feature map: (N, OC, H, W, n, oc)
output_shape = (
    batch_size // env.BATCH,
    out_channels // env.BLOCK_OUT,
    fout_height,
    fout_width,
    env.BATCH,
    env.BLOCK_OUT,
)

# Convolution reduction axes
dy = te.reduce_axis((0, kernel_h), name="dy")
dx = te.reduce_axis((0, kernel_w), name="dx")
ic = te.reduce_axis((0, in_channels // env.BLOCK_IN), name="ic")
ic_tns = te.reduce_axis((0, env.BLOCK_IN), name="ic_tns")

# Input placeholder tensors
data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
kernel = te.placeholder(kernel_shape, name="kernel", dtype=env.wgt_dtype)

# Copy buffers:
# Apply spatial padding to input feature map
data_buf = topi.nn.pad(data, [0, 0, pad_h, pad_w, 0, 0], name="data_buf")
kernel_buf = te.compute(kernel_shape, lambda *i: kernel(*i), "kernel_buf")

# Declare 2D convolution (accumulation is widened to env.acc_dtype)
res_conv = te.compute(
    output_shape,
    lambda bo, co, i, j, bi, ci: te.sum(
        data_buf[bo, ic, i * stride_h + dy, j * stride_w + dx, bi, ic_tns].astype(env.acc_dtype)
        * kernel_buf[co, ic, dy, dx, ci, ic_tns].astype(env.acc_dtype),
        axis=[ic, dy, dx, ic_tns],
    ),
    name="res_conv",
)
# Add shift stage for fixed-point normalization.
# Shift by env.INP_WIDTH rather than a hard-coded 8 so this stage stays
# consistent with the numpy reference check below (`res_ref >> env.INP_WIDTH`)
# and with the matrix-multiply tutorial, and remains correct if the VTA
# configuration changes the input bit width (INP_WIDTH is 8 by default).
res_shr = te.compute(output_shape, lambda *i: res_conv(*i) >> env.INP_WIDTH, name="res_shr")
# Apply clipping between (0, input max value): the max/min pair emulates a
# ReLU followed by saturation to the representable range of the input dtype.
inp_max = (1 << (env.INP_WIDTH - 1)) - 1
res_max = te.compute(output_shape, lambda *i: tvm.te.max(res_shr(*i), 0), "res_max")
res_min = te.compute(output_shape, lambda *i: tvm.te.min(res_max(*i), inp_max), "res_min")

# Result Tensor: narrow back down to the input data type before store
res = te.compute(output_shape, lambda *i: res_min(*i).astype(env.inp_dtype), name="res")
######################################################################
# Scheduling the Computation
# --------------------------
# We'll look at a set of schedule transformations necessary to map the
# 2D convolution onto VTA in an efficient fashion.
# Those include:
#
# - Computation blocking
# - Virtual threading to increase compute utilization
# - Lowering to VTA hardware intrinsics

# Create TVM schedule
s = te.create_schedule(res.op)
# Let's look at the default TVM schedule
print(tvm.lower(s, [data, kernel, res], simple_mode=True))

######################################################################
# Blocking the Computation
# ~~~~~~~~~~~~~~~~~~~~~~~~
# The 2D convolution is by default too large for activations or kernel weights
# to fit on VTA's on-chip buffers all at once.
# We apply blocking along input channels, output channels, and along
# the height spatial dimensions.
# We don't apply blocking along the width spatial dimension since it's
# the innermost dimension in the NCHW layout (and consequently to increase
# locality, it's best not to block along the innermost dimension).

# Let's define tiling sizes (expressed in multiples of the VTA tensor shape,
# e.g. oc_block covers 128 output channels regardless of BLOCK_OUT).
# With batch_size == 1 the assert above forces env.BATCH == 1, so b_block == 1.
b_block = 1 // env.BATCH
oc_block = 128 // env.BLOCK_OUT
ic_block = 16 // env.BLOCK_IN
h_block = 7
w_block = 14

# Tile the output tensor along the spatial and output channel dimensions
# (since by default we are doing single batch inference, the split along
# the batch dimension has no effect)
b, oc, y, x, b_tns, oc_tns = s[res].op.axis
b_out, b_inn = s[res].split(b, factor=b_block)
oc_out, oc_inn = s[res].split(oc, factor=oc_block)
y_out, y_inn = s[res].split(y, factor=h_block)
x_out, x_inn = s[res].split(x, factor=w_block)
s[res].reorder(b_out, oc_out, y_out, x_out, b_inn, oc_inn, y_inn, x_inn, b_tns, oc_tns)

# Move intermediate computation into each output compute tile
s[res_conv].compute_at(s[res], x_out)
s[res_shr].compute_at(s[res], x_out)
s[res_max].compute_at(s[res], x_out)
s[res_min].compute_at(s[res], x_out)

# Apply additional loop split along reduction axis (input channel)
b_inn, oc_inn, y_inn, x_inn, b_tns, oc_tns = s[res_conv].op.axis
ic_out, ic_inn = s[res_conv].split(ic, factor=ic_block)

# Reorder axes.
# 1) Group the VTA tensor axes in the inner most position: b_tns, oc_tns, ic_tns
#    to allow TVM to tensorize.
# 2) We move the ic_out axis all the way out of the convolution loop to block
#    along the reduction axis.
# 3) Now we re-order the block axes: b_inn, oc_inn, y_inn, x_inn, ic_inn, dy, dx.
#    VTA runtime/hardware requires us to write to a different output feature map
#    location for every VTA tensor operation.
#    This restriction requires us to order one of oc_inn, y_inn or x_inn right
#    before b_tns, since they all affect output feature map indexing.
#    Therefore, we choose to bring x_inn inside as shown below.
s[res_conv].reorder(ic_out, b_inn, oc_inn, y_inn, ic_inn, dy, dx, x_inn, b_tns, oc_tns, ic_tns)
######################################################################
# Virtual Threading
# ~~~~~~~~~~~~~~~~~
# Virtual threading is a mechanism that increases task-level pipeline
# parallelism in the VTA hardware design.
# Put another way, it increases compute resource utilization by hiding
# memory access latency.
#
# In the implementation below, virtual threading distributes work across two
# threads split along the output channel axis.
# We show how work is split when computing the 2D convolution in the figure
# below.
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/virtual_threading.png
#      :align: center
#      :width: 480px

# VTA only supports 2 virtual threads
v_threads = 2

# Perform virtual thread split along output channel outer axis
_, tx = s[res].split(oc_out, factor=v_threads)
s[res].reorder(tx, b_out)
# "cthread" marks the axis as a VTA virtual thread rather than a real HW thread
s[res].bind(tx, te.thread_axis("cthread"))

# Let's look at the current TVM schedule after blocking and virtual threading
print(tvm.lower(s, [data, kernel, res], simple_mode=True))
######################################################################
# Lowering Copies to DMA Transfers
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Next we set the buffer scopes to the corresponding on-chip VTA SRAM buffers.
# We move the load loops into the 2D convolution computation loop to stage
# memory loads such that they fit in the on-chip SRAM buffers.
# Finally we annotate the load/store loop outer axes with the DMA copy pragma
# to perform bulk memory transfers on VTA.

# Set scope of SRAM buffers
s[data_buf].set_scope(env.inp_scope)
s[kernel_buf].set_scope(env.wgt_scope)
s[res_conv].set_scope(env.acc_scope)
s[res_shr].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)

# Block data and kernel cache reads (reloaded once per reduction block)
s[data_buf].compute_at(s[res_conv], ic_out)
s[kernel_buf].compute_at(s[res_conv], ic_out)

# Use DMA copy pragma on DRAM->SRAM operations
s[data_buf].pragma(s[data_buf].op.axis[0], env.dma_copy)
s[kernel_buf].pragma(s[kernel_buf].op.axis[0], env.dma_copy)

# Use DMA copy pragma on SRAM->DRAM operation in each result block
# (this implies that these copies should be performed along b_inn,
# or result axis 4)
s[res].pragma(s[res].op.axis[4], env.dma_copy)

######################################################################
# Lowering Computation to VTA Compute Intrinsics
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# The last phase is to lower the computation loops down to VTA hardware
# intrinsics by mapping the 2D convolution to tensor intrinsics,
# and mapping the shift, and clipping computation to the vector ALU.

# Apply tensorization over the batch tensor tile axis
s[res_conv].tensorize(b_tns, env.gemm)

# Add an ALU pragma over the shift and clipping operations
s[res_shr].pragma(s[res_shr].op.axis[0], env.alu)
s[res_min].pragma(s[res_min].op.axis[0], env.alu)
s[res_max].pragma(s[res_max].op.axis[0], env.alu)

# Let's look at the final lowered TVM schedule after lowering memory
# loads/stores down to DMA copy intrinsics, and the computation down to
# VTA compute intrinsics.
print(vta.lower(s, [data, kernel, res], simple_mode=True))
######################################################################
# TVM Compilation and Verification
# --------------------------------
# After specifying the schedule, we can compile it into a TVM function.
# We save the module so we can send it over RPC.
# We run the function and verify it against a numpy implementation to
# ensure correctness.

# This library facilitates 2D convolution testing
from tvm.topi.testing import conv2d_nchw_python

# Compile the TVM module
with vta.build_config(disabled_pass={"tir.CommonSubexprElimTIR"}):
    my_conv = vta.build(
        s, [data, kernel, res], tvm.target.Target("ext_dev", host=env.target_host), name="my_conv"
    )
temp = utils.tempdir()
my_conv.save(temp.relpath("conv2d.o"))
remote.upload(temp.relpath("conv2d.o"))
f = remote.load_module("conv2d.o")

# Get the remote device context
ctx = remote.ext_dev(0)

# Initialize the data and kernel arrays randomly in the int range
# of (-128, 128] in NCHW layout
data_np = np.random.randint(-128, 128, size=(batch_size, in_channels, height, width)).astype(
    data.dtype
)
kernel_np = np.random.randint(
    -128, 128, size=(out_channels, in_channels, kernel_h, kernel_w)
).astype(kernel.dtype)

# Apply packing to the data and kernel arrays from a 2D NCHW
# to a 4D NCHWnc packed layout
data_packed = data_np.reshape(
    batch_size // env.BATCH, env.BATCH, in_channels // env.BLOCK_IN, env.BLOCK_IN, height, width
).transpose((0, 2, 4, 5, 1, 3))

kernel_packed = kernel_np.reshape(
    out_channels // env.BLOCK_OUT,
    env.BLOCK_OUT,
    in_channels // env.BLOCK_IN,
    env.BLOCK_IN,
    kernel_h,
    kernel_w,
).transpose((0, 2, 4, 5, 1, 3))

# Format the input/output arrays with tvm.nd.array to the DLPack standard
data_nd = tvm.nd.array(data_packed, ctx)
kernel_nd = tvm.nd.array(kernel_packed, ctx)
res_nd = tvm.nd.array(np.zeros(output_shape).astype(res.dtype), ctx)

# Clear stats
if env.TARGET in ["sim", "tsim"]:
    simulator.clear_stats()

# Invoke the module to perform the computation
f(data_nd, kernel_nd, res_nd)

# Verify against numpy implementation, applying the same shift/clip/cast
# post-processing that the schedule's res_shr/res_max/res_min/res stages do.
res_ref = conv2d_nchw_python(
    data_np.astype(env.acc_dtype),
    kernel_np.astype(env.acc_dtype),
    (stride_h, stride_w),
    (pad_h, pad_w),
).astype(env.acc_dtype)
res_ref = res_ref >> env.INP_WIDTH
res_ref = np.clip(res_ref, 0, inp_max)
res_ref = res_ref.astype(res.dtype)
res_ref = res_ref.reshape(
    (
        batch_size // env.BATCH,
        env.BATCH,
        out_channels // env.BLOCK_OUT,
        env.BLOCK_OUT,
        fout_height,
        fout_width,
    )
).transpose((0, 2, 4, 5, 1, 3))
tvm.testing.assert_allclose(res_ref, res_nd.numpy())

# Print stats
if env.TARGET in ["sim", "tsim"]:
    sim_stats = simulator.stats()
    print("Execution statistics:")
    for k, v in sim_stats.items():
        print("\t{:<16}: {:>16}".format(k, v))

print("Successful 2D convolution test!")

######################################################################
# Summary
# -------
# This tutorial demonstrates how TVM scheduling primitives can be used to
# lower 2D convolution onto hardware accelerator intrinsics, making
# use of hardware specific optimizations, such as latency hiding with
# virtual threading.
#
| https://github.com/zk-ml/tachikoma |
vta/tutorials/optimize/matrix_multiply_opt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
.. _vta-mat-mult-opt:
Matrix Multiply Blocking
========================
**Author**: `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
This tutorial provides an overview on how to use TVM to map matrix
multiplication efficiently on the VTA design.
We recommend covering the :ref:`basic-mat-mult` tutorial first.
In this tutorial, we will demonstrate TVM schedule optimizations to break large
neural network operators down onto smaller blocks to achieve computation within
limited hardware accelerator resources.
"""
######################################################################
# RPC Setup
# ---------
# We start by programming the Pynq's FPGA and building its RPC runtime.
from __future__ import absolute_import, print_function
import os
import tvm
from tvm import te
import vta
import numpy as np
from tvm import rpc
from tvm.contrib import utils
from vta.testing import simulator
# Load VTA parameters from the 3rdparty/vta-hw/config/vta_config.json file
env = vta.get_env()
# We read the Pynq RPC host IP address and port number from the OS environment
host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))
# We configure both the bitstream and the runtime system on the Pynq
# to match the VTA configuration specified by the vta_config.json file.
if env.TARGET == "pynq":
# Make sure that TVM was compiled with RPC=1
assert tvm.runtime.enabled("rpc")
remote = rpc.connect(host, port)
# Reconfigure the JIT runtime
vta.reconfig_runtime(remote)
# Program the FPGA with a pre-compiled VTA bitstream.
# You can program the FPGA with your own custom bitstream
# by passing the path to the bitstream file instead of None.
vta.program_fpga(remote, bitstream=None)
# In simulation mode, host the RPC server locally.
elif env.TARGET in ["sim", "tsim"]:
remote = rpc.LocalSession()
######################################################################
# Computation Declaration
# -----------------------
# As a first step, we need to describe our matrix multiplication computation.
# We define the matrix multiplication as the computation one would find in a
# fully connected layer, defined by its batch size, input channels, and output
# channels.
# These have to be integer multiples of the VTA tensor shape:
# :code:`BATCH`, :code:`BLOCK_IN`, and :code:`BLOCK_OUT` respectively.
#
# We've added extra operators to the matrix multiplication that apply
# shifting and clipping to the output in order to mimic a fixed-point
# matrix multiplication followed by a rectified linear activation.
# We describe the TVM dataflow graph of the fully connected layer below:
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/fc_dataflow.png
#      :align: center
#
# This computation is intentionally too large to fit onto VTA's on-chip
# buffers all at once. Therefore in the scheduling phase we'll
# rely on computation blocking strategies to break the computation down into
# manageable chunks.

# Fully connected layer dimensions: 1024 x 1024
batch_size = 1
in_channels = 1024
out_channels = 1024
# The workload must tile evenly onto the VTA tensor core shape.
assert batch_size % env.BATCH == 0
assert in_channels % env.BLOCK_IN == 0
assert out_channels % env.BLOCK_OUT == 0

# Let's derive the tiled input tensor shapes
data_shape = (batch_size // env.BATCH, in_channels // env.BLOCK_IN, env.BATCH, env.BLOCK_IN)
weight_shape = (
    out_channels // env.BLOCK_OUT,
    in_channels // env.BLOCK_IN,
    env.BLOCK_OUT,
    env.BLOCK_IN,
)
output_shape = (batch_size // env.BATCH, out_channels // env.BLOCK_OUT, env.BATCH, env.BLOCK_OUT)
# Total operation count (2 ops per multiply-accumulate)
num_ops = in_channels * out_channels * batch_size * 2

# Reduction axes
ic = te.reduce_axis((0, in_channels // env.BLOCK_IN), name="ic")
ic_tns = te.reduce_axis((0, env.BLOCK_IN), name="ic_tns")

# Input placeholder tensors
data = te.placeholder(data_shape, name="data", dtype=env.inp_dtype)
weight = te.placeholder(weight_shape, name="weight", dtype=env.wgt_dtype)

# Copy buffers
data_buf = te.compute(data_shape, lambda *i: data(*i), "data_buf")
weight_buf = te.compute(weight_shape, lambda *i: weight(*i), "weight_buf")

# Declare matrix multiply computation (accumulation widened to env.acc_dtype)
res_gemm = te.compute(
    output_shape,
    lambda bo, co, bi, ci: te.sum(
        data_buf[bo, ic, bi, ic_tns].astype(env.acc_dtype)
        * weight_buf[co, ic, ci, ic_tns].astype(env.acc_dtype),
        axis=[ic, ic_tns],
    ),
    # NOTE(review): "res_gem" looks like a typo for "res_gemm"; kept as-is since
    # it is only a debug label that shows up in printed schedules.
    name="res_gem",
)

# Add shift stage for fixed-point normalization
res_shr = te.compute(output_shape, lambda *i: res_gemm(*i) >> env.INP_WIDTH, name="res_shr")

# Apply clipping between (0, input max value): emulates ReLU plus saturation
# to the representable range of the input dtype.
inp_max = (1 << (env.INP_WIDTH - 1)) - 1
res_max = te.compute(output_shape, lambda *i: tvm.te.max(res_shr(*i), 0), "res_max")
res_min = te.compute(output_shape, lambda *i: tvm.te.min(res_max(*i), inp_max), "res_min")

# Apply typecast to input data type before sending results back
res = te.compute(output_shape, lambda *i: res_min(*i).astype(env.inp_dtype), name="res")
######################################################################
# Scheduling the Computation
# --------------------------
# We'll look at a set of schedule transformations necessary to map the
# matrix multiplications onto VTA in an efficient fashion.
# Those include:
#
# - Computation blocking
# - Lowering to VTA hardware intrinsics

# Create TVM schedule
s = te.create_schedule(res.op)
# Let's look at the default TVM schedule
print(tvm.lower(s, [data, weight, res], simple_mode=True))

######################################################################
# Blocking the Computation
# ~~~~~~~~~~~~~~~~~~~~~~~~
# The matrix multiplication is by default too large for activations or weights
# to fit on VTA's on-chip buffers all at once.
# We block the (1, 1024) by (1024, 1024) matrix multiplication into
# smaller (1, 256) by (256, 256) matrix multiplications so the intermediate
# tensors can fit on the accelerator's on-chip SRAM.
# This approach is similar to blocking techniques applied to CPUs and GPUs in
# order to increase cache hit rate.
#
# We perform blocking along each axes (the batch axis being untouched since
# we are performing single-batch inference).
# We also leave the inner-most tensorization axes as-is in order to allow
# TVM to pattern-match tensorization.
# We show the outcome of blocking on the computation schedule in the diagram
# below:
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/blocking.png
#      :align: center
#      :width: 480px
#
# .. note::
#
#   The code after loop splitting and reordering is equivalent to the following
#   pseudo-code. We ignore the batch axis since we are only performing single-batch
#   inference in this example:
#
#   .. code-block:: c
#
#      for (int oc_out = 0; oc_out < 4; ++oc_out) {
#       // Initialization loop
#       for (int oc_inn = 0; oc_inn < 16; ++oc_inn) {
#        for (int oc_tns = 0; oc_tns < 16; ++oc_tns) {
#         int j = (oc_out * 16 + oc_inn) * 16 + oc_tns;
#         C[0][j] = 0;
#        }
#       }
#       for (int ic_out = 0; ic_out < 4; ++ic_out) {
#        // Block loop
#        for (int oc_inn = 0; oc_inn < 16; ++oc_inn) {
#         for (int ic_inn = 0; ic_inn < 16; ++ic_inn) {
#          // Tensorization loop
#          for (int oc_tns = 0; oc_tns < 16; ++oc_tns) {
#           for (int ic_tns = 0; ic_tns < 16; ++ic_tns) {
#            int i = (ic_out * 16 + ic_inn) * 16 + ic_tns;
#            int j = (oc_out * 16 + oc_inn) * 16 + oc_tns;
#            C[0][j] = C[0][j] + A[0][i] * B[j][i];
#           }
#          }
#         }
#        }
#       }
#      }

# Let's define tiling sizes (expressed in multiples of VTA tensor shape size).
# With batch_size == 1 the assert above forces env.BATCH == 1, so b_block == 1.
b_block = 1 // env.BATCH
i_block = 256 // env.BLOCK_IN
o_block = 256 // env.BLOCK_OUT

# Tile the output tensor along the batch and output channel dimensions
# (since by default we are doing single batch inference, the split along
# the batch dimension has no effect)
b, oc, b_tns, oc_tns = s[res].op.axis
b_out, b_inn = s[res].split(b, b_block)
oc_out, oc_inn = s[res].split(oc, o_block)
s[res].reorder(b_out, oc_out, b_inn, oc_inn)

# Move intermediate computation into each output compute tile
s[res_gemm].compute_at(s[res], oc_out)
s[res_shr].compute_at(s[res], oc_out)
s[res_max].compute_at(s[res], oc_out)
s[res_min].compute_at(s[res], oc_out)

# Apply additional loop split along reduction axis (input channel)
b_inn, oc_inn, b_tns, oc_tns = s[res_gemm].op.axis
ic_out, ic_inn = s[res_gemm].split(ic, i_block)

# Reorder axes. We move the ic_out axis all the way out of the GEMM
# loop to block along the reduction axis
s[res_gemm].reorder(ic_out, b_inn, oc_inn, ic_inn, b_tns, oc_tns, ic_tns)

# Let's look at the current TVM schedule after blocking
print(tvm.lower(s, [data, weight, res], simple_mode=True))
######################################################################
# Lowering Copies to DMA Transfers
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Next we set the buffer scopes to the corresponding on-chip VTA SRAM buffers.
# We move the load loops into the matrix multiply computation loop to stage
# memory loads such that they fit in the on-chip SRAM buffers.
# Finally we annotate the load/store loop outer axes with the DMA copy pragma
# to perform bulk memory transfers on VTA.

# Set scope of SRAM buffers
s[data_buf].set_scope(env.inp_scope)
s[weight_buf].set_scope(env.wgt_scope)
s[res_gemm].set_scope(env.acc_scope)
s[res_shr].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)

# Block data and weight cache reads (reloaded once per reduction block)
s[data_buf].compute_at(s[res_gemm], ic_out)
s[weight_buf].compute_at(s[res_gemm], ic_out)

# Use DMA copy pragma on DRAM->SRAM operations
s[data_buf].pragma(s[data_buf].op.axis[0], env.dma_copy)
s[weight_buf].pragma(s[weight_buf].op.axis[0], env.dma_copy)

# Use DMA copy pragma on SRAM->DRAM operation
# (this implies that these copies should be performed along b_inn,
# or result axis 2)
s[res].pragma(s[res].op.axis[2], env.dma_copy)

######################################################################
# Lowering Computation to VTA Compute Intrinsics
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# The last phase is to lower the computation loops down to VTA hardware
# intrinsics by mapping the matrix multiplication to tensor intrinsics,
# and mapping the shift, and clipping computation to the vector ALU.

# Apply tensorization over the batch tensor tile axis
s[res_gemm].tensorize(b_tns, env.gemm)

# Add an ALU pragma over the shift and clipping operations
s[res_shr].pragma(s[res_shr].op.axis[0], env.alu)
s[res_min].pragma(s[res_min].op.axis[0], env.alu)
s[res_max].pragma(s[res_max].op.axis[0], env.alu)

# Let's look at the final lowered TVM schedule after lowering memory
# loads/stores down to DMA copy intrinsics, and the computation down to
# VTA compute intrinsics.
print(vta.lower(s, [data, weight, res], simple_mode=True))
######################################################################
# TVM Compilation and Verification
# --------------------------------
# After specifying the schedule, we can compile it into a TVM function.
# We save the module so we can send it over RPC.
# We run the function and verify it against a numpy implementation to
# ensure correctness.

# Compile the TVM module
my_gemm = vta.build(
    s, [data, weight, res], tvm.target.Target("ext_dev", host=env.target_host), name="my_gemm"
)
temp = utils.tempdir()
my_gemm.save(temp.relpath("gemm.o"))
remote.upload(temp.relpath("gemm.o"))
f = remote.load_module("gemm.o")

# Get the remote device context
ctx = remote.ext_dev(0)

# Initialize the data and weight arrays randomly in the int range of (-128, 128]
data_np = np.random.randint(-128, 128, size=(batch_size, in_channels)).astype(data.dtype)
weight_np = np.random.randint(-128, 128, size=(out_channels, in_channels)).astype(weight.dtype)

# Apply packing to the data and weight arrays from a 2D to a 4D packed layout
data_packed = data_np.reshape(
    batch_size // env.BATCH, env.BATCH, in_channels // env.BLOCK_IN, env.BLOCK_IN
).transpose((0, 2, 1, 3))
weight_packed = weight_np.reshape(
    out_channels // env.BLOCK_OUT, env.BLOCK_OUT, in_channels // env.BLOCK_IN, env.BLOCK_IN
).transpose((0, 2, 1, 3))

# Format the input/output arrays with tvm.nd.array to the DLPack standard
data_nd = tvm.nd.array(data_packed, ctx)
weight_nd = tvm.nd.array(weight_packed, ctx)
res_nd = tvm.nd.array(np.zeros(output_shape).astype(res.dtype), ctx)

# Clear stats
if env.TARGET in ["sim", "tsim"]:
    simulator.clear_stats()

# Invoke the module to perform the computation
f(data_nd, weight_nd, res_nd)

# Verify against numpy implementation, applying the same shift/clip/cast
# post-processing that the schedule's res_shr/res_max/res_min/res stages do.
res_ref = np.dot(data_np.astype(env.acc_dtype), weight_np.T.astype(env.acc_dtype))
res_ref = res_ref >> env.INP_WIDTH
res_ref = np.clip(res_ref, 0, inp_max)
res_ref = res_ref.astype(res.dtype)
res_ref = res_ref.reshape(
    batch_size // env.BATCH, env.BATCH, out_channels // env.BLOCK_OUT, env.BLOCK_OUT
).transpose((0, 2, 1, 3))
np.testing.assert_equal(res_ref, res_nd.numpy())

# Print stats
if env.TARGET in ["sim", "tsim"]:
    sim_stats = simulator.stats()
    print("Execution statistics:")
    for k, v in sim_stats.items():
        print("\t{:<16}: {:>16}".format(k, v))

print("Successful blocked matrix multiply test!")

######################################################################
# Summary
# -------
# This tutorial demonstrates how TVM scheduling primitives can achieve
# computation blocking for a matrix multiplication example.
# This allows us to map arbitrarily large computation onto limited
# hardware accelerator resources.
#
| https://github.com/zk-ml/tachikoma |
vta/tutorials/vta_get_started.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
.. _vta-get-started:
Get Started with VTA
====================
**Author**: `Thierry Moreau <https://homes.cs.washington.edu/~moreau/>`_
This is an introduction tutorial on how to use TVM to program the VTA design.
In this tutorial, we will demonstrate the basic TVM workflow to implement
a vector addition on the VTA design's vector ALU.
This process includes specific scheduling transformations necessary to lower
computation down to low-level accelerator operations.
To begin, we need to import TVM which is our deep learning optimizing compiler.
We also need to import the VTA python package which contains VTA specific
extensions for TVM to target the VTA design.
"""
from __future__ import absolute_import, print_function
import os
import tvm
from tvm import te
import vta
import numpy as np
######################################################################
# Loading in VTA Parameters
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# VTA is a modular and customizable design. Consequently, the user
# is free to modify high-level hardware parameters that affect
# the hardware design layout.
# These parameters are specified in the :code:`vta_config.json` file by their
# :code:`log2` values.
# These VTA parameters can be loaded with the :code:`vta.get_env`
# function.
#
# Finally, the TVM target is also specified in the :code:`vta_config.json` file.
# When set to *sim*, execution will take place inside of a behavioral
# VTA simulator.
# If you want to run this tutorial on the Pynq FPGA development platform,
# follow the *VTA Pynq-Based Testing Setup* guide.
# Load the VTA hardware parameters (batch/block sizes, dtypes, target)
# declared in vta_config.json.
env = vta.get_env()

######################################################################
# FPGA Programming
# ----------------
# When targeting the Pynq FPGA development board, we need to configure
# the board with a VTA bitstream.
# We'll need the TVM RPC module and the VTA simulator module
from tvm import rpc
from tvm.contrib import utils
from vta.testing import simulator

# We read the Pynq RPC host IP address and port number from the OS environment
# (defaults match the standard Pynq board setup).
host = os.environ.get("VTA_RPC_HOST", "192.168.2.99")
port = int(os.environ.get("VTA_RPC_PORT", "9091"))

# We configure both the bitstream and the runtime system on the Pynq
# to match the VTA configuration specified by the vta_config.json file.
if env.TARGET == "pynq" or env.TARGET == "de10nano":
    # Make sure that TVM was compiled with RPC=1
    assert tvm.runtime.enabled("rpc")
    remote = rpc.connect(host, port)

    # Reconfigure the JIT runtime
    vta.reconfig_runtime(remote)

    # Program the FPGA with a pre-compiled VTA bitstream.
    # You can program the FPGA with your own custom bitstream
    # by passing the path to the bitstream file instead of None.
    vta.program_fpga(remote, bitstream=None)

# In simulation mode, host the RPC server locally.
elif env.TARGET in ("sim", "tsim", "intelfocl"):
    remote = rpc.LocalSession()

    if env.TARGET in ["intelfocl"]:
        # program intelfocl aocx
        vta.program_fpga(remote, bitstream="vta.bitstream")
######################################################################
# Computation Declaration
# -----------------------
# As a first step, we need to describe our computation.
# TVM adopts tensor semantics, with each intermediate result
# represented as multi-dimensional array. The user needs to describe
# the computation rule that generates the output tensors.
#
# In this example we describe a vector addition, which requires multiple
# computation stages, as shown in the dataflow diagram below.
# First we describe the input tensors :code:`A` and :code:`B` that are living
# in main memory.
# Second, we need to declare intermediate tensors :code:`A_buf` and
# :code:`B_buf`, which will live in VTA's on-chip buffers.
# Having this extra computational stage allows us to explicitly
# stage cached reads and writes.
# Third, we describe the vector addition computation which will
# add :code:`A_buf` to :code:`B_buf` to produce :code:`C_buf`.
# The last operation is a cast and copy back to DRAM, into results tensor
# :code:`C`.
#
# .. image:: https://raw.githubusercontent.com/uwsampl/web-data/main/vta/tutorial/vadd_dataflow.png
# :align: center
######################################################################
# Input Placeholders
# ~~~~~~~~~~~~~~~~~~
# We describe the placeholder tensors :code:`A`, and :code:`B` in a tiled data
# format to match the data layout requirements imposed by the VTA vector ALU.
#
# For VTA's general purpose operations such as vector adds, the tile size is
# :code:`(env.BATCH, env.BLOCK_OUT)`.
# The dimensions are specified in
# the :code:`vta_config.json` configuration file and are set by default to
# a (1, 16) vector.
#
# In addition, A and B's data types also needs to match the :code:`env.acc_dtype`
# which is set by the :code:`vta_config.json` file to be a 32-bit integer.
# Output channel factor m - total 64 x 16 = 1024 output channels
m = 64
# Batch factor o - total 1 x 1 = 1
o = 1
# A placeholder tensor in tiled data format: (o, m, env.BATCH, env.BLOCK_OUT),
# using the accumulator dtype required by the vector ALU.
A = te.placeholder((o, m, env.BATCH, env.BLOCK_OUT), name="A", dtype=env.acc_dtype)
# B placeholder tensor in tiled data format
B = te.placeholder((o, m, env.BATCH, env.BLOCK_OUT), name="B", dtype=env.acc_dtype)
######################################################################
# Copy Buffers
# ~~~~~~~~~~~~
# One specificity of hardware accelerators, is that on-chip memory has to be
# explicitly managed.
# This means that we'll need to describe intermediate tensors :code:`A_buf`
# and :code:`B_buf` that can have a different memory scope than the original
# placeholder tensors :code:`A` and :code:`B`.
#
# Later in the scheduling phase, we can tell the compiler that :code:`A_buf`
# and :code:`B_buf` will live in the VTA's on-chip buffers (SRAM), while
# :code:`A` and :code:`B` will live in main memory (DRAM).
# We describe A_buf and B_buf as the result of a compute
# operation that is the identity function.
# This can later be interpreted by the compiler as a cached read operation.
# A copy buffer (an identity compute stage, later interpreted by the
# compiler as a cached read into on-chip SRAM)
A_buf = te.compute((o, m, env.BATCH, env.BLOCK_OUT), lambda *i: A(*i), "A_buf")
# B copy buffer
B_buf = te.compute((o, m, env.BATCH, env.BLOCK_OUT), lambda *i: B(*i), "B_buf")
######################################################################
# Vector Addition
# ~~~~~~~~~~~~~~~
# Now we're ready to describe the vector addition result tensor :code:`C`,
# with another compute operation.
# The compute function takes the shape of the tensor, as well as a lambda
# function that describes the computation rule for each position of the tensor.
#
# No computation happens during this phase, as we are only declaring how
# the computation should be done.
# Describe the in-VTA vector addition
# (performed element-wise in the accumulator data type)
C_buf = te.compute(
    (o, m, env.BATCH, env.BLOCK_OUT),
    lambda *i: A_buf(*i).astype(env.acc_dtype) + B_buf(*i).astype(env.acc_dtype),
    name="C_buf",
)
######################################################################
# Casting the Results
# ~~~~~~~~~~~~~~~~~~~
# After the computation is done, we'll need to send the results computed by VTA
# back to main memory.
######################################################################
# .. note::
#
# **Memory Store Restrictions**
#
# One specificity of VTA is that it only supports DRAM stores in the narrow
# :code:`env.inp_dtype` data type format.
# This lets us reduce the data footprint for memory transfers (more on this
# in the basic matrix multiply example).
#
# We perform one last typecast operation to the narrow
# input activation data format.
# Cast to output type, and send to main memory
# (VTA only supports DRAM stores in the narrow env.inp_dtype format)
C = te.compute(
    (o, m, env.BATCH, env.BLOCK_OUT), lambda *i: C_buf(*i).astype(env.inp_dtype), name="C"
)
######################################################################
# This concludes the computation declaration part of this tutorial.
######################################################################
# Scheduling the Computation
# --------------------------
# While the above lines describe the computation rule, we can obtain
# :code:`C` in many ways.
# TVM asks the user to provide an implementation of the computation called
# *schedule*.
#
# A schedule is a set of transformations to an original computation that
# transforms the implementation of the computation without affecting
# correctness.
# This simple VTA programming tutorial aims to demonstrate basic schedule
# transformations that will map the original schedule down to VTA hardware
# primitives.
######################################################################
# Default Schedule
# ~~~~~~~~~~~~~~~~
# After we construct the schedule, by default the schedule computes
# :code:`C` in the following way:
# Let's take a look at the generated schedule
s = te.create_schedule(C.op)
# simple_mode=True prints a compact statement-only representation.
print(tvm.lower(s, [A, B, C], simple_mode=True))
######################################################################
# Although this schedule makes sense, it won't compile to VTA.
# In order to obtain correct code generation, we need to apply scheduling
# primitives and code annotation that will transform the schedule into
# one that can be directly lowered onto VTA hardware intrinsics.
# Those include:
#
# - DMA copy operations which will take globally-scoped tensors and copy
# those into locally-scoped tensors.
# - Vector ALU operations that will perform the vector add.
######################################################################
# Buffer Scopes
# ~~~~~~~~~~~~~
# First, we set the scope of the copy buffers to indicate to TVM that these
# intermediate tensors will be stored in the VTA's on-chip SRAM buffers.
# Below, we tell TVM that :code:`A_buf`, :code:`B_buf`, :code:`C_buf`
# will live in VTA's on-chip *accumulator buffer* which serves as
# VTA's general purpose register file.
#
# Set the intermediate tensors' scope to VTA's on-chip accumulator buffer
s[A_buf].set_scope(env.acc_scope)
s[B_buf].set_scope(env.acc_scope)
s[C_buf].set_scope(env.acc_scope)
######################################################################
# DMA Transfers
# ~~~~~~~~~~~~~
# We need to schedule DMA transfers to move data living in DRAM to
# and from the VTA on-chip buffers.
# We insert :code:`dma_copy` pragmas to indicate to the compiler
# that the copy operations will be performed in bulk via DMA,
# which is common in hardware accelerators.
# Tag the buffer copies with the DMA pragma to map a copy loop to a
# DMA transfer operation
s[A_buf].pragma(s[A_buf].op.axis[0], env.dma_copy)
s[B_buf].pragma(s[B_buf].op.axis[0], env.dma_copy)
# The store of the result back to DRAM is likewise a bulk DMA copy.
s[C].pragma(s[C].op.axis[0], env.dma_copy)
######################################################################
# ALU Operations
# ~~~~~~~~~~~~~~
# VTA has a vector ALU that can perform vector operations on tensors
# in the accumulator buffer.
# In order to tell TVM that a given operation needs to be mapped to the
# VTA's vector ALU, we need to explicitly tag the vector addition loop
# with an :code:`env.alu` pragma.
# Tell TVM that the computation needs to be performed
# on VTA's vector ALU
s[C_buf].pragma(C_buf.op.axis[0], env.alu)

# Let's take a look at the finalized schedule
print(vta.lower(s, [A, B, C], simple_mode=True))
######################################################################
# This concludes the scheduling portion of this tutorial.
######################################################################
# TVM Compilation
# ---------------
# After we have finished specifying the schedule, we can compile it
# into a TVM function. By default TVM compiles into a type-erased
# function that can be directly called from python side.
#
# In the following line, we use :code:`tvm.build` to create a function.
# The build function takes the schedule, the desired signature of the
# function(including the inputs and outputs) as well as target language
# we want to compile to.
#
# Compile the schedule into a callable module targeting the VTA device
# ("ext_dev"), with host-side code generated for env.target_host.
my_vadd = vta.build(
    s, [A, B, C], tvm.target.Target("ext_dev", host=env.target_host), name="my_vadd"
)
######################################################################
# Saving the Module
# ~~~~~~~~~~~~~~~~~
# TVM lets us save our module into a file so it can loaded back later. This
# is called ahead-of-time compilation and allows us to save some compilation
# time.
# More importantly, this allows us to cross-compile the executable on our
# development machine and send it over to the Pynq FPGA board over RPC for
# execution.
# Write the compiled module into an object file.
temp = utils.tempdir()
my_vadd.save(temp.relpath("vadd.o"))

# Send the executable over RPC
remote.upload(temp.relpath("vadd.o"))

######################################################################
# Loading the Module
# ~~~~~~~~~~~~~~~~~~
# We can load the compiled module from the file system to run the code.
f = remote.load_module("vadd.o")
######################################################################
# Running the Function
# --------------------
# The compiled TVM function uses a concise C API and can be invoked from
# any language.
#
# TVM provides an array API in python to aid quick testing and prototyping.
# The array API is based on `DLPack <https://github.com/dmlc/dlpack>`_ standard.
#
# - We first create a remote context (for remote execution on the Pynq).
# - Then :code:`tvm.nd.array` formats the data accordingly.
# - :code:`f()` runs the actual computation.
# - :code:`numpy()` copies the result array back in a format that can be
# interpreted.
#
# Get the remote device context
ctx = remote.ext_dev(0)

# Initialize the A and B arrays randomly in the int range of [-128, 128)
# (numpy's randint upper bound is exclusive).
A_orig = np.random.randint(-128, 128, size=(o * env.BATCH, m * env.BLOCK_OUT)).astype(A.dtype)
B_orig = np.random.randint(-128, 128, size=(o * env.BATCH, m * env.BLOCK_OUT)).astype(B.dtype)

# Apply packing to the A and B arrays from a 2D to a 4D packed layout
A_packed = A_orig.reshape(o, env.BATCH, m, env.BLOCK_OUT).transpose((0, 2, 1, 3))
B_packed = B_orig.reshape(o, env.BATCH, m, env.BLOCK_OUT).transpose((0, 2, 1, 3))

# Format the input/output arrays with tvm.nd.array to the DLPack standard
A_nd = tvm.nd.array(A_packed, ctx)
B_nd = tvm.nd.array(B_packed, ctx)
C_nd = tvm.nd.array(np.zeros((o, m, env.BATCH, env.BLOCK_OUT)).astype(C.dtype), ctx)

# Invoke the module to perform the computation
f(A_nd, B_nd, C_nd)
######################################################################
# Verifying Correctness
# ---------------------
# Compute the reference result with numpy and assert that the output of the
# vector addition indeed is correct
# Compute reference result with numpy
C_ref = (A_orig.astype(env.acc_dtype) + B_orig.astype(env.acc_dtype)).astype(C.dtype)
# Repack the 2D reference into the same 4D tiled layout as C_nd.
C_ref = C_ref.reshape(o, env.BATCH, m, env.BLOCK_OUT).transpose((0, 2, 1, 3))
np.testing.assert_equal(C_ref, C_nd.numpy())
print("Successful vector add test!")
######################################################################
# Summary
# -------
# This tutorial provides a walk-through of TVM for programming the
# deep learning accelerator VTA with a simple vector addition example.
# The general workflow includes:
#
# - Programming the FPGA with the VTA bitstream over RPC.
# - Describing the vector add computation via a series of computations.
# - Describing how we want to perform the computation using schedule primitives.
# - Compiling the function to the VTA target.
# - Running the compiled module and verifying it against a numpy implementation.
#
# You are more than welcome to check other examples out and tutorials
# to learn more about the supported operations, schedule primitives
# and other features supported by TVM to program VTA.
#
| https://github.com/zk-ml/tachikoma |
web/apps/node/example.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Example code to start the runtime.
*/
const path = require("path");
const fs = require("fs");
const tvmjs = require("../../dist");

// Locate the prebuilt wasm runtime artifacts shipped with tvmjs.
const runtimeDir = tvmjs.wasmPath();
const EmccWASI = require(path.join(runtimeDir, "tvmjs_runtime.wasi.js"));
const wasmSource = fs.readFileSync(path.join(runtimeDir, "tvmjs_runtime.wasm"));

// Instantiate the runtime asynchronously; the emscripten-generated module
// acts as the LibraryProvider supplying the WASI-related imports.
async function main() {
  const tvm = await tvmjs.instantiate(wasmSource, new EmccWASI());
  const log_info = tvm.getGlobalFunc("testing.log_info_str");
  log_info("hello world");
  // List all the global functions from the runtime.
  console.log("Runtime functions using EmccWASI\n", tvm.listGlobalFuncNames());
}

main();
| https://github.com/zk-ml/tachikoma |
web/apps/node/wasi_example.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Example code to start the runtime.
*/
const { WASI } = require('wasi');
const path = require("path");
const fs = require("fs");
const tvmjs = require("../../dist");

// Read the prebuilt wasm runtime binary from the dist folder.
const distWasmDir = tvmjs.wasmPath();
const wasmSource = fs.readFileSync(path.join(distWasmDir, "tvmjs_runtime.wasm"));

// Use Node's built-in WASI implementation as the library provider,
// forwarding this process's argv and environment to the wasm module.
const wasi = new WASI({ args: process.argv, env: process.env });
const tvm = new tvmjs.Instance(new WebAssembly.Module(wasmSource), wasi);

// List all the global functions from the runtime.
console.log("Runtime using WASI\n", tvm.listGlobalFuncNames());
| https://github.com/zk-ml/tachikoma |
web/apps/node/wasi_rpc_server.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Example code to start the RPC server on nodejs using WASI
*/
const { WASI } = require("wasi");
const tvmjs = require("../../dist");

// Each RPC session needs a fresh set of imports, so construct a new
// WASI instance on every call.
function getImports() {
  return new WASI({
    args: process.argv,
    env: process.env
  });
}

const proxyUrl = "ws://localhost:8888/ws";

// Connect to the websocket proxy and serve the wasm runtime over RPC.
new tvmjs.RPCServer(proxyUrl, "wasm", getImports, console.log);
| https://github.com/zk-ml/tachikoma |
web/emcc/decorate_as_wasi.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Decorate emcc generated js to a WASI compatible API."""
import sys
# JS fragment prepended to the emcc output: opens the EmccWASI constructor.
template_head = """
function EmccWASI() {
"""

# JS fragment appended after the emcc output: exposes the wasm library
# provider's start/imports and the WASI import namespace, then closes the
# constructor and registers it as a CommonJS export when possible.
template_tail = """
this.Module = Module;
this.start = Module.wasmLibraryProvider.start;
this.imports = Module.wasmLibraryProvider.imports;
this.wasiImport = this.imports["wasi_snapshot_preview1"];
}
if (typeof module !== "undefined" && module.exports) {
module.exports = EmccWASI;
}
"""


def decorate(src):
    """Wrap emcc-generated JS source text in the EmccWASI constructor.

    Parameters
    ----------
    src : str
        The raw emcc-generated JavaScript source.

    Returns
    -------
    str
        The source wrapped with the WASI-compatible head/tail fragments.
    """
    return template_head + src + template_tail


if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage <file-in> <file-out>")
        # Bug fix: previously fell through and crashed with IndexError on
        # sys.argv[1]; exit cleanly with a nonzero status instead.
        sys.exit(1)
    # Context managers ensure both file handles are closed deterministically.
    with open(sys.argv[1]) as fi:
        result = decorate(fi.read())
    with open(sys.argv[2], "w") as fo:
        fo.write(result)
| https://github.com/zk-ml/tachikoma |
web/emcc/preload.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-unused-vars */
/**
 * JS config used by --pre-js in emcc.
 * Wrap module as a LibraryProvider.
 */
var __wasmLib = {};

// Called by emscripten in place of its default instantiation path: stash the
// import object and the success callback so an external loader can perform
// the actual WebAssembly instantiation later.
function __wasmLibInstantiateWasm(imports, successCallback) {
  __wasmLib.imports = imports;
  __wasmLib.successCallback = successCallback;
}

// Invoked by the external loader once it has a wasm instance; hands the
// instance back to emscripten through the saved callback.
function __wasmLibStart(wasmInstance) {
  __wasmLib.successCallback(wasmInstance);
}

__wasmLib.start = __wasmLibStart;

// `Module` is the well-known object emscripten reads its configuration from;
// the quoted keys are part of its external contract.
var Module = {
  "instantiateWasm": __wasmLibInstantiateWasm,
  "wasmLibraryProvider": __wasmLib
};
| https://github.com/zk-ml/tachikoma |
web/jest.config.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-undef */
module.exports = {
testEnvironment: "node",
testMatch: [
"**/tests/node/*.js"
],
};
| https://github.com/zk-ml/tachikoma |
web/rollup.config.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import commonjs from '@rollup/plugin-commonjs';
import resolve from '@rollup/plugin-node-resolve';
// Map external module ids to the global names used by the UMD bundle.
const globalNames = {
  'ws': 'ws',
  'perf_hooks': 'perf_hooks',
  '@webgpu/types': 'webgputypes'
};

export default {
  input: 'dist/index.js',
  external: ['ws', 'perf_hooks', '@webgpu/types'],
  plugins: [commonjs(), resolve()],
  output: {
    file: 'dist/tvmjs.bundle.js',
    format: 'umd',
    name: 'tvmjs',
    exports: 'named',
    globals: globalNames
  }
};
| https://github.com/zk-ml/tachikoma |
web/tests/node/test_module_load.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-undef */
// Load Emscripten Module, need to change path to root/lib
const path = require("path");
const fs = require("fs");
const assert = require("assert");
const tvmjs = require("../../dist");
const wasmPath = tvmjs.wasmPath();
const EmccWASI = require(path.join(wasmPath, "tvmjs_runtime.wasi.js"));
const wasmSource = fs.readFileSync(path.join(wasmPath, "test_addone.wasm"));
// Create the runtime instance from the prebuilt wasm binary, with the
// emscripten-generated module providing the WASI imports.
const tvm = new tvmjs.Instance(
  new WebAssembly.Module(wasmSource),
  new EmccWASI()
);
// Load system library
const sysLib = tvm.systemLib();
// Build an array of `length` uniform random samples drawn from [0, max).
function randomArray(length, max) {
  const out = [];
  for (let i = 0; i < length; ++i) {
    out.push(Math.random() * max);
  }
  return out;
}
test("add one", () => {
  // Fetch the pre-compiled packed function from the system library.
  const addOne = sysLib.getFunction("add_one");
  assert(tvm.isPackedFunc(addOne));

  const size = 124;
  const input = tvm.empty(size).copyFrom(randomArray(size, 1));
  const output = tvm.empty(size);

  // Run the wasm kernel: output[i] = input[i] + 1.
  addOne(input, output);

  // Copy both arrays back to JS for verification.
  const inValues = input.toArray();
  const outValues = output.toArray();
  outValues.forEach((v, i) => {
    // float32 round-trip, so compare with a small tolerance.
    assert(Math.abs(v - (inValues[i] + 1)) < 1e-5);
  });
  addOne.dispose();
});
| https://github.com/zk-ml/tachikoma |
web/tests/node/test_ndarray.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-undef */
const path = require("path");
const fs = require("fs");
const assert = require("assert");
const tvmjs = require("../../dist/tvmjs.bundle")
const wasmPath = tvmjs.wasmPath();
const EmccWASI = require(path.join(wasmPath, "tvmjs_runtime.wasi.js"));
const wasmSource = fs.readFileSync(path.join(wasmPath, "tvmjs_runtime.wasm"));
// Instantiate the runtime from the bundled build, with the emscripten
// module supplying the WASI imports.
let tvm = new tvmjs.Instance(new WebAssembly.Module(wasmSource), new EmccWASI());
// Basic fields.
assert(tvm.listGlobalFuncNames() !== undefined);
// Test ndarray
// Round-trip a small 2x3 array through an NDArray of the given dtype and
// check device, shape, and that toArray yields the expected typed array.
function testArrayCopy(dtype, arrayType) {
  const source = [1, 2, 3, 4, 5, 6];
  const nd = tvm.empty([2, 3], dtype).copyFrom(source);
  assert(nd.device.toString() == "cpu(0)");
  assert(nd.shape[0] == 2 && nd.shape[1] == 3);
  const roundTripped = nd.toArray();
  assert(roundTripped instanceof arrayType);
  assert(roundTripped.toString() == arrayType.from(source).toString());
  // test multiple dispose.
  nd.dispose();
  nd.dispose();
}
test("array copy", () => {
  // Exercise the copy round-trip for every supported dtype.
  const cases = [
    ["float32", Float32Array],
    ["int", Int32Array],
    ["int8", Int8Array],
    ["uint8", Uint8Array],
    ["float64", Float64Array],
  ];
  for (const [dtype, arrayType] of cases) {
    testArrayCopy(dtype, arrayType);
  }
});
| https://github.com/zk-ml/tachikoma |
web/tests/node/test_packed_func.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-undef */
const path = require("path");
const fs = require("fs");
const assert = require("assert");
const tvmjs = require("../../dist");
const wasmPath = tvmjs.wasmPath();
const EmccWASI = require(path.join(wasmPath, "tvmjs_runtime.wasi.js"));
const wasmSource = fs.readFileSync(path.join(wasmPath, "tvmjs_runtime.wasm"));
// Instantiate the runtime from the prebuilt wasm binary, with the
// emscripten module acting as the WASI library provider.
let tvm = new tvmjs.Instance(
  new WebAssembly.Module(wasmSource),
  new EmccWASI()
);
test("GetGlobal", () => {
  let flist = tvm.listGlobalFuncNames();
  let faddOne = tvm.getGlobalFunc("testing.add_one");
  let fecho = tvm.getGlobalFunc("testing.echo");
  assert(faddOne(tvm.scalar(1, "int")) == 2);
  // check function argument with different types.
  assert(fecho(1123) == 1123);
  assert(fecho("xyz") == "xyz");
  // Byte arrays are echoed by value: same length and contents.
  let bytes = new Uint8Array([1, 2, 3]);
  let rbytes = fecho(bytes);
  assert(rbytes.length == bytes.length);
  for (let i = 0; i < bytes.length; ++i) {
    assert(rbytes[i] == bytes[i]);
  }
  assert(fecho(undefined) == undefined);
  // NDArrays and modules are echoed by reference: the handle is shared.
  let arr = tvm.empty([2, 2]).copyFrom([1, 2, 3, 4]);
  let arr2 = fecho(arr);
  assert(arr.handle == arr2.handle);
  assert(arr2.toArray().toString() == arr.toArray().toString());
  let mod = tvm.systemLib();
  let ret = fecho(mod);
  assert(ret.handle == mod.handle);
  assert(flist.length != 0);
  // Explicitly release every handle created in this test.
  mod.dispose();
  ret.dispose();
  arr.dispose();
  arr2.dispose();
  fecho.dispose();
  faddOne.dispose();
});
test("ReturnFunc", () => {
  // A closure factory: returns a function with `y` captured.
  function makeAdder(y) {
    return function (x, z) {
      return x + y + z;
    };
  }
  const fecho = tvm.getGlobalFunc("testing.echo");
  const packed = tvm.toPackedFunc(makeAdder);
  assert(tvm.isPackedFunc(packed));
  // Packing an already-packed function reuses the same underlying cell.
  const packedAgain = tvm.toPackedFunc(packed);
  assert(packedAgain._tvmPackedCell.handle === packed._tvmPackedCell.handle);
  // Calling the packed factory returns another packed function.
  const f = packed(10);
  assert(tvm.isPackedFunc(f));
  assert(f(11, 0) == 21);
  // JS string concatenation semantics carry through the packed boundary.
  assert(f("x", 1) == "x101");
  assert(f("x", "yz") == "x10yz");
  fecho.dispose();
  packed.dispose();
  packedAgain.dispose();
  // test multiple dispose.
  f.dispose();
  f.dispose();
});
test("RegisterGlobal", () => {
  // Register a JS closure under a global name, then fetch and call it.
  tvm.registerFunc("xyz", (x, y) => x + y);
  const f = tvm.getGlobalFunc("xyz");
  assert(f(1, 2) == 3);
  f.dispose();
  const syslib = tvm.systemLib();
  syslib.dispose();
});
test("NDArrayCbArg", () => {
  let use_count = tvm.getGlobalFunc("testing.object_use_count");
  // Inside the callback the array holds an extra reference (caller + callback).
  let fcheck = tvm.toPackedFunc(function (x) {
    assert(use_count(x) == 2);
    x.dispose();
  });
  let x = tvm.empty([2], "float32").copyFrom([1, 2]);
  assert(use_count(x) == 1);
  fcheck(x);
  // dispose() in the callback released only the callback's reference,
  // so the caller's count is back to 1 afterwards.
  assert(use_count(x) == 1);
});
test("Logging", () => {
  // Smoke-test the runtime logger exposed as a global packed function.
  const logInfo = tvm.getGlobalFunc("testing.log_info_str");
  logInfo("helow world");
  logInfo.dispose();
});
| https://github.com/zk-ml/tachikoma |
web/tests/python/prepare_test_libs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Prepare test library for standalone wasm runtime test.
import tvm
from tvm import te
from tvm.contrib import emcc
from tvm.relay.backend import Runtime
import os
def prepare_test_libs(base_path):
    """Build the wasm `add_one` test operator as a system library.

    Parameters
    ----------
    base_path : str
        Directory the generated ``test_addone.wasm`` is written into.

    Raises
    ------
    RuntimeError
        If the wasm32 LLVM target is not enabled in this TVM build.
    """
    runtime = Runtime("cpp", {"system-lib": True})
    target = "llvm -mtriple=wasm32-unknown-unknown-wasm"
    if not tvm.runtime.enabled(target):
        # Fixed typo in the error message ("enbaled" -> "enabled").
        raise RuntimeError("Target %s is not enabled" % target)
    # B = A + 1 over a symbolic-length vector: the smallest useful kernel.
    n = te.var("n")
    A = te.placeholder((n,), name="A")
    B = te.compute(A.shape, lambda *i: A(*i) + 1.0, name="B")
    s = te.create_schedule(B.op)
    fadd = tvm.build(s, [A, B], target, runtime=runtime, name="add_one")
    wasm_path = os.path.join(base_path, "test_addone.wasm")
    # emcc.create_tvmjs_wasm links the object file into a tvmjs-compatible wasm.
    fadd.export_library(wasm_path, emcc.create_tvmjs_wasm)
if __name__ == "__main__":
    # Resolve the repo-relative output directory from this script's location.
    this_dir = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    out_dir = os.path.join(this_dir, "../../dist/wasm")
    prepare_test_libs(out_dir)
| https://github.com/zk-ml/tachikoma |
web/tests/python/webgpu_rpc_test.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Simple testcode to test Javascript RPC
To use it, start a rpc proxy with "python -m tvm.exec.rpc_proxy".
Connect javascript end to the websocket port and connect to the RPC.
"""
import tvm
from tvm import te
from tvm import rpc
from tvm.contrib import utils, emcc
from tvm.relay.backend import Runtime
import numpy as np
proxy_host = "127.0.0.1"
proxy_port = 9090
def test_rpc():
    """Build a webgpu `addone` kernel, ship it to a browser session over the
    RPC proxy, and verify the computed result on the remote device."""
    if not tvm.runtime.enabled("rpc"):
        return
    # generate the wasm library
    # Bug fix: `target_host` was referenced below without ever being defined
    # (NameError at runtime); bind it explicitly and reuse it for the Target.
    target_host = "llvm -mtriple=wasm32-unknown-unknown-wasm"
    target = tvm.target.Target("webgpu", host=target_host)
    runtime = Runtime("cpp", {"system-lib": True})
    if not tvm.runtime.enabled(target_host):
        # Fixed typo in the error message ("enbaled" -> "enabled").
        raise RuntimeError("Target %s is not enabled" % target_host)
    n = 2048
    A = te.placeholder((n,), name="A")
    B = te.compute(A.shape, lambda *i: A(*i) + 1.0, name="B")
    s = te.create_schedule(B.op)
    # Tiny launch config: enough to exercise both thread and block binding.
    num_thread = 2
    xo, xi = s[B].split(B.op.axis[0], factor=num_thread)
    s[B].bind(xi, te.thread_axis("threadIdx.x"))
    s[B].bind(xo, te.thread_axis("blockIdx.x"))
    fadd = tvm.build(s, [A, B], target, runtime=runtime, name="addone")
    temp = utils.tempdir()
    wasm_path = temp.relpath("addone_gpu.wasm")
    fadd.export_library(wasm_path, emcc.create_tvmjs_wasm)
    # Use a context manager so the file handle is closed deterministically.
    with open(wasm_path, "rb") as f:
        wasm_binary = f.read()
    remote = rpc.connect(
        proxy_host,
        proxy_port,
        key="wasm",
        session_constructor_args=["rpc.WasmSession", wasm_binary],
    )

    def check(remote):
        # basic function checks.
        dev = remote.webgpu(0)
        adata = np.random.uniform(size=n).astype(A.dtype)
        a = tvm.nd.array(adata, dev)
        b = tvm.nd.array(np.zeros(n, dtype=A.dtype), dev)
        np.testing.assert_equal(a.numpy(), adata)
        f1 = remote.system_lib()
        addone = f1.get_function("addone")
        addone(a, b)
        np.testing.assert_equal(b.numpy(), a.numpy() + 1)
        print("Test pass..")

    check(remote)


test_rpc()
| https://github.com/zk-ml/tachikoma |
web/tests/python/websock_rpc_test.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Simple testcode to test Javascript RPC
To use it, start a rpc proxy with "python -m tvm.exec.rpc_proxy".
Connect javascript end to the websocket port and connect to the RPC.
"""
import tvm
from tvm import te
from tvm import rpc
from tvm.contrib import utils, emcc
from tvm.relay.backend import Runtime
import numpy as np
proxy_host = "127.0.0.1"
proxy_port = 9090
def test_rpc():
    """Build a wasm `addone` library, connect to a browser session through the
    RPC proxy, and exercise both RPC plumbing and the generated kernel."""
    if not tvm.runtime.enabled("rpc"):
        return
    # generate the wasm library
    runtime = Runtime("cpp", {"system-lib": True})
    target = "llvm -mtriple=wasm32-unknown-unknown-wasm"
    if not tvm.runtime.enabled(target):
        # Fixed typo in the error message ("enbaled" -> "enabled").
        raise RuntimeError("Target %s is not enabled" % target)
    n = te.var("n")
    A = te.placeholder((n,), name="A")
    B = te.compute(A.shape, lambda *i: A(*i) + 1.0, name="B")
    s = te.create_schedule(B.op)
    fadd = tvm.build(s, [A, B], target, runtime=runtime, name="addone")
    temp = utils.tempdir()
    wasm_path = temp.relpath("addone.wasm")
    fadd.export_library(wasm_path, emcc.create_tvmjs_wasm)
    # Use a context manager so the file handle is closed deterministically.
    with open(wasm_path, "rb") as f:
        wasm_binary = f.read()
    remote = rpc.connect(
        proxy_host,
        proxy_port,
        key="wasm",
        session_constructor_args=["rpc.WasmSession", wasm_binary],
    )

    def check(remote):
        # basic function checks: echo returns its first argument unchanged.
        faddone = remote.get_function("testing.asyncAddOne")
        fecho = remote.get_function("testing.echo")
        assert faddone(100) == 101
        assert fecho(1, 2, 3) == 1
        assert fecho(1, 2, 3) == 1
        assert fecho(100, 2, 3) == 100
        assert fecho("xyz") == "xyz"
        assert bytes(fecho(bytearray(b"123"))) == b"123"
        # run the generated library.
        f1 = remote.system_lib()
        dev = remote.cpu(0)
        a = tvm.nd.array(np.random.uniform(size=1024).astype(A.dtype), dev)
        b = tvm.nd.array(np.zeros(1024, dtype=A.dtype), dev)
        # invoke the function
        addone = f1.get_function("addone")
        addone(a, b)
        # time evaluator
        time_f = f1.time_evaluator("addone", dev, number=100, repeat=10)
        time_f(a, b)
        cost = time_f(a, b).mean
        print("%g secs/op" % cost)
        np.testing.assert_equal(b.numpy(), a.numpy() + 1)

    check(remote)


test_rpc()
| https://github.com/zk-ml/tachikoma |
host/src/main.rs | use methods::{PREDICTION_ELF, PREDICTION_ID};
use risc0_zkvm::Prover;
use risc0_zkvm::serde::{from_slice, to_vec};
use std::io;
fn main() {
    // Make the prover from the embedded guest ELF.
    let mut prover =
        Prover::new(PREDICTION_ELF).expect("Prover should be constructed from valid ELF binary");

    println!("Please input the sepal length, sepal width, petal length, petal width.");
    let mut input = String::new();
    io::stdin()
        .read_line(&mut input)
        .expect("failed to read a line from stdin");

    // Parse exactly four whitespace-separated u32 measurements and push each
    // one to the guest in the same order the guest env::read()s them.
    // Replaces four copies of `.next().unwrap().parse().unwrap()` whose
    // panics gave the user no hint about which field was wrong.
    let mut fields = input.split_whitespace();
    for name in ["sepal length", "sepal width", "petal length", "petal width"] {
        let value: u32 = fields
            .next()
            .unwrap_or_else(|| panic!("missing {}: expected four numbers", name))
            .parse()
            .unwrap_or_else(|_| panic!("{} must be an unsigned integer", name));
        prover.add_input_u32_slice(&to_vec(&value).unwrap());
    }

    // Run prover & generate receipt.
    let receipt = prover.run().expect(
        "Code should be provable unless it had an error or exceeded the maximum cycle limit",
    );

    // Optional: Verify receipt to confirm that recipients will also be able to
    // verify your receipt.
    receipt.verify(&PREDICTION_ID).expect(
        "Code you have proven should successfully verify; did you specify the correct method ID?",
    );

    // Extract the committed class index from the receipt journal and map it to
    // a species name; a checked lookup avoids a cryptic index-out-of-bounds
    // panic if the journal ever holds an unexpected value.
    let class: u32 = from_slice(&receipt.journal).unwrap();
    let species = ["setosa", "versicolor", "virginica"];
    let label = species
        .get(class as usize)
        .expect("guest committed an out-of-range class index");
    println!("This is the {} flower, and I can prove it!", label);
}
| https://github.com/only4sim/ZK-DTP |
methods/build.rs | fn main() {
risc0_build::embed_methods();
}
| https://github.com/only4sim/ZK-DTP |
methods/guest/src/bin/prediction.rs | // TODO: Rename this file to change the name of this method from METHOD_NAME
#![no_main]
// If you want to try std support, also update the guest Cargo.toml file
#![no_std] // std support is experimental
use risc0_zkvm::guest::env;
risc0_zkvm::guest::entry!(main);
pub fn main() {
    // Read the four measurements in the exact order the host pushed them
    // with add_input_u32_slice: sepal length, sepal width, petal length,
    // petal width. Values are integers (presumably hundredths of a cm,
    // matching the thresholds in `predict` — confirm with the host side).
    let sepal_length: u32 = env::read();
    let sepal_width: u32 = env::read();
    let petal_length: u32 = env::read();
    let petal_width: u32 = env::read();
    // Classify and commit the class index to the public journal so the
    // verifier can read it from the receipt.
    let prediction: u32 = predict(sepal_length, sepal_width, petal_length, petal_width);
    env::commit(&prediction);
}
// Hard-coded decision tree classifying an iris flower into one of three
// classes, matching the host-side lookup table ["setosa", "versicolor",
// "virginica"]: 0 = setosa, 1 = versicolor, 2 = virginica.
//
// NOTE(review): the thresholds (80, 165, 175, 485, 495, 595, 695, ...) look
// like centimeter measurements scaled by 100 so the tree runs on integers
// inside the zkVM (e.g. 80 -> 0.8 cm petal width) — TODO confirm against
// the tool that exported this tree.
//
// NOTE(review): `sepal_width` is accepted but never read by any branch;
// presumably the trained tree simply did not split on it.
fn predict(sepal_length: u32, sepal_width: u32, petal_length: u32, petal_width :u32) -> u32 {
// First split is on petal width alone: very narrow petals => setosa.
if petal_width <= 80 {
return 0
}
else {
// Mid-range petal widths: mostly versicolor, with virginica exceptions.
if petal_width <= 175 {
if petal_length <= 495 {
if petal_width <= 165 {
return 1
}
else {
return 2
}
}
else {
// Long petals despite mid-range width: lean virginica unless the
// sepal is short enough to pull the sample back to versicolor.
if petal_width <= 155 {
return 2
}
else {
if sepal_length <= 695 {
return 1
}
else {
return 2
}
}
}
}
// Wide petals: virginica, except short-petaled/short-sepaled outliers.
else{
if petal_length <= 485 {
if sepal_length <= 595 {
return 1
}
else {
return 2
}
}
else {
return 2
}
}
}
} | https://github.com/only4sim/ZK-DTP
methods/src/lib.rs | include!(concat!(env!("OUT_DIR"), "/methods.rs"));
| https://github.com/only4sim/ZK-DTP |
plot_iris_dataset.ipynb | {
"cells": [
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"%matplotlib inline"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"# The Iris Dataset\n",
    "This data set consists of 3 different types of irises'\n",
"(Setosa, Versicolour, and Virginica) petal and sepal\n",
"length, stored in a 150x4 numpy.ndarray\n",
"\n",
    "The rows are the samples and the columns are:\n",
"Sepal Length, Sepal Width, Petal Length and Petal Width.\n",
"\n",
"The below plot uses the first two features.\n",
"See [here](https://en.wikipedia.org/wiki/Iris_flower_data_set) for more\n",
"information on this dataset.\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAApMAAAH1CAYAAACqU3UnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACgpElEQVR4nOzdd1RU1xYH4N80ptBBuqDYBVHB3ns3FmLsvcSSqtFE00w1xjRNjL2XmFhjYg323ntFESuKdBiYPvv9gfAcZ0ZxGBjR/a3lei/3cNj7DnDZ3LlnHwERERhjjDHGGLOB0NEJMMYYY4yxkouLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzLiYZY4wxxpjNxI4IajQakZCQAFdXVwgEAkekwBhjjDHGnoKIkJWVhcDAQAiF1u8/OqSYTEhIQHBwsCNCM8YYY4yx53Dnzh2ULl3a6rhDiklXV1cAucm5ubk5IgXGGGOMMfYUmZmZCA4Ozq/brHFIMZn31rabmxsXk4wxxhhjL7BnPZLIC3AYY4wxxpjNuJhkjDHGGGM242KSMcYYY4zZjItJxhhjjDFmMy4mGWOMMcaYzbiYZIwxxhhjNuNikjHGGGOM2YyLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzLiYZY4wxxpjNuJhkjDHGGGM242KSMcYYY4zZjItJxhhjjDFmMy4mGWOMMcaYzbiYZIwxxhhjNuNikjHGGGOM2YyLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzLiYZY4wxxpjNuJhkjDHGGGM242KSMcYYY4zZjItJxhhjjDFmMy4mGWOMMcaYzbiYZIwxxhhjNuNikjHGGGOM2YyLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzLiYZY4wxxpjNuJhkjDHGGGM242KSMcYYY4zZjItJxhhjjDFmMy4mGWOMMcaYzbiYZIwxxhhjNuNikjHGGGOM2YyLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzLiYZY4wxxpjNuJhkjDHGGGM242KSMcYYY4zZjItJxhhjjDFmMy4mGWOMMcaYzbiYZIwxxhhjNuNikjHGGGOM2YyLScYYY4wxZjMuJhljjDHGmM24mGSMMcYYYzbjYpIxxhhjjNmMi0nGGGOMMWYzsaMTYIy9GjQaDTZt2oS4uDh4enqiW7du8PHxcXRajDHGComLScZYkVu/fj1Gv/kmHqakwF0igVKvx9tvvYX3x47FlClTIBKJHJ0iY4wxG3ExyRgrUv/99x/eeOMNtJPKMMHHD5UkEqQZDViSnY0ff/gBRqMRP/zwg6PTZIwxZiMBEVFxB83MzIS7uzsyMjLg5uZW3OEZY8Wofp06EFy4gL88vSESCEzGfs3KxC+qHNy5exd+fn4OypAxxpglBa3XeAEOY6zI3LhxA0dPnMBQubNZIQkAA51dIDAasW7dOgdkxxhjzB64mGSMFZmUlBQAQIjY8jORHkIh3MWS/I9jjDFW8nAxyRgrMiEhIRAKhTij1Vocv63XI1mrQWhoaDFnxhhjzF64mGSMFRk/Pz+81qkT5qpVSDcaTcaICD8pM+Hu6oro6GgHZcgYY6ywuJhkjBWp73/4ARkyGbqkpWBVdjZidTrsUqvQPy0V63JyMGPmTCgUCkenyRhjzEZcTDLGilTlypVx8MgRVG3ZEh9mpqNlUiIGpqYgNbQsNmzYgIEDBzo6RcYYY4XAfSYZY0WuSpUq2LRlC+7du4ebN2/Cw8MDYWFhEFhY4c0YY6xk4WKSMVZsgoKCEBQU5Og0GGOM2RG/zc0YY4wxxmzGxSRjjDHGGLMZF5OMMcYYY8xmXEwyxhhjjD
GbcTHJGGOMMcZsxsUkY4wxxhizGReTjDHGGGPMZlxMMsYYY4wxm3ExyRhjjDHGbMbFJGOMMcYYsxkXk4wxxhhjzGZcTDLGGGOMMZtxMckYY4wxxmzGxSRjjDHGGLMZF5OMMcYYY8xmYkcnwBhzrOvXr+PatWtwd3dHvXr1IBKJHJ0SY4yxEoTvTDL2irp06RJaNW+OihUromPHjmjUqBFCg4OxYMECR6fGGGOsBOE7k4y9gmJjY9GkYUN4q9X4zcML9aROuG8wYHF6BkaMGIHMzEyMGzfO0WkyxhgrAQRERMUdNDMzE+7u7sjIyICbm1txh2fsldfzjTdw7N9/scXTG+5C0zcovsxIx3KdFvfu34eXl5eDMmSMMeZoBa3X+G1uxl4xaWlp2LBhA4ZJ5WaFJAC87eIKg16PVatWOSA7xhhjJQ0Xk4y9Yh48eAC9wYBqEonFcW+RCIFOTrh9+3YxZ8YYY6wk4mKSsVeMt7c3AOCGQW9xPMtoxEOdDr6+vsWZFmOMsRKKi0nGXjG+vr5o17o1FqpU0Fh4ZHppthJaIvTu3dsB2THGGCtpuJhk7BX09ZQpiCcj+qel4rhGAyLCA4MBUzMz8H1WJt4fOxZBQUGOTpMxxlgJwMUkY6+gOnXqYOv27Ujy90P3lCSUeZCA2on3sUivwyeffopp06Y5OkXGGGMlBPeZZOwV1bx5c1yNi8OePXsQGxsLNzc3dOrUCe7u7o5OjTHGWAnCfSYZY4wxxpgZ7jPJGGOMMcaKHBeTjDHGGGPMZvzMJGNFRKfTYePGjVi3bh2ys7MRHh6OESNGoFy5co5OjTHGGLMbvjPJWBG4e/cuIqtXxxtvvIErf29ETkwM5vz0EypUqICff/7Z0ekxxhhjdsN3JhmzM6PRiC6dOiH9Rjy2lvJFhJMTAEBlNOKnrEx88MEHKFeuHLp16+bYRBljjDE74DuTjNnZjh07cPrcOcxwdcsvJAFALhTiEzd3NJTJ8cPUqQ7MkDHGGLMfLiYZs7PNmzejjEyGeo8VknkEAgF6ymQ4dPQo0tLSHJAdY4wxZl9cTDJmZ1qtFi4CAQQCgcVxF4Ew/+MYY4yxko6LScbsrFatWrisVuOuXm9xfIdGjeCAAPj4+BRzZowxxpj9cTHJmJ316dMHbi4u+CQrE5onNpg6qFFjvVqFUW+/DaGQf/wYY4yVfLyamzE7c3Z2xqrVq9GtSxe0SE3GG05S+AiF2K/VYptahZatWmH8+PGOTpMxxhizC741wlgRaN++PY6dOIEWvXphrkGPSZkZuBkSjOm//opNmzfDycLiHMYYY6wkEhA98T5cMSjoxuGMvSyIyOqCHMYYY+xFVNB6je9MMlYMuJBkjDH2suJikjHGGGOM2YyLScbYC42IkJycjISEBBgMBkenwxhj7AlcTDLGXlh//fUX6kRGwcfHB0FBQQgNDsZ3333HDd8ZY+wFwsUkY+yF9O2336J3795wib2KmR5eWOTljUYZmZj86afo3rUrdDqdo1NkjDEGXs3NGHsBXbp0CeHh4XjfxRXj3dxNxvaq1RiQloI5c+dixIgRDsqQMcZefryamzFWYs2bNw+lJE54x9X84tVMJkMrmRxzZs50QGaMMcaexMUkY+yFc/niRdQViSC10lKpkUSCS1euFHNWjDHGLOFikjH2wnF2dUUSrD+Bk2w0wsXZuRgzYowxZg0Xk4yxF87rr7+O42o1LunMV22rjEas1Wrwes+eDsiMMcbYk7iYZIy9cHr06IEqFStiaEYGjmg0yFsneEOvw5CMNGSJRBg7dqyDs2SMMQZwMckYewFJpVL8t3MnSlWuhB4pSWiYmoxWaSlo+jARV2QybNqyBZUrV3Z0mowxxgCIHZ0AY4xZEhwcjJNnzmDnzp3YunUrtFotJtWpg549e0Iulzs6PcYYY49wn0nGGGOMMWaG+0wyxhhjjLEix8UkY4wxxhizGReTjL0k1q1bhzZt2qBx48YYO3YslEqlo1
NijDH2CuBnJhkr4e7evYs6UVF4kJQEZ4EAzgIBHhqNkAiF+HbqVEyYMMHRKTLGGCuB+JlJxl4BRqMRNSMikJWcjJkeXrjgH4hT/oHY5eOHemIJJn74If766y9Hp8kYY+wlxsUkYyXY77//jpT0dPzu6YVuCgUkj/ayriSRYIl3KQSJRPiQ70wyxhgrQlxMMlaCzZs3D4EiEVpJZWZjMoEAA51dcOfOHeTk5DggO8YYY68CLiYZK8GUSiUChSIIHt2RfFKgSAQCkJ6eXqx5McYYe3VwMclYCVa2bFlc1OugNBotjh/TaCARCODv71/MmTHGGHtVcDHJWAk2depUqIkwQ5mJJxszXNHp8GdONuo1agShkH/UGWOMFQ3em5uxEqxevXro0LEjZm/Zgms6Pfo6O8NTIMQejRoLspUQSaVYuXKlo9NkjDH2EuPbFYyVcJs3b8bbb7+NQ2TE0NQUdE9JwkxlFspVq4ZLV68iJCTE0Skyxhh7iXHTcsZeEkajEfv27UNGRgYaNGgAX19fR6fEGGOsBCtovcZvczP2khAKhWjevLmj02CMMfaK4be5GWOMMcaYzbiYZIwxxhhjNuNikrFn0Gq1aNeuHaRiMcQCASQCAby9vbFkyRJHp1ZiaDQazJ07F7Vr1oSnmxvKhYTg448/xv379x2dGmPsBRUbG4sxY8YgICAAnp6eaNasGf766y8YrfTVLYyLFy9ixIgR8Pf3h5eXF1q1aoUNGzaYtVxjlvECHMaeQqVSwadUKWTn5KCBkxTNpFKkGY1YnZODDDJi9FtvYebMmY5O84WWk5ODDm3b4sChQ2gjkyNKLMY9gwEbtBrI3d2xa+9ehIeHOzpNxtgLZMeOHXjttdcgk8lQp04dODs74/Lly7h69Sr69++PpUuX2q1/7qZNm9CjRw84OzujTp06kMlkuHTpEq5du4aRI0di9uzZVncZe9kVtF7jYpKxp4iMjMTZM2cw29MLneWK/OM5RiOGpKbgiFaDazduIDQ01IFZvtg++OADzJkxAys9vFBHKs0/nmIwoE9GGhASggtXrryyF2vGmKnMzEyEhIQgODgYI0eOhJOTU/7YsWPHsGjRIsyZMwdvvvlmoWMlJycjJCQElStXxvDhwyGRSPLHDh48iGXLlmHFihXo169foWOVRAWt1/htbsasMBgMuHj2LDrL5CaFJAAohEL86OEJI/DKXmQKIicnBwvnzcNQucKkkAQAb5EIXzm74lJsLHbv3u2gDBljL5oVK1YgKysL/fv3NykkAaBu3bqoWbMmZsyYYZdYixcvhl6vx4ABA0wKSQBo1KgRwsPD8euvv9ol1suMi0nGrLh48SJ0RGgvl1scDxaLUVUswcWLF4s5s5LjypUryFAq0U5m+TWs5+QET4kEhw8fLubMGGMvqqNHjyI0NBSenp4Wx2vWrIlLly4hOzvbLrEqVKgAFxcXi+M1atTA8ePHi+Q5zZcJF5OMWSGTyQAAmqc8CaIh4n2vn0IkEgGw/hoaAOiIIBZzy1vGWC6RSASdTmd1PG8s7/pS1LFEIhE/hvMM/FuQMSsqVaoEiVCItTk5Fscv6LSIM+jRtGnTYs6s5AgPD0eQnx/Wqyy/hjFqNZR6Pdq2bVvMmTHGXlRt27bF7du3ce/ePbMxIsKxY8fQuHHj/D/4CxsrLi4OSUlJFmMdP34crVq14mLyGbiYZOwp2nXsiENaDX7OyoTusbtrt/R6jElNhRjAypUrHZfgC04sFuO9Dz7AqpxsrMrOhvGx1/C8VotPsrPQomlTREZGOjBLxtiLJDo6GmXKlMGiRYuQkpKSf1yv1+Off/5BbGwsxo8fb5dYffr0gZ+fHxYsWIC0tLT84zqdDmvXrsXNmzfxwQcf2CXWy4xXczP2DOXLl8eNGzfgLRSiqVSGZIMBB7QaiADMmjcPI0aMcHSKLzSj0Yg333wTCxcuRKhUikihCPeIcFStQs2ICGyLiYGfn5+j02SMvUAuX76MNm
3a4P79+wgLC4NCoUBsbCzS09MxdepUfPTRR3aLdebMGbRr1w4pKSkICwuDXC7HlStXkJWVhRkzZuCdd96xW6yShlsDMWZHP/30E6ZMmYLsjAxAKET1mjWxatUqlC9f3tGplQhEhIMHD2LBggW4ERsLDy8v9OnXD9HR0ZA+scqbMcYAICsrCytWrMDff/8NlUqF6tWrY9SoUahWrZrdY6Wnp2PZsmX4999/odFoEBUVhVGjRqFKlSp2j1WScDHJGGOMMcZsxn0mGWOMMcZYkeNikjHGGGOM2YybuzFWQBcvXsSFCxegUCjQokULq01u7eHcuXO4fPkyXFxc0KJFCygUimfOycrKwu7du/OfLapatWqR5ccYY4zl4WKSsWe4cuUK3hw2DPsPHco/5qpQ4P0PPsAXX3xh16bl586dw5vDhuHoiRP5xzxcXTFh4kRMmjTJYq8zg8GAzz//HL9Onw7lYz0xmzVujPmLFqFixYp2y48xxhh7EheTjD3FzZs30bRRI7hnZ2OOpxeaSmVINRqxIluJb7/5BsnJyZg1a5ZdYsXGxqJ5kyYI0GixwNMbjaRSJBkNWJqdjU8++QTp6emYNm2a2bxRo0Zh8cKFGOXsgr6+/vAUCrFXo8YPx4+jaaNGOHbyJIKDg+2SI2OMMfYkXs3N2FMMHz4cm5YvR4ynN7ye2LproTILkzMzcOXKFVSuXLnQsfr26YMDGzZgu6c33J642zkzKxPTspWIj49HSEhI/vELFy4gIiICU9w9MNDZ9G33ZIMBrVNT0HPEcPz++++Fzo8xxtirhVdzM1ZIOp0Oq1auRD+pzKyQBIB+zi7wkkiwbNmyQsdSKpVYu3YtBktlZoUkAAxxdoFCKMSKFStMji9duhQ+Tk7orXA2m1NKJEJfqRTLly6FwWAodI6MMcaYJVxMMmZFZmYmctRqVBFLLI7LBAKEisVISEgodKzU1FTo9HpUthLLWShEsMTJLNb9+/dRXiSGk5V9Y6tIJMjKzkZ2dnahc2SMMcYs4WKSMSvc3Nwgl0pxVa+zOK4hQrxej4CAgELH8vLygkQsRqyVWNlGI+7otGaxAgICEGfQm+wb/rirOh1cFAo4O5vfuWSMMcbsgYtJxqyQSCTo278/VmjUSDOav028MjsbqTodBg0aVOhYLi4u6NGjB5Zo1Mg0Gs3GF2crkWM0on///ibHBw0ahCStFn/lmN95TDYYsFKjwcDBgyGy8DY9Y4wxZg9cTDL2FJ9++imMLi6ITkvDZlUOlEYjbun1mJKZgS+yMjBq1Ci7LL4BgMlffIFMJyf0SE/FdpUKSqMRN/Q6fJmRjqlZmRg3bhzKlCljMqdatWoYNmwYPsnMwPeZGbit1yPLaMQmVQ5eT0+FyN0NH330kV3yY4wxxizh1dyMPcPly5fx5rBhOHD4cP4xV4UC740bhy+++MKud/3Onj2LkcOHm/WZHP/RR5g0aZLFnpYGgwGfffYZfpsxw6TPZNNGjTB/0SJUqlTJbvkxxhh7dRS0XuNikrECOn/+PC5evJi/A46rq2uRxTpz5kz+DjitWrUq0A44mZmZ+TvgREREIDw8vMjyY4wx9vLjYpIxxhhjjNmM+0wyxhhjjLEix8UkY4wxxhizGe/NzSzS6XTYsGED/li5EqnJyShXsSKGDRuGxo0bQ2ClQXZxu3LlCubMmYMzJ09CJpfjta5dMWDAgKfeilepVFi9ejXWrF6NrIwMVA4Lw8iRI1GrVq1izJwxxizLu66dPn0aMpkMXbp0eeZ1jTFH42cmmZmUlBS0a90aJ8+cQW2ZHMECAc4aDbih0WDwoEFYsHChw/sW/vLLL/jggw/gLZGgkUiEDAL2a9Tw8/XF9h07UK1aNbM5d+7cQZsWLXA1Lg4NZXL4CQQ4ZjDgnlaD8ePHY9q0aS9MocwYe/XkXdfc3NxQqVIlqFQqXLp0CX5+fvjvv/8sXtcYK0q8AIfZrH3btjixZw8Wu3sgykkKADASYa0qB+
Mz0vH1N9/g448/dlh+27ZtQ4cOHTDK2QUT3NwhfVQA3tPrMSQzHVleXoiNi4NMJsufQ0SoExmJxMtXsNTdA5UludsWGoiwKFuJLzMzMH/+fAwfPtwh58QYe7XlXdfatm2LLl26QPLoGpWamorZs2fDaDTi2rVrJtc1xooaL8BhNrl48SK2x8TgKxfX/EISAIQCAXoqnDFArsCvv/wCrVbrsBx//OEHRMrk+OSxQhIAgsRizHJ1x52EBKxdu9Zkzt69e3Hy7Fn85OqaX0gCgEggwAgXV3SSK/DztGlwwN9WjDGGH3/8EeXKlUN0dHR+IQnkbrU6bNgw3L171+y6xtiLgotJZiImJgYykQgdZHKL49EKBRKTk3Hu3LliziyXXq/Hrt278bpUavEt6QoSCWrK5Ni+fbvJ8e3bt8NfKkXDxwrkx0XL5Lh87Rru3r1bJHkzxpg1er0eu3btQt26dS1e1/z9/REaGmp2XWPsRcHFJDOh1+shEgggsTIue3Sh0+v1xZfUY4xGI4gIUlh/tlEG8/z0ej2kAoHVZyIdfV6MsVdX3nXt8TuST5JIJHx9Yi8sLiaZiXr16iFbr8dBrcbi+FaVCi4KhcN2V3FyckJkRAS2W8nvocGAExo16tWrZ3K8fv36uKVW45LO8tvz29QqBPj4IDg42O45M8bY0zg5OaFGjRo4e/asxfGMjAzExcWZXdcYe1FwMclMNG7cGDXCwzE5W4kkg8Fk7LRWiwVqFQYNGVKkWwk+y9vvv48dqhysy8k2Oa4hwsTMDMjkcgwaNMhkrEuXLigdEIBJWVnINBpNxvap1fhLrcKot9+GWMzdshhjxe/dd9/FuXPncOTIEZPjOp0OK1euhEwmM7uuMfai4NXczMyVK1fQomlTZKelobuTFMFiMU7rddiuUqFunTr4b+dOuLi4OCw/o9GIYcOGYcmSJagvl6OFWIIMImzQapAGYN2GDejYsaPZvGPHjqFd69YQqtXo7iSFv0iEwzoddqty0KFdO2z45x84OTkV/wkxxl55j1/XKlWqhPDwcOTk5OD48ePIzs7G+vXrLV7XGCtKvJqb2axKlSo4dfYs3v3oI+z39sJMowEJ5cphxm+/YeeePQ4tJAFAKBRi0aJFWL16NeR16mCOANggl+G1wYNx8vRpqxfcunXr4vS5cxj8zjv4z80Vv5MR2WFVsWDhQvz9779cSDLGHObx61pgYCB2796NM2fOoEePHjh16hQXkuyFxncmGWOMMcaYGb4zyRhjjDHGihwXk4wxxhhjzGa8dJW9ko4ePYrExETUrl0bgYGBBZrz8OFDHDlyBF5eXmjYsCGEwmf/LUZEuH37NrKzsxESEuLw500tyczMxJ07d+Du7o7SpUs7Oh3GGGMlDN+ZZK+UL774Am7Ozqhfvz66du2K4KAgVKpU6ak7+sTFxSEiIgKBfn7o2rUrmjRpAjeFAuPGjXtqrPXr16NWzZooW7YswsPD4e/ri5EjRyI5Odnep2WThIQEDB48GL6lSqFatWoIDg5Gg7p1sW3bNkenxhhjrAThBTjslTFixAgsWLAA9ZycMMTZBX5CEY5oNZinVCJHJMSJ06dRrVo1kzm3bt1CWKVKEOp0GO7sgqZSGVKNRqzIUWKPRoPo6GisW7fOLNbs2bMxZswYNJXJ0V+uQCmhEAe0GixWq+BXtiwOPrrD6Sj3799Hg7p1oXr4EEOlMtRzkuK+0YClqhwc02iwYsUK9O3b12H5McYYc7yC1mtcTLJXwu3btxFapgxelyvwk4cnhI9tq3hHr0eHpET4V66My5cvm8yrX78+zh07hi2lfFHxsa3OiAhfZmZgYbYSp06fRs2aNfPHkpKSUDooCH0kTvjG3cNkC8cbeh1eS0vF0Lffxs8//1x0J/wMQ4YMweY//sBmTy8Eiv7/tIuRCO9lpGGnQIh7D+6/kG/LM8YYKx68mpuxx0ycOBEA8JGbu0khCQDBYjGGubji2pUrJm9Ba7VanD
x2DP0UziaFJAAIBAKMdXWDE4CPPvrIZGzp0qUQGo0Y7+Zmthd4ObEEfZykWLxgAXQ6nR3PsOCysrLw5x9/YJhUZlJIAoBQIMBEFzcoc7KxevVqh+THGGOsZOFikr0Srl69itIiEfxFIovjdZ2cYABMnp28desW9ESo6yS1OMddKEQViQTx8fEmx69du4aKTlJ4Ci3HqieVIj0rCykpKbadTCHdvXsXaq3W6nkFicUIlkoRGxtbzJkxxhgribiYZK8ENzc3pBqN0Fp5quP+o33IAwIC8o/5+PhAACDBoLc4x0iE+wYDnJ2dTY67u7vjoUEPw1NiCQQCh72F7O7unpuH0WBxXE2EFL0+/+MYY4yxp+Fikr0SPvjgAyiJsEGVYzZmIMLibCW83N1RtWrV/OMeHh4I8PfHsuxsaCwUhtvVajw0GjF69GiT4z179kSiVostapXZHB0RVmjU6NShg8OKycDAQDSqXx9LVSoYLZzX2pxsKPV6vPHGGw7IjjHGWEljUzFpNBoRGxuLAwcOYN++fSb/GHsRde7cGWWCgzEpPQ1/ZGdD/aiIuqXXY0xaKs7pdPjk88/N5n33/feIN+gxNDUZcfrcZxy1RFifk4P30lPh4+WF4cOHm8ypXbs2XuvUCeOzMrE2Jzv/bugNvQ4j09NwTa/HJ599VsRn/HSTv/oKJzQavJORhjv63DuvKiIsz1ZiclYmBvTrhwoVKjg0R8YYYyUEPafDhw9TaGgoCYVCEggEJv+EQmGBPkdGRgYBoIyMjOcNz5jNUlJSqGxICAEguUBA/kIRCQASATRu3Dir877++msSCwQEgPyEQnJ+9P8D/Pzo3r17FudkZWVRdLduBIBcxWIqLZMRAPL28KB///23qE7xuaxatYrcXVxIAFCwTEbOYjEBoIH9+5NKpXJ0eowxxhysoPXac7cGqlmzJipVqoQvv/wSAQEBZqtVC/KcFbcGYo60fft2/Pzzz8jKykJ4eDi+++47lCpV6qlzMjMz8cknn+DUqVNQKBQYM2YMunfv/sxYly5dwoYNG5CdnY2wsDD06NEDMpnMXqdSaNnZ2VizZg1iY2Ph5uaGHj168B1JxhhjAIqwz6SzszPOnj1bqF84XEwyxhhjjL3YiqzPZL169XD9+vVCJccYY4wxxl4O4md/iGnvvXfeeQcffPABHjx4gIiICEieaOZcvXp1+2bIGGOMMcZeWAV6m1soFEIgEMDah+aNCQQCGAyWe9c9jt/mLhkuXbqEP//8E6mpqQgNDcWAAQPg6+tbJLHWrl2L6dOnIzMzE+XLl8fUqVNRuXLlIom1cuVK/P7771AqlahcuTKmTZuG0NDQp87JysrCH3/8gQsXLkChUKBbt26oX7++2TPDjyMiHDt2zOSZyb59+xZJ/0aj0Yi5c+di8eLFUKvVqFmzJqZNmwZ/f3+7x3pZGY1GbN++HTExMTAYDKhfvz6io6MhlVpu7l4YBoMBmzdvxu7du0FEaNy4Mbp27Wr2x/mTbt++jeXLl+P+/fvw9/dH//79UbZsWbvnxxhjwHPUawVZzXPz5s0C/7Pn6iDmGGq1mvr37UsAyEsioTC5gmQiEUnEYvrpp5/sGislJYVKBwYSAHIXCKiiWExigIQA9e7d266x7t+/T/4+PgSAPIVCqigWk+hRrGHDhlmdt27dOnJ1diaRQEBV5Qryl0oJALVo2pRSUlIszklLS6PWLVvmrgB3cqKqcjmJBAJyUSjozz//tOt5XblyhTxdXQkAlRIKqbxYnL9KfdKkSXaN9bKKi4uj8KqVCQCFekuoiq/k0Yp9Hzp8+LBdY12+fJnKly9PAMjf358CAgIIAAUHB9OpU6cszjEajfThhx+SUCgkuVxOISEhpFAoSCAQ0NixY8lgMNg1R8YYIyp4vfbcrYH27t1LOp3O7LhOp6O9e/faNTnmGMOHDyepUEg/uHvSjYAguhtYms77B9Kbzi4EgJYtW2a3WGVDQkgK0AwPT4p/FO
ucXwANUjgTABo/frzdYgX4+pJcIKDZnl5081Gs034B1EuuIAD05Zdfms05cOAAiYRC6qxQ0DE/f7obWJpuBwTRYi9v8pJIqGmjRmQ0Gk3mGI1GatW8OXlIJLTA05tuPYp1wi+AuikUJBQKaffu3XY5J51OR+7OzuQhENBSL2+6/SjWUV9/aieVkQCgBQsW2CXWy0qpVFK5siFUoZSYDg0FGT8H0WTQpTGgxmVE5O7mQvHx8XaJlZaWRkFBQRQUFESTJk2iuXPn0ty5c+nzzz+n0NBQ8vLyooSEBLN5U6ZMIQDUrVs3mjFjBs2dO5d+/fVXio6OJoFAQF988YVd8mOMsccVWWsgkUiE+/fvm73dmZKSAl9fX36bu4S7e/cuypQpg89dXDHcxdVsfERaKq77++HK9esQCgu3gVJMTAzatm2Lae6e6PvEloREhP6pyTghECAzO7vQsVavXo1evXphpocXuikUJmNGIkSnJOGakxPSs7JMxjp37Ij43buxxdMb4ife0t6lVmFgagr27t2Lpk2b5h8/fPgwGjZsiEVe3mgrk5vMMRChS1oK/Bo3xvaYmEKdEwBMmzYNH330Ef70LoXGUtOWQzoitEpKhLpUKdy7f7/QsV5W8+bNw6hRI3H1LaCit+lYpgYI/U2EIaPex48//ljoWL/88gs+/PBDfP311/Dy8jIZUyqV+PTTTzFhwgR8+eWX+cdVKhUCAwMRFRWFXr16mX3OtWvX4siRI0hISHDYrkqMsZdTka3mpkfPRj4pJSXFbI9iVvL8888/EAHorbD8tRyoUOBafDwuXLhQ6FjTp0+HFED0E8UdkPsc7kCFC7LVauzZs6fQsX7//Xe4CQToJJebjQkFAgxSuCBDqTQ5L5VKhS3btqGvVGZWSAJAC6kMwVIp1q5da3J83bp1CJBK0Vpq3k9SJBCgr1SG/3bsQGZmZqHPa+nSpQgRidDIyfy5PolAgAEKZ9x/8AA5OebbSLJc69etRZtyArNCEgDcpEC/cAPWrl5ll1jr1q1DRESEWSEJAC4uLoiKisLq1atNju/Zswfp6elo1qyZxc/ZrFkzZGVlYefOnXbJkTHGnleBVnMDQHR0NIDcX/KDBw82eSjdYDDg3LlzaNiwof0zZMVKqVRCIRLBxcqdQB+hKP/j7BHLVSiEzMoiFh9Rbg7JycmFjpWdnQ0PoRASK7F8H8V6+PBh/jGVSgUigo+V10IgEMBHKDJ7LZRKJbyFQgitxsp9DXNycgp9Z16lUsFHKLK6EMhXJAIBSE9Ph8JC0c4AZVYmyrtYf4PG3wVQKrPtEisrKwuenp5Wx93c3HDr1i3T/B59f7m6mr9TAPx/owh7/EwyxpgtCnxn0t3dHe7u7iAiuLq65v+3u7s7/P398eabb2LFihVFmSsrBlWrVkWGTocLOq3F8YMaNcQikV12SalRowaSjcb8Pa+fdEijgRBA/fr1Cx2rWrVquGMw4N6jfaifdFCjgQi5Ozzl8fDwgL+PDw5pNBbnpBkNuKhRo2rVqibHq1atiqsaDZKtPPJxQKNBKU/PZ+66UxAVKlTABZ0WGUajxfH9ajWchEJe1f0UVcOqYc9tMQyWX0LsuilE1aphdokVFhaGa9euWe2MERsbi7Aw01h5319Xr161OOfy5csmH8cYY8WtwMXk4sWLsXjxYkyePBkLFy7M/+/Fixdj7ty5mDRpkl1+OTLH6tChA0r7B+BbZRY0T/zCu28wYK5ahejoaLu0CPrmm28gFgjwdUYGdE/Euq3XY65SiXIVKiAkJKTQsaZOnQohgG8yM2B4IlacXodF2UqEPfH2o1AoxIhRo7BGo8Z5rWlxTUSYmpkJiMUYPHiwydiAAQMglkgwJSsDxidiXdbp8KdGjWFvvgmxuMBvDFg1bdo0aAFMy8wwK1DOaLVYp8pB4+bNC/3M6cts5KhRuJ2mxy9HzMf+vQrsvGHEqDFv2SXWqFGjkJCQgH379pmNHT9+HHFxcRg9er
TJ8WrVqqFhw4bYvHmz2eMKKpUKmzZtQlRUFKKiouySI2OMPbeiXQdkGa/mfrHt2LGDpBIJVZbK6Ft3D1ruVYrGubpRKYkTBQcG0p07d+wW67PPPiMBQFXEYpr6KNZ7Lq7kJhCQVCym8+fP2y3W2LFjSQBQNbGEfnD3pOVepWiMiys5CwSkcHKi69evm83JysqiOlFRpBCLabizCy3z8qbpHp5UVyYnADR//nyLsZYuXUoCgYBqyeT0s4cnLfPypjedXchFLKbI6tXt+r3fp08fAkB1nJxouocnLfXypqHOLiQFyM3ZmRITE+0W62X10UcfEQDqUkVAq3uANvYGDa4JEosEFN29G+n1ervEMRqN9NZbbxEAioqKopEjR9Lo0aOpXr16JBQKqW/fvhbb/Jw/f548PDyoVKlSFB0dTe+88w716NGDfHx8yM3NjU6fPm2X/Bhj7HF2bQ1Us2ZNioyMLNA/eybHHOfYsWPUpXNnEgqFBICc5XIaOXIk3bt3z+6x5syZQ76lShEAwqP+iBEREXThwgW7x/rll1+olKdnfiwxBBQZGWmxkMyTmZlJEydONJnXtFEj2rJly1Njbd++nZo3aZI/x8vdnSZMmEDp6en2Pi367LPPyONRr0kAJBEIqGHDhlxIFpDRaKQlS5ZQRHjV/NewTHAQTZs2zWIrtMLGmjt3LlWuXDk/VmhoKE2fPv2pRWtsbCz179+fnJyccr/GEgn16dOHLl++bNf8GGMsj11bAz3epkKtVmPWrFkICwtDgwYNAABHjhzBxYsXMWbMGHz33XfPvBvKrYFKjszMTKSnp8PX1xcymfnqZHu6desWHjx4gKpVqxb590V8fDwePnz4XLF0Oh0SExMhl8vh7W1h6a8VqampyM7Ohp+fH5ycnGxNuUCuXbuG9PR0hIeH84IbGxARHj58CL1ej4CAgCJ9PICI8ODBAxAR/P39CxwrOzsbKSkp8PLy4lZAjLEiVdB67bn7TA4fPhwBAQH4+uuvTY5PnjwZd+7cwaJFi+yWHGOMMcYYc4wiKybd3d1x4sQJVKxY0eT4tWvXULt2bWRkZNgtOcYYY4wx5hhF1rRcLpfj4MGDZscPHjxY5G+DMmZPz/l3lM1zGLM3o5VWUCXdy3pejL3snruYfP/99zF69Gi8++67WLFiBVasWIF33nkHb731FsaOHVsUOTJmN1euXMHw4cPh4eoKsViMsEqVMH36dKjVaqtzdDod5syZgxrh4RCLxXB1dsaA/v1x9uzZYsycvequXr2Kxo0bw0kigUgkgkQiQYMGDXDx4kVHp1Yox48fR61atSAWiyESieDk5ISWLVuaNW9njL24nvttbiB3n+MZM2aYNMt977330LNnzwLN57e5mSPs3bsXnTp0gLvBgDecpPAXiXBEp8VmtRqNGjbE1v/+g/yJ7RZ1Oh2iu3XDlq1b0VYmR1MnJ6QajVij1eC+0Yj1GzagU6dODjoj9qo4fvw4GjduBECAhg0bIigoCAkJCTh08CCMRNi9ezcaNWrk6DSf25YtW9C1a1dIJBI0btwYPj4+uH37No4cOQKxWIxTp05xM3bGHKjInpm0By4mWXHTaDQoGxyMcllKLPXwhPyxlbPHNRr0TU/FuIkT8c0335jM++mnnzDxww+x2MMLLR57jENDhNHpaTgiEuLOvXv8fcyKlK+vD3Q6HT788COTTgJpaWmYNu17AEBKSqqj0rOJ0WiEm5sbXFxcMGHCBJPtIh88eIBp06ahVKlSuHnzpuOSZOwVV2TPTDJWEm3YsAEPkpIwxdXNpJAEgDpSKfrI5Jg3eza0j+10Q0SY9euv6CqTmRSSACAVCPCNmxuys7N5G1FWpHbt2oWkpGR069bdrCWVp6cnunePRmpqGrZs2eKgDG2zePFiZGdno2fPnmb7jvv7+6Njx464ffs2Lly44KAMGWMFVaBi0svLC8nJyQByL15eXl5W/zH2Ijpx4gRCZXJUkEgsjreRyZCUmo
q7d+/mH0tPT8eN27fRWmp5YVmgSIwImQwnT54skpwZA4DNmzcDACIiIiyOV69e3eTjSoqYmBgIhUKzvcjz1KhRA0RU4opkxl5FBdoc+Jdffsn/y/GXX36BQCAo0qQYszcnJydkGw0wEkFo4fs3y0j5H5dH8qjwVBotPwlCRMgiKvJG5OzVltclQ61WW2xSrlKpTD6upJBKpTAajdBqtRZzzzuvJ59jZoy9ePiZSfZKOHjwIBo3boxlXt5oKTP/5TQkLRWJoWVx5sIFkz+WmjVuDPWpU1jn4WX2R9RJrQZdk5Pw77//onPnzkV+DuzVdOvWLZQLDUWHjh3RpUsXs/EtW7bgn3/+wdWrV836/77IDh8+jIYNG6J3795o0aKF2fhff/2FvXv3Ijk5GR4eHsWfIGOs6J6ZHDhwIBYvXoy4uLhCJchYcWrYsCEa1quHCcosnNRq8o+rifBzViZiVDmYMGmSWcE4YeJEHFOp8FVmBlSP9cC7pNPinaxMVKtaFR06dCi282CvnjJlyiAyKgpbt27FsWPH8nsxGo1GnDhxAv/++y+qVatWogpJAGjQoAHKli2LdevW4dy5c/k9XI1GI/bt24fdu3ejWbNmXEgyVgLYtJ3ivn37cP36dQQFBaFZs2Zo3rw5mjVrVuCLGd+ZZI7w4MEDdGzXDqfPnUOETAZ/AKcMBqTodPjqq6/w2WefWZz366+/YuzYsXAVilBHLEIKgNNqNSqXL4/tO3eiTJkyxXoe7NWjVCoRHhaG23fuwMvLC8HBwbh79y5SUlIQGBiIy5cvl8hr6cOHDxEWFoaUlBT4+voiICAAN2/eREZGBipVqoTz58/zYySMOVCRtwa6d+8e9u3bh71792Lv3r2IjY1FQECAyQKGwibHmL3p9Xr8+++/WLNmDbIyM1G5ShWMGDEClStXfuq8+Ph4zJ8/HxcuXIBCoUC3bt0QHR3Nv+hYsTEajZg/fz6mT5+O1JQUeHp54d1338WoUaMgFJbcxhxGoxE//fQT5s2bh8zMTPj4+ODDDz/EwIEDHZ0aY6+8Ii8mc3JycODAAezevRt79uzBqVOnEBYWhtOnT9stOcYYY4wx5hhF9szkxx9/jIYNG8Lb2xsTJ06EWq3GxIkT8eDBgwIVkowxxhhj7OXx3HcmhUIhfHx8MHbsWERHR6NSpUrPHZTvTDLGGGOMvdgKWq8VqM/k406fPo29e/diz549+Omnn+Dk5JS/CKd58+Y2FZes4HJycrB161YkJycjJCQEbdq0gVj83F/GAlEqldi6dStSU1MRGhqKVq1aQSQSPXPeypUrsX37dkilUowePRpRUVFFkp+t0tPTsXXrVmRlZaFy5cpo2rQp9059yej1euzYsQO3bt2Ct7c3OnbsCIVC4ei0HGb79u1YtWoVBAIB+vfvj1atWj1zjlqtxrZt25CYmIigoCC0a9cuv/fqi+LcuXM4duwYxGIxWrZsiZCQkGfOsfW6durUKZw8eRJOTk5o06YNAgMD7XEKZogIx48fx9mzZyGVStGuXTv4+fkVSSzG7IYK6cyZMzRo0CASi8UkFAoLNCcjI4MAUEZGRmHDvzKMRiP9+uuv5OnmRgBIABAACvLzo/Xr19s91vfff09uzs4mscoEBdGWLVuszvvvv//M5ggAqliuHCUlJdk1R1vo9Xr6+OOPSS6VmuRYuXx5OnDggKPTY3byzz//UHBQQO7XWJD7NXZ3c6GffvqJjEajo9MrVhcuXCAfHx/Co+/1vH9+vr505coVq/Pmz59P3t7ej15DQe4cPz/6448/ijF76+Lj46lJkyYm5yQUCql3796UmZlpcU7edc3V1dXkvEJCQp56XYuNjaX6dWubfD+JREIaPGggZWdn2/W8Lly4QFFRUSb5SSQSevPNN0mtVts1FmMFUdB67bmLSaPRSCdPnqSffvqJXnvtNfL09CSRSESRkZH0/vvv2zU59n8zZswgANRf4UwHfP3pTkAQbfPxpbZyOQ
kEAtq0aZPdYk2ZMoUA0FBnFzr8KNa/pXypuVxOIqGQdu7caTbn5MmTJBEKqaxIRAs9velmQBBd8g+kL9zcSQqQj7c3GQwGu+Voi3fffZeEAgG96+JKJ/wC6E5AEK319qE6MhnJpVI6efKkQ/Njhbd9+3YSCgXUqZKAjo8AGT8Hxb0LeqtObhHwww8/ODrFYpOYmEgymYxcXFxoyJAhNHPmTPrtt99o8ODB5OzsTHK5nFJSUszmzZ8/nwBQgwYN6Msvv6Q5c+bQ559/TrVq1SIAtHr1ageczf8lJSVRcHAw+fj40MiRI2nWrFk0Y8YM6tu3LykUCmratCnp9XqzeXnXtZYtW9K3335Lc+bMoYkTJ1K1atVIJBJZvK7du3ePAvx8qLKPiP7uBdJ9Bkr/CDS9HUjhJKT27dra7Q+U+Ph48vb2ptKlS9Pbb79Ns2fPpp9//pmio6NJIpFQdHT0K/fHEHO8gtZrz/3MpKenJ5RKJWrUqJH/9naTJk2eq7EsPzP5fLKzsxHk748uRsJ3Hp4mY0Yi9EtLRVpoWZy7eLHQb9emp6cj0N8fA8USfObuYTKmJ0KPtBSIqlXDkePHTcbq1KmDyydPYr+vP3yfeMvoP7UKQ1NT8OOPP+KDDz4oVH62unXrFsqVK4dJLq4Y7eJqMqYiQsfUZFRt0wYb//3XIfmxwiMi1IqsAbeMC9g5gCB6Ynnh+9uAhRfkSLifmL897MusT58++Ouvv/D555+bvSV7584dfPPNNxg0aBCWLFmSf1yj0SA4OBjly5fH4MGDTa4nRITZs2cjIyMDcXFxDmtH9MUXX+D777/H5MmT4eXlZTJ25coV/PLLL/jnn3/w2muv5R9PT09HQEAAmjRpgh49epjMMRgM+Pnnn+Hl5YWjR4+ajE2YMAELZv2Cy6MN8H9iJ8t/rgJd/wR27tyJli1bFvq8xowZgz/++AOff/652baZx44dw8KFC3HkyBHUq1ev0LEYK6giW829YsUKpKSk4MSJE/jpp5/w2muv8Q4FRWzTpk3IUCrNiiAAEAoEGKlwxoXLl3H27NlCx1q/fj00Wi1GWoglFgjwptwZR0+cwLVr10zGzp48iT4KZ7NCEgDaSGUIFYkwZ86cQudnq1WrVkEuFGKgwtlsTC4QYKhMjk1btiAtLc0B2TF7uHTpEk6fPY/xDcwLSQAY3xDIzlFh48aNxZ+cA2zZvBnVIyIsPtsXHByM8LAwbPz7b5PjMTExSEpKQrt27cz+MBUIBGjbti1u3ryJw4cPF2XqT7V06VLUrl3brJAEgCpVqqBMmTJYvny5yfH169dDq9WiTZs2ZnNEIhFatWqFY8eOmV3Xli9djMER5oUkALxWCajiKzaLZQsiwvLly9GwYUOL+6/Xrl0bpUqVskssxorCcxeTnTp14ruJxSwpKQlOQiFCrCy0Kf/o+MOHD+0Sy00stlgUPi2WjgjlrOQnEAhQUSJBRkZGofOz1cOHD+EvkcDZyt2UcmIxjEYjUlJSijkzZi9JSUkAgErelsdLuwHOUpFdfk5KAo1WAz9/f6vj/gEBUGs0JsfyXkNrCz7yjjvyNUxOTn7qghRfX18kJiaaHEtKSoJCoYC7u7vFOf6PXqcnzyspJc3q95NAAFTy1OPhE7FsodFooFQqrZ5XXheVV+V7l5U8JXfbhFdI6dKloTUacVWnszh+4dHx4OBgu8RK1+lwW6+3Ekub/3GPcxKJcNFKfgYinNPqUKpUqULnZ6vg4GDc02qR9tj+2o+7qNNBIhbD19e3mDNj9pL3PXnmgeXxaymAUm2wy89JSaCQK3D79i2r47du3YKzs+kK97zX8M6dOxbn5B135GsYFBRkdac1IsK9e/fMVnWXLl0aSqUSycnJFufdvn07/+NM5gX6W/1+MhiBsw/FCC7ACvJnkUql8Pb2tvq66/V6JCQkvDLfu6zk4WKyBOjYsSN8vb0xXZmFJx9x1RBhliob9evUQdWqVQsdq3v37nB3cc
EMC7FURiPmqFRo1aKF2X7UDZs0wdqcbNzQmxeUa1U5eGA0YMKECYXOz1b9+vWDQSDAbGWW2Via0YhFajXeeOMNvuteglWoUAFNGjXE1EMiqJ74NiQCvtoHeHu6mzxL9zLr2asXrly5iuvXr5uNXb2ae7xfv/4mx1u2bIng4GBs3rwZxif+8DIYDNi6dSvCw8NRq1atIs39aYYPH45Tp04hISHBbCzv+NChQ02Od+/eHW5ubti8ebPZdU2r1WLHjh1o2bKl2XVt6PA3seKCENcsvGGx7CxwK01vFssWAoEAQ4cOxeHDhy2+O7J//35kZGRgyJAhhY7FWFGweTvFwuAFOM9v1apV6Nu3L1rJ5Bjp7IxQsQTndVr8lpONS0Yjdu3ejYYNG9ol1sKFCzF8+HB0kMsxXOGCELEYp7Va/KbKxg2BAPsOHDDrHRkfH4/wypUhNxgw1tUNbaQyKImwJicb87OVCC1XDrHXrjl0D+HvvvsOH3/8MXrIFRjk7AJ/kQiHNRr8qspGmlyOI8ePo0KFCg7LjxXesWPH0LxZU4R76/BJYyPqBAJxacDPh4GNV3Oft3tV9nxWKpUILl0a2Tk56NSpE2rVqgUiwokTJ7Blyxa4urrg7t17Zv03N27ciOjoaFSpUgVt27aFv78/7t69i+3bt+PmzZvYtm2bXRac2CorKwuNGjXCrVu30K5dO9SsWRM6nQ5HjhzBjh070LVrV6xZs8bsmc+861pkZCRatWqFUqVKIT4+Htu2bUNSUhL2799vdl1LS0tDg3p1kJF4C5800uO1SkCWFlhyBph+VID+/Qdg8ZIldulT+/DhQ9SrVw+ZmZno0KEDqlWrhpycHBw8eBC7d+/GqFGjMGvWrELHYex5FLheK8IV5VZxayDbrFu3jiqVK2fSW61OVBQdPHjQ7rFWrlxJ5UJCTGI1qFuXjh8/bnXO+fPnKTgoKL9/IwASQ0BNmjQhjUZj9xyfl9FopNmzZ1OQn5/JebVq0YIuXbrk6PSYnRw5coTq1alt8jWuUK4s/fXXX45Ordjdv3+fqlSpkt+zEI/6F4aHhT219+umTZuoatWqJq9hjRo1aNeuXcWYvXUpKSnUt29fkkgk+fm5urrShx9+SFqt1uq8lStXUtmyZU3Oq379+k+9rj148IDe6NGDRCJh/hwPd1f67LPPLLYgKoy7d+9S165dSSj8fywvLy/65ptvHN5ajb2a7Noa6J9//ilwFdulS5dnfgzfmbSd0WjEiRMnkJSUhJCQEERERBRprKNHj+bvFBEWFlageadOncKmTZsgl8sxZMgQhz4raYler8fhw4eRlZWFSpUq8d3Il9TFixfzd8CpU6eOQ++KO1pcXBz+/PNPCIVC9O7dG6Ghoc+cQ0Q4efJk/g44NWrUeOF2ikpMTMTp06chFotRv359iyuhn2TrdS0hIQFnz56Fk5MTGjRoUKQ7Kt25cwcXLlyATCZDgwYNIJPJiiwWY09T0HqtQMVkQS/CAoEABoPBbskxxhhjjDHHsOve3E8+iM0YY4wxxhjAq7kZY4wxxlgh2FRMZmdnY8uWLZgzZw5+/fVXk3/s5ZCTk4P58+ejRdOmqB4Whu5du2LTpk1FcpdaqVRi9uzZaNa4MaqHhaFHdDS2b99u1sLjSYsXL0bFChXgKpPBw8UFbdu2xcWLF+2eH2Ml2bVr1zBu3DhERkYiKioK48ePR1xcnKPTyvfgwQM0bdoUUicnOEkkkMlkeP3116FSqewe6/bt22jQoAGcnJwgkUggl8vRu3dvaLVau8di7JXyvCt7Tp06Rf7+/uTm5kYikYh8fHxIIBCQs7MzhYaG2nV1EHOMhIQEqlqpEgkFAmohl9MghTNVl8kJAEV36/bU1ZLP69atW1ShbFkSCQTU+lGsMJmMAFC/vn2trpZs3rw5AaBAkYgGKJzpNZmcJACJBQJasmSJ3fJjrCT7448/SCwWk5ubGzVq1IgaNmxILi4uJJFIaPXq1Y5Ojw
4fPkxikYgAUNUqVahZs2ZUtkwZAkBSqZTu379vt1j//fcfiUSi/NXszZo1o9KlSxMAksvllJ6ebrdYjL0s7Lqa+3HNmzdHpUqVMGfOHLi7u+Ps2bOQSCTo378/3nvvPURHRz/zc/ACnBdb8yZNEHvsGFa6e6KSRJJ/fKtKhTEZafjo44/x9ddfFzoOEaFuVC08vHwJK9w9UE4syT++UaXCuxlp+HbKFEycONFk3kcffYRp06bhQ1c3vOXiCtGjFaYpBgMGpSbjol6Puw8e8G427JV2+fJlVK9eHbVr10b//v0hefSzrNVqsXz5cpw+fRoXL15ExYoVHZajTCaFWCzB2LFjTXZ3uXDhAmbNmgVvb2+zrRFt5eTkBLlcjrFjx5rsV3769GnMmzcPQUFB+TvhMMZyFbRee+63uc+cOYMPPvgAQqEQIpEIGo0GwcHBmDZtGj7++ONCJc0c7/Tp09h74AC+cnY1KSQBoINcjsFyBWbPnAm1Wl3oWAcPHsSJM6fxnbNrfiEJ5HYF6KZQoI9cgd+mT4f+ia0d582ejTpOTnjX1S2/kAQAb5EIv3t6Q0+EDz/8sND5MVaSzZw5Ey4uLhgwYEB+IQnkFlUDBw6EXC53aBPs+fPnQ6PRomfPnmbbBFarVg3t2rVDcnISYmNjCx3ru+++g06nQ9++fU0KSQCIjIxEixYtcO/ePTx4YGXvRMbYUz13MSmRSPJbBfn6+ub/Jefu7m51X1FWcuzevRsKkQhtrPQ16yJXICU9HefOnbNLLC+JBE2kUiux5EhITMTVq1fzj6nVaqRnZSFabrnHWxmxGNUlEuzZs6fQ+TFWku3cuRM1a9aEWGzetEMikaB69eqIiYlxQGa55s+fD4FAYHVrxjp16sBoJEyfPr3QsVauXAmxWIzq1as/JZYRv//+e6FjMfYqKlBroMdFRkbi+PHjqFixIpo1a4bPP/8cycnJWL58OapVq1YUObJilPfUg7XWxKInPq6wsQQCwVNiCcxi5S0AetpfQWKrn5GxV4fRaHxqj2ChUGiXn2Nb5cW2lmPecXss+jMajbnXGitN1+0Zi7FX0XPfmZwyZQoCAgIAAN9++y08PT0xevRoJCUlYd68eXZPkBWvRo0aIcdgwB6N5bexN6tVcHdxscsfDo0aNUKKVosjVlZSblar4OvtjUqVKuUfUygUcFUo8K+VlZ4JBj1O6bSoX79+ofNjrCRr0qQJzp49a3EjCYPBgPPnz6NZs2YOyCxXnz59QEQ4ffq0xfGTJ09CIBBgzJgxhY7VtWtX6HQ6q90eTp06BaFQiOHDhxc6FmOvoucuJmvXro0WLVoAyH2be9u2bcjMzMTJkydRo0YNuyfIile9evVQNyoKk7OVuPvEs4oHNWosVOVg+MiRcHZ2LnSsli1bolqVKvgkOwsPnviFt0utwkpVDka99RacnJxMxgYMHowDWg2WZitN7qwojUa8n5YGoUCAH3/8sdD5MVaSvfPOO0hNTcXq1atN7rgZDAb8+eefyMjIsEuhZqtx48bBSSLB6tWr8fDhQ5OxuLg4bN26FR4e7lbfmn4e33zzDcRiMVatWoWUlBSTsatXryImJgY+Pj4F2maSMWbuuVdz53n48GH+s2xVqlSBj49Pgefyau4X282bN9GiSRPcT0hAB6kMISIRThv02K9SoXWrVvh30ya77RV79epVtGrWHKnJSejoJEWQSITjBj0Oq1R4rVMnrF2/3qyYNBqNqFWrFs6cOYPKYjHayOTIMBqxQZUDFRF+mj4d7733nl3yY6wkmzdvHkaNGgVvb2/UrFkTRISzZ88iNTUVCxYswJAhQxya35YtW9Cly2sgAiIja8LX1w+3bt3EpUuXIZGIcfVqrN0KvDVr1qBPnz4AgKioKJQqVQo3btzA1atX4eTkhFu3bsHf398usRh7WRS4XnvenkOZmZnUv39/EovFJBAISCAQkFgspn79+hW4Txf3mXzxpaam0rRp06hmtWoUEh
BAzRo3puXLl9u1x2SepKQkmjJlClUPC6MygYHUqnlzWrVqldUek0REBoOBpkyZQoF+fiQTiUghkVCdOnVo//79ds+PsZLs5MmTNHjwYCpbtiyFhobS0KFD6fTp045OK9+lS5eoWrVqJBGLSSwSkUQipiZNmlBKSordY50+fZqqVKlCYrGYRCIRSSQSatmyJfeYZMyKIusz2atXL5w+fRq//fYbGjRoAAA4fPgw3nvvPdSsWRN//vmn/SpdxhhjjDHmEAWt1567mHR2dsb27dvRuHFjk+P79+9H+/btkZ2dbbfkGGOMMcaYYxRZ03Jvb2+4u7ubHXd3d4enp+fzfjrGGGOMMVaCPXefyU8//RTjxo3D8uXL8x9WfvDgASZMmIDPPvvM7gm+zLKzs3Hx4kUIhUJERERAaqV595NiY2ORnJyM4OBgs50jSiqj0YhNmzbh/v37qFevHmrWrFmgeWlpabh69SqkUimqV68OkUj0zDlEhIsXLyIrKwvlypWDn59fIbN/MeS1e1GpVKhUqRK8vb2LLJZarcaGDRuQnZ2NNm3aoEyZMgWal5CQgJs3b8LT0xNVqlSx2vfPHo4fP45Tp04hJCQE7dq1e2rPxTxarRbnzp2DwWBAeHg4XFxcChTr4MGDuHDhAsqVK4c2bdoUNvUXxooVK3Du3DnUqVMHb7zxRoHm2Hpd2759O27evImaNWuiXr16BZqTlZWFS5cuQSwWIyIiwmyxnqMRES5fvoz09HSEhobmt9V7FluuayXBw4cPERcXBxcXF4SHhxfoZ7I4r2usEJ73YcyaNWuSi4sLSSQSKl++PJUvX54kEgm5uLhQZGSkyb/CPtD5ssrOzqb333+fXBUKAkAAqJSnJ33xxRek0+mszouJiaE6UVH5cwBQm1at6MyZM8WYvf1NmDCBFE5OJufl7+tLO3bssDonKSmJBg8aRFKJJH9OSGAgzZw5k4xGo9V5f/zxB1WpUCF/jkgopNejo+nmzZtFcWrFwmg00pw5c6hsSOn883KSiGlA//6UmJho11gGg4Gio6PJSSzMjyUUgCpWrEjXr1+3Ou/KlSvUuVNHEggE+fOqVwujv//+2675ERGtX7+eSpXyNvl+clYo6Ouvv7Y6R6/X09dff00+Pj7/n+PsTO+88w4plUqr81asWEFenp4msVxdXOjHH3+0+3kVp3fffZfEYrHJeUkkEvr888+tzsm/rrk8dl3z8njmde37778nFxdnk1je3t70119/WZ2TmZlJb731Fikeu4b6+vrSlClTyGAwFOrc7WXdunUUFhb2/58ToZBee+01unbtmtU5SUlJNHjwYJJKpfnzSpcu/czr2ovu9u3b1KNHD5PvqQoVKtDy5cutzsm7roWEhPz/uubkRAMGDLD7dY1ZV2QLcL788ssCf+zkyZMtHn+Vn5nUarVo36YNjh48iGFyBTrK5NCB8LdKhaU52XijZ0/8sWqV2R2bf//9F927dUNtJyeMkDsjVCzGeZ0Ws1U5SBCLsXf/fkRGRjrorGw3fPhwLFy4EM2kUgx1dkFpkRjHtRr8psxCotGI7Tt2oGXLliZz0tPT0ah+fSTeuIE3ZXK0kMmQYST8mZONdaocTJo0CVOmTDGL9fvvv+Ptt99GW7kcg+TO8BOJcESrwWxVDsjTE0eOHy+Rd3onT56Mr776Cn0jgBFRgLcc2B4H/HBEDA+/EBw6csxuf83Xrl0bJ0+exKAawLBIwF0GbI4Fvj8I6AVOuHItDqVLlzaZExsbi4b168FTlIUP6xvQMBi4mQ7MOCZATBxh+fLl6N+/v13yW7duHXr27IlSpUqhffv2CA0NxcOHD7Fjxw5cu3YNEyZMwLRp00zmEBEGDRqElStXomnTpqhXrx7EYjHOnDmDnTt3olatWti5c6fZHbZly5ZhyJAh8PPzQ/v27RESEoIHDx4g5r//cCM+Hl9++SU+//xzu5xXcRoxYgQWLlyYf0c3ICAAd+/exbZt25CQkI
CPPvoI3333nckcrVaL9u3a4OihA3i/rhGvhwE6A/DHeeD3EwK88Ybl69pnn32Gb775BuXLl0fbtm3h6+uLW7duYdu2rXj4MAkrVqzIb+eTR6VSoWXLljh79ixatWqFmjVrQqvV4ujRo9i/fz8GDRqEhQsXFuld72dZsmQJhgwZgmrVqqF58+bw9vbG9evXERMTA6PRiCNHjqB8+fImc9LT09GwYUPcu3cPrVq1QrVq1ZCTk4ODBw/iyJEjVq9rL7qEhATUq1cPOTk5aNOmDSpXrozMzEzs3bsXp0+fxs8//4yxY8eazcu7rtWtWxeNGzeGi4sLLl68iB07dsDX1xeHDx/mu5TFoMgW4NjDq1xMLl68GMOGDsUabx/Uf+KX0z+qHIxJS8X27dvRtm3b/ON6vR5lSwejamYGFnp4QfTYRTLbaES3tFT41qmNPfv3F9t52MODBw9QOiAA0XIFfvbwNLn4pxoMaJf0EOIAf9y+e9dk3meffYZfpk7FFi9vlBdLTMZmZmVialYmrl69arJzTmpqKoICAtBLLME37h4msRINBnRMS0GHvn2xePHiIjrbohEfH4/y5cvji2aEz5/YzOR6KlBrgQij3vkA33//faFjbdy4Ed26dcOPbYAPGpqOXXwI1JoHNGzaArt27TIZ696tK84f2Ixjwwzwkv//uJGAgRuAzbddcS/hARQKy/utP49S3t4QSySYNGkS5PL/BzMajZg7dy4uXDiPtLR0k7ev9+zZgxYtWmDIkCFmOyfFxcXhxx9/xKxZszBy5EiTMXc3N7i5u+Ojjz4yeXvVYDBg5m+/4dr161AqlS/cW69Po9VqoVAoUL58ebz//vsmb69qtVr8+OOPuHfvHnQ6ncm8xYsXY9iwodgzCGj6xBMPf10Aeq+D2XVNrVbD1dUVVatUwZi33jJ5y1OtVuP776ciJycHaWnpJp/vt99+w9ixY/Hhhx+ibNmyJmMHDx7EsmXLcODAATRq1KhwL4aNlEolAgICEBERgUGDBplca5RKJaZOnYpmzZphzZo1JvM+++wz/PTTT5g4caJZv8utW7fi77//NruulQQjR47EX3/9hUmTJpmsqyAirFmzBvv378edO3fg6+ubP5Z3XevcuTM6d+5s8vkePnyI7777Dm+//bZdrmvs6YpsAQ6Q+xfUggULMGnSJKSmpgLI3Y7q3r17tmX7ClkwZw6ayxVmhSQAvCaTI0wmw4L5802Ob9++HfcSH2C8i6tJIQkAzkIh3lEosPfAAVy7dq1Ic7e3jz/+GEYAE9zczO4ieIlEGO3iirv37pmcFxFhwdy56OEkNSskAWC4iyu8JBIsXLjQ5Pgff/wBg16Psa7msfxEIgyRyvDnH39AqVTa7wSLwaJFi+AuF2J8Q/OxCl7A0OoGLFowzy57Dn/11VfwlgPvWHicLdwXGFgDOLR/r8nxxMRE/PPvvxhf37SQBAChAPi6BZCRmYX169cXOr8DBw4gJTUVHTt2NCkkgdy9l7t27Qq93oCvv/7aZGzBggUIDAy0+Jxe+fLlUb16dbOtYv/55x9kZmWhU6dOZsWiSCRCl0fb9/3000+FPq/iNG7cOBgMBnTt2tXsOT0nJyd07twZer3e7Jf4gnlz0L6C0KyQBICe4UD1ALHZdW3q1KnQ6/Xo0rWr2bNzMpkMHTt2Qnp6BmJiYkzG5s2bhxo1apgVkgDQoEED+Pn5YcGCBc9x1va1Zs0a5OTkoEuXLmbXGhcXF7Rs2RJ///23yU48RIT58+ejbt26Fhunt27dGq6urmbXtRedSqXC8uXL0bRpU7MFugKBAJ06dQIALF++3GRs0aJFUCgUJn985PH19UWDBg2wcOFC3kv9BfLcxeS5c+dQqVIlfP/99/jxxx+Rnp4OAFi/fj0mTZpk7/xeOjdu3ECk2PK6J4FAgJpCEeKvXzc5Hh8fDyehENUklu9wRD46Hh8fb99ki9j169dRSihEoMjy6xHp5ARC7h8qeTQaDR
4kJSHSyt0emUCAMLHY7LW4ceMGQqRSlLLyIHukkxPUWi0ePHhg28k4SHx8PCJ8AYV5XQ0AqFcaSE5Nt0uRfP/+fdQOBJysrAWoFwRo9EZoH9tr/fbt2zAaCfWCLM8J9QR8Xc2/XrY4ceJE7ue0smNKYGAgJBIJrly5YnI8Li4OZcqUsfq2aNmyZc3yy/uetBYrr9C5cOFCgfN/EZw9exaA9fPKO37kyBGT4zduxKFeoOVf7AIBUC9Aj/g40z92L168CJFIiJCQkKfGyvu65rl586bV/ITC3M9348YNi+PFIT4+Hp6envDy8rI4HhoaCr1ejzt37uQf02g0SExMtHpeEokEpUuXLnHX+KSkJKhUKqvn5ezsDD8/P7Pzio+PR1BQkNW7+qGhoUhJSSlxf/y/zJ67mBw3bhwGDx6Ma9eumWyp17FjR+zbt8+uyb2MvLy8cMegtzp+h4zwKlXKbI7WaMT9J/avznP70eezdvF6UXl7eyPdaITSyl+Xea/T479spFIpFDIZblt5LYgId41k9lp4eXnhoU4HlZWnOu48+nwlrb2Vl5cXbmUIYLTysEp8GiB1ktjlLWQXFxfEpQHWHoyJTwdEQpj8Asj7OsSnW56TrgbScox2+d7Ne1YzOTnZ4nhmZiZ0Op3Z1q/e3t5m+zU/Ljk52ez7IjAwEACszss7XtI6BeTla+01zDv+5HOxXl7eVr/GABCfIYRXKdPX3dfXFwaDEWlpaRbn5L2GQUGmf4l4enpazQ/IfaTFkddCLy8vZGVlQa1WWxzPy/3xHKVSKeRyudXzIiKHn5ct3N3dIRAIrJ6XTqdDWlqaxet1amqq1TuPycnJub8L7HBdY/bx3MXk8ePHzZ4dAnJ/4EvaXR1H6DtwIDZpNEiwUFBe0mmxX6VC3ycWI3Tu3BnOcjkWKLPM5hARFmRno1K5coiKiiqyvIvCxx9/DD2A5Tnmje71RJivVMLD1TV/pyUg9+5trz598KdGjSwLF5oYjRo3NWr07dvX5Hjv3r2RpddjtYVYWiIsUavQrnXrEvdAd58+fXA7TY/1l83HlFpg3hkxevbsBbGVu+HPY/jw4bieCmy7bj6WrgbmnQQqVwkzOV6+fHnUrR2FGceEMFj4vTDrOEAQ4vXXXy90ftHR0ZDLZNixYwcsPQq+a9cuCAQCfPLJJybH+/bti9jYWNy8edNsTkZGBk6ePGm2QGjw4MFwcpJgR0yMxVg7d+6EUCjExIkTC3dSxWzGjBkQCoXYsWOH2RgRYceOHRCJRGYLcPr2H4jVl4W4m2n+Oc8lAjvijOjbz/Q1nDRpEoRCIXbu3GkxVkxMDKRSqdnPcr9+/XD8+HFkZpoHu3HjBuLi4tCvX7+CnG6R6NGjBwwGA/bu3Ws2ZjAYsHv3bjRq1Mjkj2SBQIDevXvj0KFDUKlUZvPOnTuHxMREs9fiRefu7p5/o+nJ52wB4OjRo1AqlWaLrPr06YPk5GScPn3abI5arcbBgwfRs2dPu1zXmH08dzEplUot/hDHxsaa/cXPzI0cORKl/PzQOz0Nu9VqGImgI8ImVQ4GZKQjIiwMvXr1Mpnj5uaGSZ98grnZSkzJzEDSo7toN/V6jE1PQ4xahW+mTi1Qz64XSZ06dVC9enV8l5mB37Iykf6oOLyq02FEagrO6LT41EJHgIkTJ0IpkaBveipOaDUgIqiMRvyRnY13MjPQrnVrNGnSxGROhQoVMHToUHyRlYk5yixkPop1SafFsPQ0xBoM+Pw5OhW8KOrXr4/XOnfC4H9EmHsCyNHl3jk8chdo/4cIyRoJJtrp8ZNx48bB29Mdb6wBFp0GVI9iHbgNtFoGpKlzF0c86atvpuDAbUKvdQJceXSDIiUH+Hov8NkeAd597z2Lz4k9L6FQiPfefx+XLl3CwoULkZiYCCD3juTff/+NrVu3omnTpmZvufXo0QM1a9bErFmzcPz4cRgMBhiNRly6dAnTp0+Hh4cH3nrrLZM5Tk5OGD58BE
6fOYNly5bm33nJyMjA2rVrsWvXLrRv395kUUFJEBQUhLCwMOzbtw+rV6/Of4wpJSUFK1aswIkTJ9CoUSOz/psjR45EKR8/tFohxrbruYurdAZg7SWg/R9iRIRXNbuulS5dGi1btkRMTAzWr1+f/3slKSkJS5Ysxvnz5zFmzBizguHdd9+Fi4sLpk+fjsuXL4OIoNfrcezYMcyePRu1a9dG165di+5FeobSpUtjzJgx+Pvvv7Fly5b8XeESEhIwb9483Lx5E1999ZXZvIkTJ0Kn0+HXX39FXFwciAharRYHDhzA4sWL0bZtW7PrWkkwefJkJCUlYdasWbh9+zaA3Gcpd+zYgT///BP9+/dH1apVTebUr18fnTt3xrJly7Bv3z5otVoQEW7cuIHffvsNOTk5Je4PtZfe8/YcGjZsGHXr1o20Wi25uLjQjRs36NatWxQZGUnvvfeeXfsWvayuX79OtWrUzO1lJxaTQiQiANSyWTN68OCBxTlGo5G+/PJLkjk5kVAgII9H/RU93dxo8eLFxXsCdqTRaCgyMpIEAIkAcnnUh1AiENDEiROtzjt27BhVKFuWAJCbWEJSoZAEAgH1euMNysrKsjhHq9XSqFGjSCQUkkQozH8NA/38aOvWrUV1ikUuOzub+vbpTQKBgJzEAnKX534/hZYJpkOHDtk11r1796h0UEDu10gIcpbk9n+TO4lpxYoVVuetXbuWfLxz+zF6KsQkFgnISSKm8ePHk16vt2uOo0ePJtGjnymZTEYCgYAEAgE1adLEar/Dhw8fUuvWrQkASaVSkslkub0wq1enq1evWo01cOBAEgqFZrHatGnzwvQ7fF56vZ6qVKmSfy55r4VQKKTatWtbnXf9+nWqFVmDAJCLVEQKp9zXpWVz69c1g8FALVq0MIslEglp2LBhVmNdunSJqlWrlvu9J5fn92Vs164dJScnF/o1KCy9Xk/vv/8+SSQSEolE5OLiQgDIx8eHNmzYYHXesWPHqHz58vl9TiUSCQkEAurZs6fV61pJEBMTQ4GBgfnnJRaLSSQS0fDhw0mtVluck52dTX369CGBQEBisTi/p2jZsmXtfl1j1hVZn8mMjAz06NEDJ06cQFZWFgIDA/HgwQM0aNAAW7ZsgbOz8zM/x6vcGigPEeHw4cM4dOgQhEIhWrVqhRo1ajxzXmpqKjZs2ICkpCSEhISgW7duL8VzIxcuXMCUKVOQlpaGatWqYfLkyc/cfcRoNCImJgZnz56FTCZDp06dzHq3WXLv3j1s3LgRWVlZqFy5Mjp16gSJxMoKlhIkPj4emzZtgkqlQkREBNq2bVtkO2fExMRg9uzZUKvVaNq0KcaPH//Mt5w0Gg3++ecf3LhxAx4eHujevXuR3blLTU3FF198gWvXrsHX1xeffvopKlas+Mx558+fx44dO6DX69GgQQM0atTomf0KHz58iC+++AI3btxAQEAAvvjiiwLvCPQiu3DhAkaPHo3ExESULl0a8+fPf+bPl63Xtbi4OHz99dd48OABypcvjy+//BKlnnh23FKs/fv34+jRoxCLxWjbti3Cw8Of6xyLWmJiIjZs2ICMjAxUqFABr7322jNbRdl6XXvR6fV6bNmyBZcvX4aLiwu6du1q9uytJcV5XWPmirzP5MGDB3H27FkolUpERUWhdevWdk+OMcYYY4w5BjctZ4wxxhhjNrN70/LDhw9j06ZNJseWLVuG0NBQ+Pr64s0334RGo7E9Y8YYY4wxVuIUeF39V199hebNm+dvbXT+/HkMGzYMgwcPRtWqVfHDDz8gMDAQX3zxRVHlygCcPHkSK1asQFJSEsqUKYPBgwcX6FkwxuxFr9dj8+bN2Lx5M9RqNaKiojBo0KBn9ui8desWFi9ejOvXr8PLywu9e/dGgwYNHLqH8uO0Wi3Wr1+PmJgYGAwG1K9fH/369YOrq+tT5129ehVLlizJ3xJuwIABiIyMfOoctVqNNWvWYPfu3TAajWjSpAn69O
nzzOefL168iKVLlyIhIQEBAQEYOHAgIiIinjonJycHf/31V34f4ObNm6Nnz55muwTZg1KpxKpVq3Dw4EEIhUK0bt0ar7/+utm+5sy+iAh79uzB2rVrkZmZiSpVqmDIkCH5/VAZK3IFXdHj7+9Px48fz//vjz/+mBo1apT/36tXr6aqVavadXUQ+z+1Wk09e/QgABTgJKV6cgV5PlqNPGHCBDIajY5Okb0Cbt68SeFVKxMAquYvpoZlRCQRCUghl9G6deuszvv6669JKBSQm1xETcqKqIyXmABQh/ZtX4hVqhcvXqQywUEEgCIDxVQvWJTbNcHdlXbu3GlxjsFgoPfff58AkLezmJqUFVGge+559endmzQajcV5p0+fpqAAPwJAtYLEVDdYTAIBqJSXJx04cMDiHL1eT2+++SYBIHd3d6pUqRJ5eHgQABoyZIjVVepHjhwh31LeJBCA6pQWU+3Sufn5+/qYXM/tYc+ePeTl6U5CoYDqBospKig3VnBQAJ09e9ausdj/paamUpMmTQgA+fn5UaVKlUgmk5FYLKZff/3V0emxEq6g9VqB70ympaWZ7Oawd+9edOjQIf+/69SpY7I9FLOvd955BxvXr8evHl7oKpdDJBBATYTF2Up8+8MP8Pf3x7hx4xydJnuJabVatG/bGprkmzg2HKgTlNt4P1EJvLtNg169euLQocOoU6eOybzFixfjs88+w2dNgY8aGeDslNuH8J+rwIC/d2LokMFYvWatI04JQO4zQW1bt4QXJePCaCDcN/e87mQAwzdl47XOnXDm7DmzdwB++OEHTJ8+HT+1Bd6qo4dUDOiNwMpzwJtrV8PLywszf//dZE5ycjLatm6JMrJM7HobqOSdG+tmOjB4YwY6dmiH8xcumW0x+OWXX2LBggXo06cPGjduDLFYDIPBgIMHD2Lp0qXw8fEx2y87ISEB7du1QbhHDg73Bcp55sa6ngoM+DsV7dq2xqXLV+2yS098fDw6d+qAun4aLBpIKOORG+tKMtB3w0O0a9MKl67ElrgdpkqCnj174vTp03j33XcRFhYGgUAAlUqFf//9F++++y6CgoIQHR3t6DTZS67Az0w+vn+mVqvFqVOnUL9+/fzxrKysl6K9yovo/v37WLxoET50cUW0QgHRo7cFZQIBRru4op/CGT9MnWqyJzJj9vb333/jSux1rH9DjzqP7XDn5wKs6E4o7ynAjz/8YDLHaDTiu2+/xhthAnzVAnB+1BVFKAC6VQF+bWfAmrXrcO2a6b7NxWnZsmV4kPgQm3obEP5Yp6Jgd2BDTyNcxHr8+uuvJnPUajV+nDYVb9UBxjUApI/+LBcLgUE1gS+bGTF//jwkJSWZzFu4cCGyMjOwqbcBlR7bbKmsB/BPbyOEBjVmzZplMkepVGL69Olo3bo1mjdvnt+CSSQSoWnTpmjfvj1mzpyJjIwMk3lz5syBQZODf3sZUO6xGq6CF/BvbwPU2VmYP3++Ta/Zk2bOnAkptNjYy4gyHv8/XqUUsKm3ASkpKViyZIldYrH/O378OHbs2IH+/fsjPDw8/5ERuVyON954A2FhYZgyZYqDs2SvggIXkx07dsTEiROxf/9+TJo0CQqFwqQb/7lz516KXlgvoq1bt8JgMKC3wnIPzz4KZzxISsLx48eLOTP2Ktm4cSNqB4lQ08JmNRIRMLi6Hhs3/m2yveDly5dxLS4eI6IsN43oEwEonIT4559/iirtZ9r49wa0Kw+EuJuPKSRAv3A9Nm4wvXN6+PBhJKemY4SVHUyHRwFanR7bt283jbVhHbpUMsLPQgtVNynQs4oBG9ebxtqzZw+ysrKs7n7SuHFj5OTkYNeuXU/EWoseVQzwtPBoZCkF0L2yEX+vt88d4b/Xr0GfMANcLLRQDHQFOlUkbNyw3i6x2P9t3LgRbm5uFnt5CgQCNGrUCCdPnsT9+/cdkB17lRS4mPz6668hFovRrFkzzJ8/H/Pnzzdpvrpo0SK0bdu2SJ
J81alUKoiFQrhZWajg+WgbRUt7ujJmLyqVCl4yCxtsP1JKAWi0Ohgf2zM973vSy8paD5kYcJYKHfq9q8rJhrfceoc0bzmgUqlN5zzjvDxlgEBg/jOpysmBl8x6Lt4KC3Me/be1DSHymvtbjPWUNTbe8tyPsQeVSvXsWKpsu8Ri/6dSqaBQKKxupZv3PcO/G1hRK3AxWapUKezbtw9paWlIS0tD9+7dTcbXrFmDyRb2UWaFV716deiMRhzSWm69tFejhlAoNNvflDF7ql69Og7dFSLLSgewbXEChFetbLI7RcWKFSGXSfFfnOU5p+4DSVl6VK9evQgyLpjqNaOw85YYOoPl8e3xIkREmOaX95aitfOKuZG7b/mTK62r14xCzC0xjBZqVyJg2w0RImrUNDme9zkuXbpkMdbFixdNPu7xWP/Fi2GpkzARsD1ejOo1rdxafU41akbiv3jLu5IYjEDMTTEiqj99hTt7fjVq1EBiYqLZ4xR5Ll26BA8PDwQFBVkcZ8xeClxM5nF3d7e4lZGXl9czt4litmncuDHCK1fGt9lKZBpN7wzd0+sxU5WDbl268AWDFanhw4dDbQA+2gGzYmj7deDvK8Dot94xOe7u7o6+ffvh52NixKaYzsnRAR/ECBEcFICOHTsWcfbWjRo1CgkZenyzD2aF1x/ngf03DRjztul5lSlTBp06dsDXB8S4l2k6J00FTNolQs3q1VCvXj2TsdFjxiAuWY+fDpnnsfA0cDrBgDFvvW1yvEqVKmjevDk2bdqEzEzTYEqlEv/88w8aNmxoVkyOHvMWzj/QY/YJ81i/HgWuJukxeswYC6/I8xs95m0cuWPA0jPmY98fBG6n2S8W+7833ngDHh4eWL16NfR6vcnYnTt3sH//fgwbNoxbM7GiVzyLy01xa6Dnd/LkSXJ3caFAJylNcHWjWZ5eNMrZhTwkEiobHEx37txxdIrsFTB37lwCQHVKi2hWR9DKaFDfCAGJRQLq3LEjabVaszlJSUlUtXJFcpWJ6L16oD9fB33fGlTOW0wKuYz27t3rgDMx9e233xIAahYqpPmvgZZ1A3WvKiCBADRo4ECLrbdu3bpFwUEB5O0soo8a5Z7Xl81BQe5i8vRws9oO5+OPPyYA1Ka8kBZ2AS3pCupcSUgAaPSoURZjXbt2jfz8/Mjd3Z06dOhAw4cPp06dOpG7uzv5+PjQlStXzOYYjUZ69513CAB1rCSkxV1Bi7qA2lXIjTV+/PjCv3CPxRo+bBgBoC5VBLS0G2jBa6CW5XJjTZ482W6xmKnNmzeTk5MTBQYGUo8ePWj48OHUtGlTkkqlFBUVxb9nWaEUtF7jYrIEuXr1Kg0ZMoTkUikBIE83Nxo3bhw9ePDA0amxV0hMTAy1bd2KABAAqlg+lH755ReLhWSe1NRUmjhxIvl4exIAcpJIqF/fvnTu3LlizPzpNmzYQI0bNsg/r4jwqjR37lwyGAxW5yQkJND7779PHu6uBIAUchkNHzaMrl27ZnWO0WikP//8k+rXrZMfK7JGBC1evPip/WJv375Nb731Frm4uBAAcnZ2ptGjR9PNmzefGmvZsmVUO6pmfqw6tSJp5cqVdu9NazQaacGCBVQjIjw/VsP69WjNmjV2jcPMHT9+nF5//XUSi3N7ewYEBNDkyZMpMzPT0amxEq6g9RrvzV0C6fV6ZGdnw8XFxeIjB4wVB7VaDZ1OBxcXlwLvYmM0GqFUKiGXy1/YVmIqlQoGgyF/YUtBGAwGKJVKODs757fuKYicnBwQkdXFNU+L9bw//9nZ2RAIBM/cZccesrOzIRQKi2SXHWadVquFWq2Gq6vrC7OzFCvZClqvcTHJGGOMMcbMFLRee+4FOIwxxhhjjOXhYpIxZhOtVovs7Gw8z5sbRqMRCQkJUKvVz/7gx2g0mueOVZz0ej0SEhKeexcqtVr93D0AjUYjsrKyYDBY6WVkRWpqKlJTU59rjsFgQFZWlknv0BdNTk4ONBor/a
rYMxERlEoldDqdo1OxSqvVQqlUvrA//4yLScbYc9q5cyfatm0LmUwGFxcXVKxYETNmzHjqL6P4+Hg0atQIMicxgoKCoFDIERpaFhs3bnxqrG3btqFly5b5sapWrYpZs2Y9dyFVVM6dO4eoqCjIpRIEBQVBLpOiSpUq2LNnz1PnrVmzBg0aNIBcLodCoUBUVBSWLVv21F+Wd+/exTvvvAMPDw+4ubnB3d0dY8aMwe3bt63OMRqNGDNmDFxdXeHt7Q1vb2+4urri3XfffWqBePPmTYwaNQrubq5wc3ODl6c73nvvPSQkJDzzNSkORqMRc+fORUR4VTg7O0Mmk6F50ybYtGmTo1MrMdRqNb7//nuULVsWrq6ukMlkeO2113Dw4EFHp5Zv//796NSpE+RyOVxdXVGuXDn88MMP/MfDi6hIlwFZwau5GSuZ5s+fTwKBgEJDQ6lPnz40ZMgQqlu3LgmFQurcuTPpdDqzObGxsaSQSUghAb1TF/RHNOi7VqBgN5BIAPrtt98sxpoxYwYBoPLly1Pfvn1pyJAhFBUVRUKhkHr27El6vb6oT/epjhw5QlKxkNyloA8b5p7XF81Avs4gsRC0du1ai/M+/fRTAkBVq1alAQMG0KBBgygiIoIA0JgxYyyusr5+/Tr5+/uTm5tbfmugjh07kru7O/n6+lpsDWQwGKhmzdxV3GFhYTRw4EAaOHAgValShQBQ3bp1LeZ38eJFKuXlSf5uYvq8KWjV66CJjUDeziIKCvCj+Pj4Qr1uhWUwGKh/v74kEICiq+a2IZrbGdS4jIgA0I8//ujQ/EoClUpFzZo1I4lEQg0aNKBhw4ZRr169KDg4mEQiEf3111+OTpFWrlxJQqGQQkJCqFevXjR06FBq0KABicViat26NanVaken+Erg1dyMMbu6e/cuQkND0bBhQ/Tp08dkC7cLFy7g999/x2+//YYxTzSnrly5Eh7evoYjw4DKpf5/PEcHtF8BHEsQIj0rGzLZ//cZvHbtGipXroxWrVqhR48eJitTT506hblz52Lp0qUYOHBg0Z3wMwT4+0KUk4Qjw4HSj13G0lRA0yXATaUUGVk5Jq/TkSNH0KBBA3Tv3h3t27c3+Xz79u3DypUrsXXrVrOxli1b4tKlSxg/frzJNTMrKws///wzQkNDceDAAZM5P/zwAz788EP07t0bLVq0MBnbsWMH1qxZg99++w1vv23aJL1enVrIvnMWewca4P3Ywu+ELKDxEhEq1W6BbdtjnuelsqtVq1ahb9+++PN1oFe1/x8nAj7eCUw9mLvzC+8IZt23336LL7/8Eu+99x4qVqyYf9xgMGDJkiU4f/487t27B09PT4fkl5ycjODgYERGRmLgwIEmP0NXr17Fr7/+iilTpmDChAkOye9VwgtwGGN2tWDBAkgkEkRHR5vtBVytWjXUrFkTv//+u8nx27dv4/q1a5jQ0LSQBACFBJjeHtDojfj2229NxubNmwdnZ2d07drVrMVJVFQUwsPDMXPmTPud3HM6fPgwHiQm4YvmpoUkAHjKgWmtAWWOBosXLzYZmz17Nnx9fdG2bVuzz9mkSROUKVPG7DW8evUqdu/ejc6dO5tdzF1dXdG5c2ccPHgQ58+fNxmbMWMG/P390bx5c7NYrVq1go+PD3744QeT46dOncKxE6cwpYVpIQkAga7A5CYGbP9vB+LirOwjWQxm/z4TLcsJTQpJIHcv9C+aAz4uIsydO9chuZUERqMRs2bNQt26dU0KSQAQiUTo0aMHdDodli1b5qAMgSVLlsBgMKBHjx5m15rKlSujdu3a+P333/kZyhcIF5OMsQI5d+4cQkNDrfYODA8Px6VLl0yeZ9yzZw+MBLQrb/lzRgUAnrLc4uxxZ8+eRcWKFa1u0RoWFmZWPBWn//77D4D182pbHhAA2Lt3r8nxM2fOoEqVKma/IAFAIBCgatWqOHv2rMnxc+fOAch9fS2pVi23qnry9UhOTkZERITFfoMCgQ
ARERFmezrnxWpfwfJ55R2/cOGC5Q8oBmfPnUW7cpaf95SKgVZlDTh35nQxZ1VypKenIyEhwer3k7u7O0JCQvK/Fxzh7NmzKFu2rNVer+Hh4bh16xays7OLOTNmDReTjLECkcvlT115rFQqIZVKTQold3d3AECqlWlqPaDSw6xAVSgUT/1FkZ1t+rZ4cXN1dQVg/bzS1LlbwDzZINyW88p7bazNyzv+5DyRSPTUWEql0qyozYuVkmN5Tsqj83Xkay+Xyay+7gCQohJAJi/6xuwlVd7Xztr3BhE5/OdLLpc/tXtDXgN+a39ssuLHxSRjrEC6du2K+Ph43Llzx2zMYDDgyJEj6NKli8mdsE6dOkHuJMb8U5Y/56rzuQXlu+++axbr2rVrePDggdkcnU6HY8eOoVu3boU6n8IYOnQoJCJggZXzWnAKEAqA9957z+R4t27dcP78eWRmZprNUalUOHnyJKKjo02ON2/eHC4uLmbPRObZv38/FAoFWrVqZXK8Vq1aOHHihMWiQalU4tSpU6hfv77J8TZt2kAmdXrqeXm6u6FJkyaWP6AYdO3+OpZfEEOtNx+LTwN23gC6de9e/ImVEAqFAq1bt8ahQ4csruiPjY1FYmKiQ3++unbtioSEBNy4ccNszGg04tChQ+jQoQMXky8QLiYZYwXSrVs3VKxYEXPnzsXNmzfzj2dmZmLRokVISkrC+PHjTeaIxWL06tsfay4Bk3cD2Y/aMBoJ+PsK8PZWIDgoEG3atDGZ16tXL4SEhGDOnDkmxWtGRgYWLFiArKwsjBs3rsjO9Vk8PDzQolVbzDwO/HIY0DwqbPRGYNlZ4LPdQHh4NbNFIMOGDYObmxt+//13JCYm5h9PTk7G7NmzIRKJzBYwubi44L333kNMTAz27t0LvT43mMFgwP79+7Ft2za8/fbb+XeB8/z888/Q6/WYOXOmydvZDx8+xG+//QYiwvTp003meHl5YdSo0fhqvwCLTueeDwBoDcBvR4HpR4H3x31QLFsyWvP+++8jVS1EjzVC3HusJr/wEHjtLxGCAv3Rt29fh+VXEkycOBE3b97EihUr8v/YICJcu3YNixcvRlRUlNkfJ8Wpffv2qF69OhYsWIC4uLj8O5RKpRLLli3D3bt38eGHHzosP2ZBEa8qt4hbAzFWMt28eZOqVq1KACg4OJgqVqxIYrGYZDKZ1VY4BoOBWrZsSQDIxQnUKBhU2g0EgHxLedH9+/ctzouNjaXy5csTAAoJCaEKFSqQSCQiZ2dn2rRpU1GeZoHodDqqWbMGASAPWe55+TnnnleZkGDKysqyOO/UqVMUEBBAACg0NJTKlStHAoGAvLy8aP/+/Rbn6PV6GjFiRG4sDw+qXLkyeXh4EAAaPHiwxZZMRLmtnEQiUf5rGBwcTABIJBLRkiVLLM7RarU0oH9/AkAB7mJqFioiHxdxbuui0aPJYDDY9oLZ0bZt28jN1ZlEQgE1CBFRzcDc/ELLBNPly5cdnV6JsHjxYnJyciInJyeqVKkSBQYGEgCKioqy+jNZnO7du0fVq1cnABQUFESVKlUiiURCUqmUli9f7uj0XhncGogxViT0ej02bdqETZs2QaPRIDIyEoMHD4aXl9dT5+3btw+ffvop7ty5A1dXV4wePRojR460uBglj06nw8aNG7F161ZotVrUqVMHAwcOhIeHh53PynZbtmzBV199hQcPHsDLywvjxo1D//79nzpHpVJhzZo12LVrF4gIjRs3Rt++feHs7PzUeRcuXMDSpUuRkJCAgIAADBw4ENWrV3/qnOTkZEyYMAH79u2DQCBA8+bNMW3atGd+vc6cOYNly5YhMTERQUFBGDx4MMLCwp46pzhlZGRgxYoVOHr0KMRiMdq1a4fu3bvzW5/PITExEUuWLMGFCxegUCjQrVs3tGvX7qk/k8XJYDBg27Zt2LhxI1QqFSIiIjBkyBD4+Pg4OrVXRkHrNS4mGWOMMcaYGe4zyRhjjDHGihwXk4
wxxhhjzGZiRyfA2Mvs7t27+Oeff5CdnY2wsDC0b98eIpHI0WkV2vXr17F169b8ZyZbtGhRZM9Zffnll5g3bx50Oh0aN26M1atXQywumkvXxYsXERMTA4PBgPr166Nhw4YWm347ilKpxIYNG3D//n34+/uje/fu+T0vn+bEiRP5z0w2a9YMUVFRxZAtY+yVUfRrgczxam72slOr1TRs6FASCgUkEQnIXZ67orZMcBDt2bPH0enZLDMzk97o8ToBICexgFxluedVuWJ5OnXqlF1j7d+/n8Ricf7qY4lEkvv/hUKaNGmSXWMlJSVRuzatCQDJJEJylgpzV7bWrE6xsbF2jWWr33//nVxcXEggEJCzs3P+/06fPt3qnNu3b1P9+vUJAEmlUnJyciIA1LhxY7p7924xZs8YK4l4NTdjDtSndy9sWL8W01oZMaQm4CoFTiQAH+4Q4sh9CQ4eOozIyEhHp/lciAhtWrXE8SP78UsbA/pUA2Ri4MBtYFyMCNeVCpw4eRrly1vZY/A5KJVKuLu7w8nJCX369EGtWrUgFosRGxuLVatWITExEQsXLsTgwYMLHUur1aJBvTq4G3cJM9rqEV0VEAuBHTeA9/4TIVPojdNnz8PX17fQsWy1YMECjBgxAk2bNkWHDh3g5eWF1NRUbN++HXv27MGsWbMwevRokzmZmZmIjIxEVlYW3njjDURERICIcP78eaxevRpeXl44deqU1S3rGGOMF+Aw5iDnzp3Dn3+txpyORrxbL7eQBIDagcCWvkaEuOrx7TdfOzZJG+zatQs7d+/Bqu4GDI0E5BJAIACalAFi+hsgIxV++uknu8Rq164djEYj3nrrLdSvXx8SiQQCgQCVK1fG+PHj4eTkhLfeessusdatW4dTZ85hc289elcDnES5u9e0LQ/s6m9ARloyZs2aZZdYttDpdPjss89Qr1499OvXL7+lj5eXF/r06YNGjRph8uTJ0Gg0JvMWL16M27dv47333kONGjUgFAohEolQs2ZNvPvuu4iLi8Py5csdcUqMsZcMF5OM2dmqVavg4ypGvwjzMZkYGB1lwN8bNz513+QX0R9//IHKPmJ0qGA+5iEDhlbX44+V9ilOjh49itKlS6NSpUpmYy4uLmjYsCHUarVdYv2xcgWalBGidqD5WIAr0CfMiD9WLLVLLFvs3bsXDx48QOvWrS2Ot2rVCklJSdi1a5fJ8RUrVqB69eoWe/L5+/sjIiKCi0nGmF1wMcmYnaWmpiLYTQCJlXU25TwBg8FocX/mF1lqaipC3fWwth6lnCeQkamEwWAodCwieurbyj4+PrDXEzqpKcko52G+R3Gecp5ASkqqXWLZIjU1N3apUqUsjucViykpKSbHU1JSrM7J+3xPzmGMMVtwMcmYnYWGhuJKkgEZVm6cHb0HuLoo4O3tXbyJFVJoaChOJ4qhtVIrHr0HBAcF2GW1ulgsxo0bN2A0Wi7ybty4YbfV46HlKuDofTGs1aZHEwQIDQ21Syxb5MV+fD/0x8XHx5t8XJ7y5cvj1q1bVj/vzZs37fJ8K2OMcTHJmJ0NGjQIWqMA3x0wH7uTAcw9LcaAgYNL3LZvQ4cORWKWHjOPmY9dTgJWXhBh2IiRdonVr18/pKen48AB8xfxzp07OHnypN2K8WHDh+PKQz1WnjcfO3IX+DcWGP7mKLvEskXt2rURERGBzZs3Q6fTmYzlbW1ZqVIlNGzY0GRs+PDhuHr1Ki5dumT2Oc+fP4+4uDgMHz68SHNnjL0aeDU3Y0Xg+++/x8SJE9EzHBhZC/Bzzl0d/MMRMcRufjhy7AT8/f0dneZz++CDD/Dzzz9jSE1gaCTgLgU2XwN+PCJCQJmKOHDoCNzd3e0SSyqVQqvVomnTpqhfvz6kUinOnj2L7du3Q6fT4dy5cwgPDy90HCLCwAEDsGrVHxhdi9C/OiAVAxsuAz8fE6FGZB3s2LUbMpnMDmdlm/3796NNmzbw9/dHmzZtEBQUhPv37y
MmJgb37t3Dtm3b0KJFC5M5er0enTt3xq5du9CyZUvUqlULRIQTJ05gz549aNu2LTZu3PhS9D1ljBWNAtdrRdqgyAruM8leBYsWLaLyoWUIAAEgsVhEvXr2LNH9/YxGI02fPp1KB/rnn5dM6kRDBg+mlJQUu8ZSqVRUqlQpEgqF+bEEAgE5OTnRoUOH7BpLr9fTV199RT7envmxXJzl9Pbbb5NSqbRrLFsdOnSIGjdunJ8fAGrYsCHt37/f6hy1Wk0TJkwgNze3/DkeHh40ceJE0mg0xZg9Y6wk4j6TjL0AjEYjzp07h+zsbFSsWNGhvQrtSa/X4+zZs9BoNKhSpUp+u5qikJycjI8//hjZ2dl4++230aBBgyKLpdFocO7cOej1elSrVq1Au8sUt7i4uPwdcCpUsLC03oKcnBycO3cOAoEAERERUCgURZwlY+xlUNB6jYtJxhhjjDFmhpuWM8YYY4yxIsfFJGOMMcYYsxkXk4y9ooxGI9auXYu2rVuhbHAQakSE49tvv0VSUpLdY+n1evz5559o1aI5ygYHIbJGBL7//vv8htzWXL9+He+//z6qVCyPcmWC0atnT+zfv9/u+dlKo9Fg0aJFaNSoEUJCQlCrVi38+uuvyMrKcnRqjJV4d+7cwaRJk1ClShWUKVMGXbp0wbZt2+y2YQGzH35mkrFXkF6vR+9ePbFu/QY0LiNCk2AD7mYCa68I4eHpjZ2796Jq1ap2iaXRaBDdvRu2bN2G5qEiNAgy4FYGsO6KEH5+/ti1Z5/F5tlbtmzB69Hd4SIxomcVPZydgH+vi3HloR5ffvklPv/8c7vkZ6usrCy0a9cOR44cQXh4OEqXLo2HDx/i7NmzqFixInbv3l0i2z8x9iI4dOgQOnToAL1ej6ioKDg7O+Py5cu4ffs2xowZg5kzZ0JgbTsuZje8AIcxZtWUKVMw+fNPsbYHoWuV/x+/nwW0/UMEvVsoLl6+apddZj755BP89MNUbOxpRLvHFh/fyQBarRDBJSgMJ0+fNfnFkJiYiHKhZdE6RIM/XyfIJbnHiYBv9wOf7Qa2bt2K9u3bFzo/Ww0fPhyrVq3CO++8g3LlyuUff/DgAaZPn446depg+/btDsuPsZIqJycHZcqUgZeXF8aMGQO5XA4gtyfs/v37sXLlSixZsgSDBg1ycKYvP16AwxizSK/X4/ffZmBoDdNCEgACXIG5HQ24EnsdMTExhY6lVqsxd/bvGFPLtJAEgGB34Pf2Bpw+ex4HDx40GVu4cCGMei0Wd/1/IQkAAgHwSROgdmkRZkz/pdD52SolJQUrVqxAu3btTApJAPD390e3bt3w33//4erVqw7KkLGSa9WqVUhJScGgQYPyC0kAEAgEaNq0KSIiIjBjxgwHZsiexMUkY6+Y+Ph4JDx4iDesbB7ToDQQ6C7Gvn37Ch3r8uXLSEnLwBthlsdblQM8FCKz5yAP7N+P1qFGeMnN5wgEwBtVDNi/v/D52erEiRPQaDSoVauWxfG845a2g2SMPd3+/ftRtmxZlCpVyuJ4VFQUTp8+jZycnGLOjFnDxSRjr5i8t5ONT3nAxUiwy/NIz4pFlPvvyVgCgQAGY9HnZ6v88zJaTjLv6SF+poux5ycQCJ66yIZ/vl48XEwy9ooJDQ1FSOlA/HnB8vjeW8CDTD1atmxZ6FhhYWHwLeVlNda260CGymC2r3SLli2x86YQD7PN5xABf14SoUWLwudnq7p160Imk+HEiRMWx48fPw6BQIBmzZoVc2aMlXwtWrTAzZs3kZiYaHH8xIkTqFu3rslb4MyxuJhk7BUjEonw3tgPsPScACvO5RZneeLTgDc3i1G9WphZgWcLJycnvPXOe5hzSoB1l0xjxaYAb20To0G9Oqhbt67JvCFDhkAml6PfBiEy1P8/rjcCk3YCZ+8b8P7YcYXOz1YeHh4YOnQotm/fjsuXL5uM3b59G3///Tdee+01i6vUGWNP17NnTwQEBGDx4sXIzMzMP2
40GvHff//h0qVLGDfOcT//zByv5mbsFWQ0GjF0yBAsXbYMEf4iNA024G6WAJuvAaWDgrBz916zhSW20uv16Ne3D1avWYuoIBEaBBpwK0OALdeACuVDsWPXHgQHB5vN27NnD7q81gmk06BbZQOcJcDmODHupuvx888/Y+zYsXbJz1YqlQpdunTBjh07UKFCBZQuXRqJiYm4fPkyIiMjERMTA29vb4fmyFhJderUKbRr1w6ZmZmoXr06nJ2dceXKFSQmJmLSpEn49ttv+W3uYsCtgRhjT0VEiImJwfx5cxF75TLc3D3Qs3cfDBo0yO4/l0ajEVu3bsWC+fNw43osPDy90adff/Tv3x8uLi5W5927dw/z5s3D1s3/QqvRoE69Bhjz1luIjIy0a3620uv12LhxIxYuXIjbt2/Dz88PAwcORK9evSCTyRydHmMlWlJSEhYuXIgNGzZApVKhRo0aGD16NBo2bOjo1F4ZXEwyxhhjjDGbcZ9JxhhjjDFW5LiYZIwxxhhjNhM7OgHGCuPevXu4cOECpFIp6tevX6TPqd2+fRuXLl2CQqFA/fr14eTkVGSxitP169dx7do1uLu7o169ehCJRM+co1KpcPToUWg0GkRERCAwMLBAsa5evYobN27A09MTderUKVAs9n9GoxFr1qzB1atXUbFiRfTq1csuW146GhHhxIkTSE5ORkhICMLDrXTUZ4y9mMgBMjIyCABlZGQ4Ijx7Cdy7d4+6detGQqGQABAA8vLyom+//ZYMBoNdY928eZM6d+pIAoEgP5ZvKS/68ccfyWg02jVWcbp48SK1bN4s/5wAUHBQAM2fP9/qHIPBQF999RV5ebjnzxGJhPR6dHe6f/++1XmnT5+mxo0bm8QqU6YMLV++vChO7aU0bdo0UsjlJq+hXC6nKVOmODq1Qlm/fj1VrFjR5Lzq1KlDhw4dcnRqjL3yClqv8Z1JVuIkJSWhcePGyMjIQO/evVGtWjXk5OTgwIED+PTTT5GQkICZM2faJVZCQgIaN6wPkSoJ8zsT2pQHknOAuSdSMX78eCQlJWHq1Kl2iVWcYmNj0aRxQ/hJlFgZDTQJAe5mAjOP38eIESOQmZlpsY/b6FGjsGDhArxThzA0EnCXApuvGfHNjn/RtPEZHD563Kwdzvnz59GkSRN4enpixIgRKF++PFJSUrBz504MGDAASqUSo0aNKq5TL5GmTp2KSZMmoUKFCujQoQOCgoKQkJCAbdu24eOPP4ZWq8XkyZMdneZz++uvv/J/hseNG4f/tXfn4VGVd//HP5OZLIRsLEkgmAiyCYpAACGyiywCFRQBK0iwD3ErIo9iU4pU/eFPxa3FihYshtWlxEJVFhEIOwiEfRFEAgQSIAnZAwmZOc8fSGyapNKTOJNJ3q/r8pKc+8z5fmcmy+c6c5/7hISE6PTp01q9erX69u2rhIQERUVFubpNAD+Dq7nhdmJjY/Xee+9p2rRpZe7dun79en322Wc6dOhQlXxUNnHiRH224K/a/5hdYf6lx17fIv1hvUU//PCDmjVrVulazjRq5EglJixT4gS7gv5tZsCzX0sf7PXSuZRU1a9fv2T73r17FRkZqQ+GSE90Lv2Yk5lS+7lWTZ4yVTNmzCg1NnjwYO3fv1+xsbGlpiEYhqElS5Zo7969Sk1N/Y9LBNVmDodDdev66uaIm/XM5MmlpgY4HA69++4snfzhpPLy82Wzuc/5gaKiIoWHhys8PFwTJkwo9XH91atX9fbbbys0NFTbt293YZdA7cbV3KiRDMPQvHnzFBUVVSZISlKvXr0UGBiojz76qNK1iouLtXBBnB7vWDZIStKkrlJgHQ/FxcVVupYzZWZmatnyZZrcpWyQlKSpPSR78VV98sknpbbHxcWpSZBNEyLLPuaWetK42+2a9+GcUtuvnz275557ysxntVgsGjx4sAoKChQfH1/p51VT/e1vf9OVK4UaMnRomTmmHh4eGjr0VyosKtLs2bNd1KE5q1
at0sWLFzV06NAy8z49PT01YMAA7dixo8wdhgBUP4RJuJXCwkJlZGSUe8cUSbLZbAoLC1NycnKla2VnZys3r0CRjcsf9/WUbm1oqZJaznT+/HkVF9vVsYLnFVxXCq9n05kzZ0ptT05OVruGxbJV8FujU5iUeiFNdru9ZNu5c+dkGEaF71f9+vUVEBDgdq+hM10PUxW9hhEREZKk7777zmk9VYXk5OSSn9fyXH9efG8A1R9hEm7F29tbfn5+unDhQrnjDodDaWlpCg4OrnQtf39/eXt56lh6+eNX7dc+3g0JCal0LWe6PqfxeEb54zmFUmqOvczzCg4O1veZNjkqmBhzLF2qFxRQ6uzZ9fehovcrLy9Pubm5VfJ+1VTXQ2RFr+H17TfddJPTeqoKwcHBKi4uVnp6+T9gFy9elOR+P19AbUSYhFuxWCwaO3astm3bpvz8/DLjiYmJSk9P1yOPPFLpWl5eXho1arT+utemnMKy4wv3SxdzizV27NhK13KmkJAQDRxwj2bttKqwuOz47J1SkV166KGHSm0fO3asfsgo1vJyToCl5UsfHbDqkXHjS21v2rSpunfvrvXr15c6Y3nd2rVrZbPZ9OCDD1bmKdVoEydOlKenp9asWaN/n+JuGIa+/nq1bDaby+9V/t8aOnSo/P39tWbNmjJjDodD33zzjdq2bav27du7oDsA/w3CJNxObGysbDab3nnnHR06dEgOh0P5+fn65ptvtHDhQt1///3q2rVrldSa9sILyrb76O5FVq09KTkMKaNAem2z9ORKix4ZO1bt2rWrklrONOOVV3U806pBH3to6xnJMKSUXOkP66RpCdLkyf+rJk2alHpMz5499auhQzR2uYf+tF3KuiLZHdKK41LfRVZ5+ARqypQpZWq9+uqrSk5O1uzZs5WUlCTDMHTp0iXFx8dr1apVio2NLXf+K67x8vJSTEyM9uzZo3nz5iklJUWGYej8+fOaPz9Ou3btVnR0tHx9fV3d6n+lbt26mjFjhjZu3KglS5YoLS1NhmEoOTlZc+bM0dGjRzVz5kxZLBZXtwrg5/yiCxRVgHUmUVkHDx40OnbsaEgqWWvS09PTiImJMS5fvlyltRITE412t7W5VstybR08L0+b8fTEiUZRUVGV1nKmhIQEo8UtTUs9L986PsYLL7xQ4VqdBQUFxv/85jeGzWY1JBmWHx/XObKDceTIkQprrV692oiIiCj1fvn5+RmvvPKKW6/V6UwxMTGGzXr9dbf8uMan1Rg/fryrWzPN4XAYf/7zn43AwMBS3xthYWFGfHy8q9sDar0bzWssDQS3ZRiGdu7cqX379snHx0cDBw5Uo0aNfrFa27Zt06FDh+Tr66tBgwbViHl+DodDGzZs0PHjxxUQEKAhQ4YoMDDwZx+XkpKiNWvWqLCwUB07dlSXLl1+9gyS3W7XunXrdPLkSQUFBWnIkCHy9y/nMnlUKCcnR6+//rqSkpLUtGlTTZ06tUb8Di0oKNDKlSuVlpamiIgIDRw40K2WOQJqqhvNa4RJAAAAlME6kwAAAPjFESYBAABgGmESbuvs2bN68cUXNWzYMI0ePVqLFy9WYWE5a/igQitXrlTnzp0VGhqqZs2aadq0aSoqKnJ1WwAAN8KcSbilefPm6YknHlcdm9Qz3K7MQg9tP+NQ82Y3a/WatWrRooWrW6zWHA6H+vbtq02bNqmej3RXuHQmWzp4UfKvW0ff7kpUmzZtXN0mAMCFbjSvcbkc3E5CQoJiYmL0WKShN/tL/t6S5NCRNOn+pWd178D+Onz0mLy8vFzdarX1+OOPa9OmTZrRV3r+Lsn7x98EW89Iwz69rB53dVNGZrZrmwQAuAU+5obbeevNN9ShsYfeH3I9SF7TNlhaOsKuEydPafny5S7rr7pzOBz6eNEC3X+r9EKvn4KkJHWPkObdJ13KytGCBQtc1yQAwG0QJuFW7Ha7Vn+9RtHt7PIoZ1nDO0KlyCZWrVixwvnNuYktW7
aooPCqHu1Q/vjQVlKgtxQXF+fUvgAA7okwCbdit9vlcDgU4F3xPgFeBhfi/AfX72le0Wto9ZDqeonXEABwQwiTcCteXl66rU1rrThR/t1W0guk7WelTp06Obkz9xEVFSWbh7Ti+/LHD164dp/uHj16OLcxAIBbIkzC7fz26We07Dvpn9+V3l7skCatssjiYdP48eNd0ps7CAoKUsdOXfSXndLOc6XH8ouk366UvGweevnll13TIADArXA1N9zOY489pvXr1ur+v/9DQ1t5aHALhzIvS/MP2nQy09DHHy+uEffN/iV99dVXatXiFnX/KF+jbpN63yydzZHmJl47u/vue3+Rr6+vq9sEALgB1pmEW7Lb7YqLi9MHs/+ifQcOydvLU/fdN0zPTZmiLl26uLo9t3Dp0iVFR0dr3TerdbmwWDYPqUWrW/XOO+/o3nvvdXV7AAAXu9G8RpiE2zMMQxZL+XMocWMcDoc8PJj1AgD4yY3mNf56wO0RJCuPIAkAMIu/IAAAADCNMFlLOBwOpaamKi0tTS6Y2VDtZGVl6dy5cyoqKnJ1Ky5XVFSks2fPKjub2ye6g4KCAp09e7ZkvVAAcDXCZA139epVvfHGG2oWHq6wsDCFhISoU/v2+vjjj13dmkts2rRJA/rfo3r16ummm25SaEhDPfvss8rMzHR1a06XkZGhyZMnKyS4gcLDwxUUFKR7Bw3Qli1bXN0aynHixAk9Mnas6gUFKTw8XPXqBWnMww/r+PHjrm4NQC3HBTg1WHFxsR584AGtWLFCI3zqaIC3jwpl6PMrV7TucoH++Mc/1qq1BD///HONHj1KHRpZ9ESkXWH+0sZT0py9Vt3UtIU2bdmm+vXru7pNp0hPT1fP7lE6fzZJT3S0q+ePSwP9dY9VBy9K8fGfa9iwYa5uEz86fPiwevXsLj/l67editUuRDqcJs1OtCnLXkcbNm5W+/btXd0mgBqGq7mh+fPn69FHH9WC+g3Uz6dOqbG/5OZoZm6O9u3bVyv+COXl5emmJo014KZ8fTLCkPVfzsl/ly5FxVn1yP88pXfffdd1TTrRk08+qaWLPtT2R+1q2eCn7cUOaVS8RRvPB+jsuVTVqVOn4oPAaXrcFaWsk7u0Odquev/ylmRfkXovtMor7A7t3L3HdQ0CqJG4mhuaM3u2+tSpUyZIStITfv5q5OWluXPnuqAz5/vss8+Um5evtwaUDpKSdGtD6alIuxbM/0iXL192TYNOlJ+fr0UL5+vpzqWDpCTZPKQ37zGUmZWtpUuXuqZBlHLo0CFt3b5DL/cqHSQlKdBH+n+97dqVuFd79+51TYMAaj3CZA125OhRdff0KnfM02JRV6tVRw4edHJXrnHkyBG1aGhTRGD54/1ukXJy85WSkuLcxlwgOTlZ+QVXdHez8seb15ea1vfUkSNHnNsYynX06FFJqvD9uueWa//n/QLgKoTJGqyur6/S7fYKx9MMQ361ZJqBn5+fMvINXa3g5biQ99N+NV3dunUlSRcquBi4yC5dKnDUitfCHfzc+3W+Fn3vAqieCJM12IjRo/V5UZHyHY4yYyeuXtW2K1c04sEHXdCZ8z3wwAPKyC9WfDknbxyG9Nc9Huoe1U2hoaHOb87JwsPD1aVTR/010UPlzZj+9JCUfdmuESNGOL85lNGnTx/VCwzQB7vKH/9glxTgX1f9+vVzbmMA8CPCZA02efJkFXp5anxWpk4WX5V07daDuwoLNT4nSy2bNdPo0aNd3KVztG/fXvcNHarHV1q19LBk/zFfX8iTHvtS2nTKoT+8MN21TTrRC398SetOOvTEV1Laj2e8ih3SJwel3662asQD96tNmzaubRKSJF9fXz0f+3u9u1N6fYuU/+PSqAVXpbe2SW/vkJ597nnOTAJwHcMFsrOzDUlGdna2K8rXKps3bzZCGjQwJBmt6tQxIrx9DEnG7W3aGElJSa5uz6lycnKMIYMHGZKM0ACb0T7M0/C0Wgwfby/jo4
8+cnV7Tjd37lzD28vT8LJ5GO3DPI0Qf5shyRj2q6FGXl6eq9vDv3A4HMbzzz9vWCwWw9/HanQI8zQC6lgNi8ViTJ482bDb7a5uEUANdKN5jaWBaoErV64oPj5e3377rWw2mwYOHKgBAwbU2vsx7969W/Hx8crNzVXr1q01duzYWrO+5L/LyMjQokWL9P333ysgIEAjR45UZGSkq9tCBU6dOqXFixcrJSVFjRs31tixY9WsWQVX5gBAJbHOJAAAAExjnUkAAAD84giTAAAAMM3m6gYAoDpwOBz68MMP9emnn8put6tfv36aOnWqvLzKX/i/MgzD0KZNm5SQkCDDMNSjRw/169ev1s5jBuDemDMJoNbbvXu37r67r3Jz8+TrW0ceHlbl5eXJ28tLi5cs0YNVuB7rqVOnNOL+Ydqz74BC/G3ysEjnc4rV9tZW+nzZP3XrrbdWWS0AqAwuwAGAG3Dp0iU1aRImb29vjRsXrbZt28pisVy7cnrRIqWeP68dO3aoc+fOla6Vm5urju3bycg5p7mDi0tukbg1WXpipVWXVF/7Dx5WcHBwpWsBQGVxAQ4A3IDf/e53unKlUM88M1m33367PDw8ZLFY1KxZM/3vs8/KZrNp0qRJVVJr4cKFOnX6jNY8XKx+t0gWy7X/ekRI34yxKyszQ3Pnzq2SWgDgLIRJALXaF198oVatWqpJkyZlxvz8/NStWzclJiZWSa2/f/qJBreUmpezrGljf2lkG4c++2RJldQCAGchTAKo1QoLr6hBg4YVjterV092u71KamVnZSrcv+KZReEBUnZWVpXUAgBnIUwCqNWCg0N0/PgxORyOcse/P35cvr51qqRWy9a3ass5myqaqb452aqWrVpXSS0AcBbCJIBa7bnnnlNGxiVt3ry5zNh3332nw0eO6MEHR1ZJrZjHHteB1GItOVh2bMVxadMpux574skqqQUAzsLV3ABqNYfDoTvatdPhI0fUtWtXdevWTTabTXv37tXGjRtVLyhIp8+cka+vb6VrGYah8dHRWrxksR5tb+ih2yWrRYo/In2410ODhwzR5/9YJqvVWgXPDAAqh6WBAOAGFRcXa/To0frqyy9VdPWqJMlqtapbt25auXJllf6estvteuedd/Tun9/R2ZTzkqTQ4AZ6auIkTZ06VZ6enlVWCwAqgzAJAP+loqIiJSQkqLCwUH369PlFfz8VFxfrxIkTMgxDzZs3/0XutAMAlUGYBAAAgGksWg4AAIBfHGESAAAAphEmAQAAYBphEgAAAKYRJgEAAGAaYRIAAACmESYBAABgGmESAAAAphEmAQAAYBphEgAAAKYRJgEAAGAaYRIAAACmESYBAABgGmESAAAAphEmAQAAYJrN1Q0Aznb58mUlJCQoNzdXrVu3VocOHVzdEgAAboswiVrDMAy9+eabev3V/6/M7JyS7V06ddScD+epY8eOLuwOAAD3xMfcqDWmT5+u2NhYjWmVo6O/lbJ/L33xkHQ15YD69O6pw4cPu7pFAADcjsUwDMPZRXNychQYGKjs7GwFBAQ4uzxqoZSUFEVEhGt6D4de7FN6LLdQivybVR16D9fS+HiX9AcAQHVzo3mNM5OoFZYsWSJvqzS5W9kxf29pUme7li1fpqysLKf3BgCAOyNMolZITU1VRD2rAn3KH789RLLbHUpPT3duYwAAuDnCJGqFxo0b60ymXTmF5Y8fTpOsVg81bNjQuY0BAODmCJOoFcaMGaNCuzRrR9mx3EJp1i6r7h9+v4KCgpzeGwAA7owwiVohLCxMsbG/1x83SJNWScfSpbwi6avjUp+FVl0s9NFLL7/s6jYBAHA7rDOJWuOVV15RQECAZr72qv6y86d1JjtHtlPCl/N02223ubA7AADcE0sDodYpKCjQ+vXrS+6AExkZ6eqWAACodm40r3FmErWOr6+vhg4d6uo2AACoEZgzCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAA
AAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMM3miqKGYUiScnJyXFEeAAAAP+N6True2yrikjCZm5srSQoPD3dFeQAAANyg3NxcBQYGVjhuMX4ubv4CHA6HUlJS5O/vL4vF4uzyAAAA+BmGYSg3N1dhYWHy8Kh4ZqRLwiQAAABqBi7AAQAAgGmESQAAAJhGmAQAAIBphEkAAACYRpgEgBtksVi0fPnyCsf79OmjyZMnO62f/2TDhg2yWCzKyspydSsAajjCJIBqLS0tTU8++aQiIiLk7e2tRo0aaeDAgdq6daurW6s2qlOIBVD7uGTRcgC4USNGjFBRUZEWLFigW265RRcuXNC6deuUkZHh6tYAAOLMJIBqLCsrS5s3b9bMmTPVt29f3Xzzzbrzzjs1depU3XfffaX2mzBhgoKDgxUQEKC7775b+/fvLxl/6aWX1KFDB82ZM0fh4eHy9fXVqFGjlJ2dXbLPrl271L9/fzVs2FCBgYHq3bu39uzZU6n+CwsLNWXKFDVp0kR169ZV165dtWHDhpLx+fPnKygoSF9//bXatGkjPz8/DRo0SKmpqSX7FBcXa9KkSQoKClKDBg0UGxur6OhoDR8+XJI0fvx4bdy4UbNmzZLFYpHFYtGpU6dKHp+YmKjOnTvL19dXd911l44dO1ap5wQA/44wCaDa8vPzk5+fn5YvX67CwsIK9xs5cqQuXryoVatWKTExUZGRkerXr58uXbpUss+JEyf097//XV9++aVWr16tvXv36qmnnioZz83NVXR0tLZs2aIdO3aoZcuWGjx4cMntX82YOHGitm/frk8//VQHDhzQyJEjNWjQIH3//fcl+xQUFOitt97SokWLtGnTJp05c0ZTpkwpGZ85c6aWLFmiuLg4bd26VTk5OaXmbc6aNUtRUVGKiYlRamqqUlNTS92qdtq0aXr77be1e/du2Ww2/eY3vzH9fACgXAYAVGPx8fFGvXr1DB8fH+Ouu+4ypk6dauzfv79kfPPmzUZAQIBx5cqVUo9r3ry5MWfOHMMwDOPFF180rFarcfbs2ZLxVatWGR4eHkZqamq5de12u+Hv7298+eWXJdskGcuWLauw1969exvPPPOMYRiGcfr0acNqtRrnzp0rtU+/fv2MqVOnGoZhGHFxcYYk48SJEyXjs2fPNkJDQ0u+Dg0NNd58882Sr4uLi42IiAhj2LBh5da9LiEhwZBkrF27tmTbihUrDEnG5cuXK3wOAPDf4swkgGptxIgRSklJ0RdffKFBgwZpw4YNioyM1Pz58yVJ+/fvV15enho0aFByJtPPz09JSUn64YcfSo4TERGhJk2alHwdFRUlh8NR8rHvhQsXFBMTo5YtWyowMFABAQHKy8vTmTNnTPV98OBB2e12tWrVqlRfGzduLNWXr6+vmjdvXvJ148aNdfHiRUlSdna2Lly4oDvvvLNk3Gq1qlOnTjfcxx133FHq2JJKjg8AVYELcABUez4+Purfv7/69++v6dOna8KECXrxxRc1fvx45eXlqXHjxqXmIl4XFBR0wzWio6OVkZGhWbNm6eabb5
a3t7eioqJUVFRkque8vDxZrVYlJibKarWWGvPz8yv5t6enZ6kxi8UiwzBM1SzPvx7fYrFIkhwOR5UdHwAIkwDcTtu2bUvmDUZGRur8+fOy2Wxq2rRphY85c+aMUlJSFBYWJknasWOHPDw81Lp1a0nS1q1b9f7772vw4MGSpOTkZKWnp5vusWPHjrLb7bp48aJ69uxp6hiBgYEKDQ3Vrl271KtXL0mS3W7Xnj171KFDh5L9vLy8ZLfbTfcKAJXBx9wAqq2MjAzdfffdWrx4sQ4cOKCkpCQtXbpUb7zxhoYNGyZJuueeexQVFaXhw4drzZo1OnXqlLZt26Zp06Zp9+7dJcfy8fFRdHS09u/fr82bN2vSpEkaNWqUGjVqJElq2bKlFi1apKNHj+rbb7/VmDFjVKdOHdO9t2rVSmPGjNG4ceP0j3/8Q0lJSdq5c6dee+01rVix4oaP8/TTT+u1117TP//5Tx07dkzPPPOMMjMzS84ySlLTpk317bff6tSpU0pPT+fMIwCnIkwCqLb8/PzUtWtX/elPf1KvXr10++23a/r06YqJidF7770n6dpHtytXrlSvXr306KOPqlWrVnrooYd0+vRphYaGlhyrRYsWeuCBBzR48GANGDBAd9xxh95///2S8Xnz5ikzM1ORkZF65JFHNGnSJIWEhFSq/7i4OI0bN07PPfecWrdureHDh2vXrl2KiIi44WPExsbq17/+tcaNG6eoqCj5+flp4MCB8vHxKdlnypQpslqtatu2rYKDg03P8wQAMyxGVU7OAYBq6KWXXtLy5cu1b98+V7dSaQ6HQ23atNGoUaM0Y8YMV7cDAMyZBIDq7PTp01qzZo169+6twsJCvffee0pKStLDDz/s6tYAQBIfcwNAtebh4aH58+erS5cu6t69uw4ePKi1a9eqTZs2rm4NACTxMTcAAAAqgTOTAAAAMI0wCQAAANMIkwAAADCNMAkAAADTCJMAAAAwjTAJAAAA0wiTAAAAMI0wCQAAANP+D1wbEHLwDuLiAAAAAElFTkSuQmCC",
"text/plain": [
"<Figure size 800x600 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAeIAAAH4CAYAAACWpO5eAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd3hUVfrHv3d6S++9k4SQHhJCLwpiReyK3dV11bWvru6qq67uriu67v7UVVaxYFfEBiLSIYWQRgiQhJBAIL1Pb/f3B97rTDKTuVOSGeB8nocHmEzuPXPvnfM973veQtE0TYNAIBAIBIJX4Hl7AAQCgUAgnMsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISYQCAQCwYsQISa4TVtbGyiKwtq1a709lHEsXLgQM2bM8PYwzkoWLlyIhQsXsv/3xefAF8dEIIyFCDHBIWvXrgVFUTb/PP7445NyzhdeeAFff/01p/eeOnUKzzzzDGpraydlLFNFYmKi1bUNDw/HvHnzsH79epvvX79+PZYvX47Q0FCIRCJER0fj6quvxtatW22+/4cffgBFUYiOjobZbJ7MjzLlfPTRR3j11Ve9PQwCwSUE3h4A4czh2WefRVJSktVrM2bMQEJCAjQaDYRCocfO9cILL+DKK6/EihUrHL731KlT+Mtf/oLExETk5eV5bAzeIC8vDw8//DCA05/rv//9L1auXIk33ngDv/3tbwEANE3jtttuw9q1a5Gfn4+HHnoIkZGR6OzsxPr167FkyRLs2bMHs2fPtjr2unXrkJiYiLa2NmzduhXnnXeeR8c+Gc8BVz766CM0NDTggQce8JkxEQhcIUJM4Mzy5ctRVFRk82cSicTh76tUKsjlck8Pa9LQarUQiUTg8abOcRQTE4NVq1ax/7/pppuQmpqKV155hRXil19+GWvXrsUDDzyA1atXg6Io9v1PPvkkPvjgAwgE1l9tlUqFDRs24MUXX8S7776LdevWeVyIKYryueeA65gIBG9CXNMEt7G1D3fLLbdAoVDg6NGjuPDCC+Hn54cbbrgBANDc3IwrrrgCkZGRkEgkiI2NxbXXXovh4WEApydPlUqF9957j3XT3nLLLTbPvX37dsycORMAcOutt7LvH7sn2NjYiEWLFkEmkyEmJgb/+Mc/xh2Hoih88skn+NOf/oSYmBjIZDKMjIwAACoqKnDBBRcgICAAMpkMCxYswJ49e8aN5+TJk7jtttsQEREBsViMrKwsvPPOO65cVgBAZGQkMjMzcezYMQCARqPBiy++iIyMDPzzn/+0EmGGG2+8EcXFxVavrV+/HhqNBldddRWuvfZafPXVV9BqtZzH8dZbbyElJQVSqRTFxcXYtWvXuPc4+xyYzWa8+uqryMrKgkQiQUREBO666y4MDg6OO/bGjRuxYMEC+Pn5wd/fHzNnzsRHH30E4PRe9ffff4/29nb2/icmJtodEwBs3boV8+bNg1wuR2BgIC677DIcOnTI6j3PPPMMKIpCS0sLbrnlFgQGBiIgIAC33nor1Gq11Xt/+uknzJ07F4GBgVAoFEhPT8cTTzzB+foSzm2IRUzgzPDwMPr6+qxeCw0Ntft+o9GIZcuWYe7cufjnP/8JmUwGvV6PZcuWQafT4b777kNkZCROnjyJ7777DkNDQwgICMAHH3yAO+64A8XFxbjzzjsBACkpKTbPkZmZiWeffRZPPfUU7rzzTsybNw8ArNyyg4ODuOCCC7By5UpcffXV+OKLL/DYY48hOzsby5cvtzrec889B5FIhEceeQQ6nQ4ikQhbt27F8uXLUVhYiKeffho8Hg/vvvsuFi9ejF27drGi193djVmzZoGiKNx7770ICwvDxo0bcfvtt2NkZGSc25QLBoMBJ06cQEhICABg9+7dGBgYwAMPPAA+n8/5OOvWrcOiRYsQGR
mJa6+9Fo8//ji+/fZbXHXVVQ5/93//+x/uuusuzJ49Gw888ABaW1tx6aWXIjg4GHFxcQ5/39ZzAAB33XUX1q5di1tvvRW///3vcezYMfznP/9BTU0N9uzZw7qT165di9tuuw1ZWVn44x//iMDAQNTU1GDTpk24/vrr8eSTT2J4eBgdHR145ZVXAAAKhcLueLZs2YLly5cjOTkZzzzzDDQaDf79739jzpw5qK6uZkWc4eqrr0ZSUhJefPFFVFdXY82aNQgPD8ff//53AMDBgwdx8cUXIycnB88++yzEYjFaWlpsLtQIBJvQBIID3n33XRqAzT80TdPHjh2jAdDvvvsu+zs333wzDYB+/PHHrY5VU1NDA6A///zzCc8pl8vpm2++mdP49u3bN+78DAsWLKAB0O+//z77mk6noyMjI+krrriCfW3btm00ADo5OZlWq9Xs62azmU5LS6OXLVtGm81m9nW1Wk0nJSXR559/Pvva7bffTkdFRdF9fX1WY7j22mvpgIAAq+PaIiEhgV66dCnd29tL9/b20nV1dfS1115LA6Dvu+8+mqZp+l//+hcNgF6/fj2na0PTNN3d3U0LBAL67bffZl+bPXs2fdlllzn8Xb1eT4eHh9N5eXm0TqdjX3/rrbdoAPSCBQvY15x5Dnbt2kUDoNetW2f1+qZNm6xeHxoaov38/OiSkhJao9FYvdfyflx00UV0QkLCuPHbGlNeXh4dHh5O9/f3s6/V1dXRPB6Pvummm9jXnn76aRoAfdttt1kd8/LLL6dDQkLY/7/yyis0ALq3t3fc+QkELhDXNIEz//d//4effvrJ6o8j7r77bqv/BwQEAAB+/PHHce69yUKhUFjtu4pEIhQXF6O1tXXce2+++WZIpVL2/7W1tWhubsb111+P/v5+9PX1oa+vDyqVCkuWLMHOnTthNptB0zS+/PJLXHLJJaBpmn1fX18fli1bhuHhYVRXVzsc6+bNmxEWFoawsDDk5ubi888/x4033shaX4yr3M/Pj/Pn/+STT8Dj8XDFFVewr1133XXYuHGjTTewJVVVVejp6cFvf/tbiEQi9vVbbrmFvZdcGPscfP755wgICMD5559vda0KCwuhUCiwbds2AKddvqOjo3j88cfH7fXacss7orOzE7W1tbjlllsQHBzMvp6Tk4Pzzz8fP/zww7jfYfbmGebNm4f+/n72XgQGBgIANmzYcNZFoxOmBuKaJnCmuLjYbrCWLQQCAWJjY61eS0pKwkMPPYTVq1dj3bp1mDdvHi699FKsWrXKqYndGWJjY8dN2kFBQaivrx/33rFR4c3NzQBOC7Q9hoeHYTAYMDQ0hLfeegtvvfWWzff19PQ4HGtJSQmef/55UBQFmUyGzMxMdqIHAH9/fwDA6Oiow2MxfPjhhyguLkZ/fz/6+/sBAPn5+dDr9fj8889Z978t2tvbAQBpaWlWrwuFQiQnJ3M6v63noLm5GcPDwwgPD7f5O8y1Onr0KAB4LBec+Tzp6enjfpaZmYkff/xxXDBZfHy81fuCgoIAnN7y8Pf3xzXXXIM1a9bgjjvuwOOPP44lS5Zg5cqVuPLKK6c00I9w5kKEmDBpiMVimxPRyy+/jFtuuQUbNmzA5s2b8fvf/x4vvvgiysvLx03YnsDeXipN0+Nes7SGAbAWzksvvWQ3NUqhULACt2rVKruinZOT43CsoaGhE0YzZ2RkAAAOHDjAKbWrubkZ+/btAzBeTIHTe8cTCbEnsPUcmM1mhIeHY926dTZ/JywsbFLH5AyOnh+pVIqdO3di27Zt+P7777Fp0yZ8+umnWLx4MTZv3uzUXj7h3IQIMcErZGdnIzs7G3/605+wd+9ezJkzB2+++Saef/55AM65HV1xUXKFCRLz9/efUCDDwsLg5+cHk8nk8bQgS+bOnYugoCB8/PHHeOKJJxxO8uvWrYNQKMQHH3ww7r27d+/Ga6+9huPHj4+z+hgSEhIAnBb0xYsXs68bDAYcO3YMub
m5Ln2OlJQUbNmyBXPmzBm3+Bn7PgBoaGhAamqq3fdxfQaYz3PkyJFxPzt8+DBCQ0NdSq3i8XhYsmQJlixZgtWrV+OFF17Ak08+iW3btk3q80A4OyB+E8KUMjIyAqPRaPVadnY2eDwedDod+5pcLsfQ0BCnYzITJ9f3O0NhYSFSUlLwz3/+E0qlctzPe3t7AZy2mq644gp8+eWXaGhosPs+d5HJZHjsscdw6NAhPPbYYzat+g8//BCVlZUAwLr/r7nmGlx55ZVWfx599FEAwMcff2z3fEVFRQgLC8Obb74JvV7Pvr527Vq3rvfVV18Nk8mE5557btzPjEYje+ylS5fCz88PL7744rh0K8vPLpfL2fS3iYiKikJeXh7ee+89q/E3NDRg8+bNuPDCC53+LAMDA+NeY7wnls80gWAPYhETppStW7fi3nvvxVVXXYVp06bBaDSy1pplMFFhYSG2bNmC1atXIzo6GklJSSgpKbF5zJSUFAQGBuLNN9+En58f5HI5SkpKxu33ugKPx8OaNWuwfPlyZGVl4dZbb0VMTAxOnjyJbdu2wd/fH99++y0A4G9/+xu2bduGkpIS/OY3v8H06dMxMDCA6upqbNmyxeaE7QqPPvooDh48iJdffhnbtm3DlVdeicjISHR1deHrr79GZWUl9u7di4qKCrS0tODee++1eZyYmBgUFBRg3bp1eOyxx2y+RygU4vnnn8ddd92FxYsX45prrsGxY8fw7rvvct4jtsWCBQtw11134cUXX0RtbS2WLl0KoVCI5uZmfP755/jXv/6FK6+8Ev7+/njllVdwxx13YObMmbj++usRFBSEuro6qNVqvPfeewBOPy+ffvopHnroIcycORMKhQKXXHKJzXO/9NJLWL58OUpLS3H77bez6UsBAQF45plnnP4szz77LHbu3ImLLroICQkJ6Onpweuvv47Y2FjMnTvX5WtEOIfwZsg24cyASV/at2+fzZ/bS1uRy+Xj3tva2krfdtttdEpKCi2RSOjg4GB60aJF9JYtW6zed/jwYXr+/Pm0VCqlAThMZdqwYQM9ffp0WiAQWI1lwYIFdFZW1rj333zzzVbpLkz6kr20qpqaGnrlypV0SEgILRaL6YSEBPrqq6+mf/75Z6v3dXd30/fccw8dFxdHC4VCOjIykl6yZAn91ltvTTh+mj6dvnTRRRc5fB/DF198QS9dupQODg6mBQIBHRUVRV9zzTX09u3baZqm6fvuu48GQB89etTuMZ555hkaAF1XVzfhuV5//XU6KSmJFovFdFFREb1z5056wYIFnNKXbD0HDG+99RZdWFhIS6VS2s/Pj87Ozqb/8Ic/0KdOnbJ63zfffEPPnj2blkqltL+/P11cXEx//PHH7M+VSiV9/fXX04GBgTQA9t7aGhNN0/SWLVvoOXPmsMe75JJL6MbGRqv3MOlLY9OSmO/DsWPHaJqm6Z9//pm+7LLL6OjoaFokEtHR0dH0ddddRzc1NU14TQkEBoqmbfi2CAQCgUAgTAlkj5hAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC
9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC9ChJhAIBAIBC8i8PYACAQCwRehaRo0TcNsNsNsNsNkMoHP50MoFIKiKG8Pj3AWQYSYQCCc04wVXEZ0TSYTzGYz+3MAEAhOT5lEjAmehAgxgUA4JxgrtpaCy4gtI7gURbF/eDwe+/sURcFkMoGiKAgEAiLGBI9AhJhAIJxV2LJumb8txZZhrODaE1dLkW5ra0NUVBTkcjkRY4LbECEmEAhnHLbcyTqdDiaTCTweb5zgjrVwXRVP5hhHjx6FQqGARCJh3dUEgquQJ4hAIPgsXPZvGdra2qDT6ZCRkcGK7WRZq4yb2mAwgKIo8Pn8STkP4dyACDGBQPA67u7fMv/m8/lTIoo0TbPnZsSY+T+B4CxEiAkEwpQxWfu3zLEne7+WOb7ZbAafzwdFUTCbzdDr9RCJRESMCS5BhJhAIHgcmqZZcbUUW1uC66n926kQYmbczLmYMRMxJrgDEWICgeASzuzfjhWuydi/HWtNTyZms5kdP/OZaJqGwWCASCQikdQEpyBCTCAQJsSW4BqNRivrtqmpCaGhoQgODmZFaLIDpmyNcyrOxQi+peXLnNdkMsFgMJCCHwSnIEJMIBAAcNu/tRcwpdFoYDabvZrKYxlANdnnAcbvVzPXwmg0AiDVtwjcIUJMIJxjTOROtnQpW7peuQRM+YLoTEWwltlstnsu5jqZTCYARIwJ3CBCTCCchdjbv7W0cJn3AXB7/3Yq92ftYblvO5nYck1bYummJqUwCVwgQkwgnMFwbVhgKQSTFTDlC2LjbYvY8n00TcNoNLJiTCDYgzwdBMIZgL2CFyqVCiMjIwgNDXVY8OJsZ6qCtRghdrQfzaQ1kepbBEcQISYQfAhnC14olUq0t7cjIiICgPcE1xdc05M9BlsBa45gLGODwQAARIwJNiFCTCBMMbbcya4WvJjqFKGJ8PYYpipq2pm9aMtKXKQUJsEeRIgJhEnC1wpeTCbngkVseR5nxJRU3yI4gggxgeAmnmhYcDbg7c8xlQU9nD0Pqb5FmAgixAQCRyazYQHBfaaq6YPZbHbJorVMa2IsY/JMEAAixATCOLg0LNBqtaiursacOXO8LrjnklvYEb5qETNYFvwgpTAJDESICeck7u7fCgQCmEwmkh9qgbcFZSoKejAWsTvnIdW3CGMhswjhrIZLw4Kx+7eA42hk5vWp2pf0dc5Wi5jxfqhUKiiVSoSHh3skOpuiKOh0OtTV1aG0tJRU3zrHIUJMOCtwp2EB85ozkElzPN6+Ju4sisxmMzQaDdRqNVQqFftHrVaDpmnIZDKIRCKcOHEC06ZN88hnNZlMGB4eJtW3CESICWcWEwmuOw0L3BmPNwXI2+LnS3Cxys1ms5XYMv9Wq9WgKAoymQxyuRwymQxhYWGQy+WQSqVs+lF7ezuam5shFArdHq9l0BepvnVuQ4SY4HNMVPBCpVKhv78f0dHRHmtY4ApEAK3xBde0pcvYZDLZFFyNRgMej8cKrp+fHyIjIyGTySCVSh3e19TUVAwNDbGWrDtWrGXsAam+dW5DhJjgNZxpWMD8rdVq0dHRgbi4OJ8QQ18QIF/BG/fDaDSyIqvVatHe3o6WlhZotVoIBAJWcAMCAhAdHQ25XA6xWOxW1HNsbCxGR0dRW1uLgoICl/eLGYvYMt6AVN86NyFCTJh0PFnwQiAQeN0dDFgHaxEm/zoYDIZx1q1KpYJOp4NQKIRcLofZbIZCoUBERATkcvmk5elSFAWFQgGdToeDBw9ixowZLp3H0jVtWQqTVN869yBCTPAYU1HwgrGOvY23FwJnI4xFaEtwGXGSy+WQy+UICwtDQkICK7gAUFFRgbCwMAQHB0/qOBkBzcvLQ3l5OVpaWpCWlub0cey1p2TqUpOCH+cORIgJTuHJhgWu4CtCzOBLY/EmzngpaJqGXq+3ik5mhNdgMEAsFr
OCy1i3MpmMU4DUVBX04PF4kEgkKCoqQkVFBSQSCeLi4pw6jq0KXZY5xqT61rkDEWKCTXy1YYGvCDGZHMcz9ppY5uCOTQsymUyQSqWsyDL7tzKZzOUAKG/UmlYoFMjPz8f+/fshFosRHh7O+Tj2SmWS6lvnHkSIz3G47t8eOXIECoWCDZLyVjlHXxFiBl8ai7dgnqGhoSEMDg5aiS5N06zgyuVyBAUFsYLr6ejgqRLisQIaHByM7Oxs1NXVobi4GAEBAZyOM9F4SfWtcwsixOcIntq/9XZqha8IsS9NilN1PZiiF2PdyWq1GmazGadOnYKfnx/kcjlCQ0OtcnCnAm92X4qMjIRWq8X+/fsxa9YsyGQyh8dx1DzCskkEU/DDl547gucgQnyWYSm49hoWMDhTYYpp4eZtfEWIGXxpLJ6CycEd607WaDSgKIq1aC0jlPfv34+srCz4+/t7bdxTaRHbOk9iYiK0Wi2qqqowa9YsNohsouM4WqQwzzupvnV2Q+7qGQjX/VtPFrxgojm9DTMxeTuF6WywTCxzcC1FV6PRgM/ns+7kgIAAREVFQS6XQyKR2PzsvnA9ptIitieg6enprGVcXFw8oQeJaztFy0hqUn3r7IQIsQ/jbMMC5u/JCJjyFUvUV5otnEl5xEajcZw7mSmAIRAIrPZvY2NjIZPJnC564QvXYTLHYCs40RYURSE7OxtVVVWora1Ffn6+XbF15hm2FGPA+1tEBM9ChNgHsLd/Ozo6CrVajeDgYI82LHAFX7KIAd+Y+AHvj8Py3jMpQWNdykwaDFNlKiQkBPHx8ZDL5R4NAvK2VeytYK2x8Pl8FBQUoKKiAocOHcL06dNtjourRcxgWQqTVN86uyBCPIU4Cpgau/c0MjKCzs5OhIWFAfDuREdRFBHiMXjjfljm4KrVavT390Or1WLXrl3jcnDDw8PZf3uiSYGjcXkbbwZrjUUoFKKwsBDl5eWQSCRISUkZ9x5XhJj5PVJ96+yCCLGH4VLwgnkfYG3hjo2K9JVyjoBvBWsBvjHxA5M3DpqmodPpbLqUjUYjJBIJ5HI5+Hw+BAIBsrOzIZfLvRrM4+3ndCq+K8yClIsASqVSFBYWorKyEhKJBDExMVY/d2W8pPrW2QkRYheZioIXvuIOBohFbAtP7JtbFr0YK7omkwkymYx1KdvKwR0YGIBSqeScu3q2M5mi5Ep8gr+/P/Lz81FdXQ2xWIzQ0FD2Z85axJbjINW3zi6IEDvAkw0LnMWXhNhXLGLmuvrCWJy5t1wazzMiGxISwv77THE9+sL98IWoaVuEhIQgKysLtbW1KC4uZlO8XBVigFTfOtsgQvwLU9GwwFksK+t4G19aFAC+MfED48fBtfE8s4fL9ME9UwR3IrwtBN7OI56I6Ohoq4IfUqnU7fEy84/RaARAqm+dyZxTQsxl/7avrw9DQ0NISUkZZ+FO9UPuS+LnS65pX0ilYhZIfX196O7uZsXW3cbzZyrevh9j0/gm+1yuLJySkpLYgh8lJSUwm81uB9Exc9Px48cRFhYGf3//s/YZO5s5J4T48OHDePfdd/Hkk0863L+laRoqlWrSo0y54EtC7CuuaWBqFwWWRS8s/2i1WgBAT08P/Pz8EBgYiJiYGLcbz7uCL90XbzHVQuzKeSiKQmZmJmpqalBTUwOpVAqxWOz2eCiKQltbGxsxT6pvnXmcE3esv78f7733Hh5//HHweLwJ3cl8Pt+nxM9XxuIrFjEwOcVFuDSel8vlCA4ORlxcHORyOSoqKpCZmQk/Pz+PjoXgPFMpxK64phkoikJubi4qKysxMDDAqSY1FxgrnVTfOjM5J4RYJpNBpVKBz+c7/ALx+Xyf2Zfl8/k+Uc4R8D2L2JWxMMUQbEUoc2k876lxnI14+zpMxvktnxWlUgmj0YjMzEyXXdMMfD4fhYWF2LFjB/r7+5Gamur2WM1mMyu+zLNMxPjM4ZwQYrlczkanchFiX7H8mC+75ZfMm2Pxle
viSAAti17YajzP5ODKZDJERkY61XjemXGca5yprmmDwQClUmmzIplYLGb3+41GI2pqajgt6B0hEokQGBiIoaEhtLW1ITEx0a3jMRHYpPrWmck5IcQKhQJmsxk6nQ5SqXTC9/papDLgG0Lsi65pyxzcqWg8b2schNN4e0HCRYgZwWWeF0Z8GcFlvCFMRynLxRkT5FlXV4fR0VFER0e7PWYej4f4+Hg0NzdDLBYjKirK5WNZCjHzf1J968zhnBBiuVwOAFCr1Q6F2Jdc05ZC7G286ZqmadqqD67RaERjYyN0Ot2UNp63NzZv4kuLAV8YC0VRNr0hKpVqXAlQxhvCNcCJz+cjPz8fO3bsQFdXF2JjY90aq9lshlwuR15eHmprayEWixEcHOzysSxjXxgPFrPI8IV7Q7DPOSXEKpUKISEhE76XeYB9YV+Wieb2FSGe7HHYajzPtOWjadqqyEVERAQiIiK8moNLXNOn8dY1sBTckZERAMCePXvcFtyJYJpnDA4Oor29HQkJCS4fixHPsLAwZGZmorq6GiUlJU4H/zHWuuX3wDILhFTf8n3OCSHm8XiQSqVQq9UO38tYUb7gDgZ8Z2/Wk6LDNJ631QeXomw3npdIJOxEU15ejsDAQHaBRfANJmui52LhMmlAOTk5k57Cw+PxkJKSgqamJkgkEkRERLh0HEvxjI2NhUajYQt+SCQSzsdhvpdjF6TM/SDVt3yfc0KIKYqCVCqFSqVy+F5fFGJfcJW7siAYm4PL/NuVxvOW+Iol6ivjOFtwx6Ws0WjQ1dU1aTW3LT1kZrMZCoUC2dnZqK+vR3FxsUvnHet1S01NZatvFRcXcw4eZL6XtjxDpPrWmcE5IcTArylMjmAeZpPJRIp6WDCRi9xgMNisoWyv8TyTEuROLqYvCKCvTGjevhbOnn8y9nCnciuJSV8KDw+3smKdzQm25U7OyspCdXU1amtrUVhYyGnbZSIhZo7LLOhtdXkjeJ9zQogZd6dGo+H0Xl+xQgHfSadiFgSDg4MOG8+HhoZa9cH19JfeV4QY8L4I+hKW99lezral4CoUCjaFjPm3qy7lqRZi5lyJiYlWYuzM4t1W0wcej4e8vDxUVlaioaEB2dnZDj+XIyEGfr03RqORLfhBxNh3OCeEGDhtEXPZIwZ8R/yAqbeIx+bgWoouADQ2Nnql8bwlviLEZCL7tW8yAJw6dcqqh7JlzrZcLme3HzyZQmY5jqlibIQyE2hVXV2NmTNncg4etLd4EAgEKCwsRHl5OZqampCenu5wPFxaq1rmGDPnIfgG58ydkMvlnFzTgO/sywKTJ8RcG88zE6hIJEJdXR1KS0u9npfoK5HkwLljETMTuGXhC+bfzMQ+ODgIPz+/SRXcicbnDYsYsC5beeDAAeTk5HAay0RtEMViMQoLC1FRUQGpVIr4+HiXjjMWZj4hpTB9i3NGiJ21iM8WIWZycG3t4ZrNZkil0gkbzzMwwR7u9FD1FMQinjxseUSYIhhjF2hMkRSJRILdu3djxowZXourmAohtkwnHHsuxootKytDS0sL0tLSHB7P0XdJoVCgoKAAVVVVEIvFdqOznf1Okupbvsc5JcRcLeIz0TU9NgfXMj3IE43nLSNGvY0vCbEvjMMV7JUBnUhw7Vm4zCLN20x1sNZYxGIxioqKUF5eDqlU6rDgB5fFQ1BQEHJyclBfX4+ioiIEBQWNe48rQsz8Hqm+5RucM0LM1Jvmgi+7pr3VeJ75fV8QHl8SQF8Zhz1cEVy5XO6Sy9Lbtaa95Zq2RKFQID8/H9XV1ZBIJAgNDbV7HK4CGhERgWnTpqG6uhqzZs0alz/vipdqbPUtIsbehQixDXzBNc0UvdDr9ejt7cXAwADUajXUajX4fP64xvNcc3BdxZeqfPmKEPuCa5oZw1QK7lh84V5MlWsacCx8ISEhyMrKQm1t7YSVspwR0ISEBGi1WlRVVWHWrFlWfYxd3S6yrL5lMBhI9S0vck4JMVMGzx
FT6Zo2Go02I5SZHFyKoiAWixESEuK1xvMMviSAvjAOYOpFaKzgDg4OQqfTYdeuXZMuuI442y1iptEIl3NFR0dDrVajqqoKpaWl4ypludItatq0aVYFP5htAnfiNkj1Ld/gnBFimUyGrq4uTu+dDNf02JxKRnTHNp4PCQlhG8+LRCIcOXIEQqFwwqjJqcKXiov4ghBP5ji4WLgKhQJ8Ph8CgQB5eXlT1ujCF5kq17QzApqSksLmGJeUlFjtr3PJ/R0LRVHIzs5GVVUVamtrUVBQwH4n3XErk+pb3uecEeKpcE0723g+MTERMpnMbuN5wHfED/CdtCFvdoLyNI4E17KzFOMRsRTcoaEhDA8PO90owNOfwdtMtRBzET6mUtb+/futhBNwTYiZ9+fn56OiogKNjY3IysrySCbD2EJGRIynFiLENnAkfpY5uGPTgizdg+42nmfG4u39agZfEcAz0SJmBJfJvbXsi8tFcM8EvO2ansrzcP2sTKUsS+G0fG5cEVChUMhGZx89etSqIYo7MJ/p6NGjiI2NhUKhIGI8RZwzQiyTyTiVuAROW8QGg8Gq8fxY0bVsPM8lxcNVmLH4Ar5iEfuKENvCVqEUe4Lrqd7J3r4W3j4/M4apCNZyxZIVCoVspaxjx44hOTmZPY6rY5ZIJGzBj7CwMI9FPFMUhdbWVoSEhEAqlZLqW1PEOXOVFQqFXYvYbDZbCW5/fz+0Wi26urrGNZ4PDg6GXC6HVCqdEmvFl1zTxCL+FSZoZ3R01Go7YjIFl2AfX9wjtkQqlbLCKZFIEBgYyKks5UT4+fkhPz8fVVVVHu06xbi6SfWtqeOcEWLGIq6ursaBAwdQUlLCRiwzAs1MlkxOXWZmplcbzwO+J8S+MJaptMztWbiMV0StViMgIIAILryfzjWVFrEr5/L390deXh5qa2sxffp0j8wrISEhiIiIQHd3N4aHh90WZJqmrVrAkupbU8NZK8SNjY2oq6tDY2MjGhsbUVVVhY6ODixbtgwJCQmIj49HUlKSzcbzJ0+eRG9vr080nvcV8QPObte0I8G1ZeG2tLQgPDwcMTExHh3LmYa3vRPAr40PJguDwcB6hNyxZMPCwpCeno5Dhw55bGyMt8/VdoyWMPeS6c5ECn5MDWetEP/5z3/GqVOnkJmZidLSUsydOxf/+Mc/0NjY6HDfw5fEjwRrjccdIXYkuJalQB1ZuL5wPbxthVri7bF44vz2Wjfq9XrI5XJObQkdER8fj6GhIbZblWVxDlcwm83w9/dHUFAQW/BjokyMiWDmGkaISfWtqeGsFeIvv/zS6v8HDhzAk08+ycll6AuVtRh8aVHgSxaxo3FwFVzGwlUoFE7v+3tbeHwFby9GmDE4cz8sC+lY/tHpdFZphuHh4VAoFBCLxThy5AgOHz7skfHGxsaiq6sL1dXVKC4udmsrg3ElZ2RkQKvVsu0YXTnm2GA0Un1rajhrhXgscrkcGo3Gav/DHmdi04epwBcsQMDaImYi28emkalUKra71NhAO2eaXTjCF66HL+DtydnefTCZTDYFV6vVWhXSCQ0NRWJiot3e2iaTCdnZ2aisrGTP5+5nZizM+vp65OXluXw8Zk6jKAo5OTmoqqpCXV0d8vPznT6mrT1wy+pbjGXs7ft9tnHOCLFCoQAAqNVqh8UPfM0d7EtC7K2xWKaSjYyMsHV3p0pwbUEmo9P4wmLEZDLBbDajq6vLSnA1Gg0EAoFV5br4+Hi2cp0zCAQCpKeno6amBm1tbUhKSnJ5vIx45ufno7y8HIcPH0ZmZqbLx2IWD8wxKyoqcOjQIWRmZjr1nJpMJlbULbEs+EFKYXqec0aImcArLkJMXNO2mQrX9NjcbVv9k4HT14UpBTrZgutovISpY2z3Mcvng/GUMNsNsbGxrOB6QjQoioJIJIJIJEJLSwtkMpndHsFcPgePx4NIJEJRURHKysogk8mQkJDg8rEYRCKRVapUcnKyy8eyhFTfmjzOGSFmoq
K59CT2Jde0L43Fk65pLoJracVYCm5bWxtUKpXLk6CnIJPQr3j6Woztrz1WcJlnIyAgANHR0RgeHsbo6Chyc3M9Oo6x0DQNPp+PrKws1NfXo7i42KWUIUvXtkwmQ2FhIfbt2weJROL0c21LPJljMmIcHR3t8rEssXRTUxTFNqYhuMc5I8TMl5dLmUtm1TeVPU4nGouvCLErFrE7gmsPX9mrBohFDLh3DWiatiu4ANigOoVCMWG7T5VKNeleEeb5pygKERERSE1NZXsEM54arowVvMDAQGRnZ7sk7vbE09/fH/n5+aipqWE7uDnCZDI5vI6M58FoNLJiTHAPu1ewt7cXWq0WcXFxNn/e1NSE0dFRFBYWTtrgPAlFUZBKpZwtYgCcArsmG18S4okEcDIE1x6+UFnLl8bhC2NwtGC193yoVCrQNG0VxR4eHs5Wr3NmITyZi2bLvs/MM5uYmGiVv+uMINkSz8jISLZbkzP5wBNZsaGhoZg+fTpqamom7I1seSwucx4zL5HqW57B7pNzzz33ICYmBi+99JLNB6y5uRlr1qzBunXr3Eogn0q41pv2RSH2FevcZDLZtGDGTqgymQyhoaGQyWQe38P1FQH09v3wRSybWziKYg8NDWUF193nY7ILejBYfg8pikJmZqbN7krOHMeSxMREKzHm0ijGkTs5JiaGDW50ZL0708mJ+R4ytfC9PVeeyYxTWOYBqa6uxm9+8xtWhHU6Hf75z3/i3nvvRUBAADIyMrB3716MjIycEUJMURRkMhkni5h5EE0mk0sdkzyJZdu0qXzQbVkw/f39MBqN6OjosLJgPDmhcsFXhBjwDWvUGzATsFKpxMDAAMxmM/bv32+zfSOTp302RLGPFSrL7kpM5DOXcdgTPEbcq6ur2XxgR9eMi3gmJydb9Ua2N685K8TAr88CKYXpOnaFWKlUsiLMVFb585//jGuuuQYBAQFISEgATdMYGRlBZGTklA/cFbjuEY+NDvQmky3EloLLtOiz5zIMCAgAj8fzWJ1cV/GlwiLnAvb6JRsMBkgkEkgkEgDwavvGqWz6MPY8QqEQBQUFKC8vh0wmQ2JiosPjOIpOzs3NRWVlJQ4cOICcnJwJPxsX8aQoinVRTyTwTPoSV5hxkepb7mHXNR0TE4Njx45h0aJF4PF46OnpgUgkwtGjR5GamorW1lZIJBIYjcapHK9byOVyThYx4DvRymMbibsKExQztheuLcG1Z+G2trZCp9N5/YtGLOLJYaLyjmKxmH0+oqKirFp+jo6OoqamxqsL8qkSYnsucJlMhoKCAuzbtw8ymQzh4eETHsfReAUCAQoLC1FWVoaWlhakpaVNOCYu30kej4fc3Fzs27cP9fX1yM3NHTcGZyxiBstSmKT6lmvYFeKLLroI77//PiIiIpCZmYmnn34ac+fOxcsvv4zdu3fju+++w9y5c88Yaxg4/WXhYhEDvlPUg8fjOWUBThSFOlZww8LCnHIp+0q0sq8IsS+Mw5UJj2t5R6YhilwudxiI5O2JdyotYnvfFSbyua6uDiUlJfD397d7HC6CJxaLUVRUhPLyckilUsTGxrp8LAaBQICCggJUVFTgyJEjyMjIcPlYllh6EUn1LecZ9+1ibsJDDz2EvXv34t5770VoaCjMZjPeffddfP3119iwYQPi4+Px6KOPIjg4eMoH7SrOCLGvWMSA7cjpyRRcZ8bhDXxBAJlx+DJcyzuGhYVNWN7xTGCqngdHQWGRkZFsJHVpaSnrtrd1HC7fRYVCgfz8fFRXV0MikSA0NNTlYzGIxWKrHGNLV7qzrmlLSPUt17G7zA0MDMQXX3yB7777Dh0dHbj66quRnJyMnJwc3HLLLWxE7JkE1z1iwHcsYmal39/fj97e3ikRXHv40t6sLwgx4BuuaZqmMTo6Ok5wLcs7KhQKt8o7Ojq/t/EFi5ghKSkJarWabehgy5vgjHiGhIQgKysLtbW1NlOQXLFi5XI560qXSCSsZ9NVi5
iBVN9yjQn9TYGBgVi1atW41+Pj4wG4f9OmGmct4qkU4oksXLPZjJMnT8LPz29KBNcexDU9fhxTia3yjqOjozAYDKiurraKUvZ0eUdHeHuy9Waw1liYwCim+UJBQcG433F2vNHR0VY5y5aWtqvzcGBgIHJzc1FXVweRSITg4GCPzOmk+pbz2BXiU6dOobW1FRqNBkNDQ1AqlWwO7pEjR7By5UosWLDgjBJjuVyOvr4+Tu+dLNe0I5cyM5kyAR8ymQx1dXXIyMhAUFCQx8fjDL5iEfvKggCYHGvQmfKOgYGBaGtrw9y5c7022fnCveBiqXoCrvnKPB5vwoYOrmRBpKSkWKUgWWa1uPrZw8PDkZGRwRb84FJZiwuk+pZzjLs6RqMRAoEAq1evxurVqxEUFASKoiAWi6HX69Hf3w8AOO+886Z8sO6iUCjQ3t7O6b3uuqa5Cq5lJSGmHvZYfGW/2lcE8GyxiO09IyqVis17d1TecWRkBBRFed3i8Pb5p2oMzgi+UChkI5/lcjnrSQSsOyZxhaIoZGVlWRUQYRbH7ohnXFwcW/AjKCjIY1uOPB4Pp06dQn9/P/Lz80nBjwkYJ8TMyuWRRx7BjTfeCJlMxvr5NRoNPvvsM7S1tSEmJgaAb3wBuTIZrmmz2Wy3tCNzTq6Caw9fCZLylXH4ihAD3KzBqSjveK4zVZW1nD0Pk9ZUVVUFqVSKsLAwAK670i0LiDQ2NrKWtrtWbGpqKjQaDXp6eqwWDO5iMBig1+tJ9S0H2PUXREZG2kxNeuqpp3DLLbfggw8+QEFBgU+UgeSKs+lLlqLDuAuZPFxmr86TgjvRWHwhcMxXBNCXxmH5jHirvKO3r4W3z8/gaxYxQ1BQEBtsNWvWLPj5+bllxTKWdnl5ObtX7Il93RkzZmD79u3o6upCamqqR+YuJgqbVN+aGJcc9+np6exe65m0apfL5Q5rTTOCy1QTshRcS3fhVFovvmKJ+so4vL1XzQiuVquFTqfD4cOHWcG1LO+oUCgQHBzs9Z7JU4G354HJDNayXGi4ep6xwVbuupOlUimbggS4L8TMMfz8/KBSqdDQ0IDs7Gy3ryljqDHfWVJ9yzYOhbinpwcNDQ3o6emBwWBAQkICrrjiCigUCgCeeQCmCsv0Jb1eD71ePy4KlRFcPp8PgUCA0NBQtrCBt9yFRADHj2OqrLCJyjsKBALw+XwEBgZ6tbyjt/EFi9hX8ognIiUlhU1rEovFbs8l/v7+mD59Og4cOIDh4WGP1XRISEhAe3s7mpubMW3aNLeOxVjEpPrWxEwoxLW1tXj++edRXV0NABCJRJBIJFi+fDmefPLJKRmguxgMBhw9ehQHDx7Ehg0boFarkZOTg46ODrz99tuIi4tjrRdLwW1ra4NGo+FUN3ay8RUhPpuDtbiUd1QoFFblHY8fPw6dToeUlBSPjuVMxNuTKk3TGB4exvHjxyGVShEaGurymGiahk6nY7cYlEoltFot0tPTQdO0WwUvZsyYgX379mF4eJjdL3YHJpOCiXpmDCRXMZvNbEUvpuCHO3vGllHYpPqWfewK8fHjx3HvvfdCq9XizTffRGJiIkwmEzZt2oTXX38dZrMZf//7392qxDLZrFq1Cp999hn4fD4yMzMRGhoKmqbxxz/+Ebm5uUhPT7c79qnOI54IX4qa9oVxuCPEXMo7Wi7KuJR3PNfx9uLs+PHj2PrjZpj7+uAvlYASCuGfmIi84mKHi6Sxe/rMv5k9faZrlFAoRG1tLYKDg92KKmbSmrZv3+6RwCjG9RsbG8u6vcViscvHY+ZzhULBBplJJBKHtbMnGp9ldDipvmUbu92X2tvbceTIETQ2Nlqt3DIzMyESifDqq6+yQszUQ/Y1HnnkEfzlL39BYmIi+Hw+qqqqcN555+Haa691OF5fET/g7BDAqR6HrfKOSqUSOp2OLe+oUCjcKu/oC9fDV7533hpHa2srdqxfj6DWY8hOSEBSTAxGNBocbmrGzuMnYL
jsUmRkZLALMEsr17KDFPM8MIVQ5HI5a8nRNM3e5+7ubrfTe0QiERQKBQYHB3HixAnExcW5fCxmr3natGnQaDRsNS9XjSPLveugoCC2dvbMmTMRGBjo9PFsGWqk+tZ47C71/f39ERwcbLdnZk5ODgB4tFSep8nLy7P6/5lY4hLwHSH2lXFYCqDJZBq3z8+4EqeivCPBexiNRpRv24Z4tQYJycnw8/ODWCBAgFiMvJhoaJua8fUHH6DkvPNA07SVx4PJy+bq8WD6BPf09KC7uxtpaWluiQdFUUhKSsLhw4dZV7orMMJJURSys7NRVVWF+vp65OXluTS+sUFkkZGR0Gq1rLUtl8vdOh4Dqb5lzbgnkLkY8fHxmD17Np555hncd999oCgKer0eVVVV+PTTT3HllVeioqICo6OjEIvFmDdv3pQP3lnkcjkMBgMbLDARvuSa9iUB9NY4LMs7Dg0NgaZplJeXQ61Wg8/ne6W8oy9YxL6AN66B2WzGoUOHMHDkCAojo6DXatGn06G7uxs8Hg9isRgZkZFoP3ECEokEhYWFbi/AeDweAgMDMTg4iKamJqSnp7s1fn9//wlrSHM9DvOM8/n8Cat5ccFWZa3ExERWjEtKSpxyfU+0dUmqb/2K3U8uEokwMDCA9957D19//TWmTZsGpVKJ+vp60DQNgUCAtWvXQqvVIjQ0FDt27JjKcbsEE8igUqk4CbEviB9wegJgEuK9PQ7GTTdZAselvKNUKgVwugiBQqHwSASqK5zLK/ixTGbqEFMMZWx+dktLC8zDwxDHxELD48FPoUBISDD4/F+tq7jhIdYa9gQURSEuLg4nTpyAXC6325qQy+eiKArR0dFQqVRstyZn93fHWpwikQhFRUUoKyuDTCZDQkKC08ezJZzp6enQarWorq7GzJkzOYumoxgiy0hqJlvlXMTu1aQoCgaDAXfeeSeEQiF4PB7kcjmuuuoqKBQKmM1m+Pn5QSaTISAgYCrH7DKMEKvVaod1m4lr2vY4AM/kbHIt7+jn5zeuvKNer0dPT4/drZOphFjEnrkGTG62LcE1m8028/eDgoLQcPw4IiMj0dHRAYlEDIHAep9f70aUsy2YqOK8vDxUV1dDJpO5lDZkKaCpqalW3ZqcGa8t4ZTJZCgsLGQ7K0VERLg0LkuY7UimkUV+fj6n7x6XfGlmfmMiqc9FMbYrxDKZDD/88MNUjmXSEQgEEIlEnPaJiWt6PIz4OjPxci3vqFAoOBdIcWUckwGxiH/FmWthmSpmKboGg8Gq+pijYihxcXGo8fPDif5+UKABWI+he3gYSrHErWCosTCVtUJCQthmCaWlpU4HcFkKlGVa04EDB5Cbm8v5etoTusDAQGRnZ6O+vh7FxcWcjaWJhJOJ+GbKa2ZlZTkcJ9esGsZNfa5W35rQv2A2myec7Cxvwplw4RhLS6VSOXyvr7mmfWEszD22tQpnci9tCa5leUeFQuF2eUdfEmJvjwHw/nWwh2XkuqXg6nQ6NjdbLpezudlyudwpaygkJATRM2Zg3969SBOJERAQyP5sRK1GWdsxBOflISoqymOfydIbFBcXB6VSyQYyORN1P7ZUJp/PR0FBAcrKypwqpDGRcEZGRrLdmmbNmuVwscDM9xPdA8vymq2trQ7Tw7h2c2Ku6blafWtCIT4bLwRXIWZc01PV59TRWHxBiJnroNVqbTaiZ8o7KhQKThaNu+PwBQHyhTF4G5PJBJPJhO7ubivB1Wg0EAgE7PPgTqqYPRYsXozNOh22bN6MacpRRAWHYFSnxSkzjYAZM7Dkggs88v21FArL42VkZKC6uhq1tbUoLCzk/JzbqtAlEolYkZPJZJz2nx25fhMTE63EeKLrzswxjj6DVCq1KvjBNACyd0yuiyvL6ltMIR1vz71TBecwNcsJ50y+ODKZzGG9aeDXLiHnshCPLe+oVCoBgN17YibYqS7v6CtC7O3nYqqx3Ncfu48LgA1gCgwMnLLIdYlEgosuvR
QmAHqNBl0ApH5+mJOejqSkJI8JPsNYS5aiKOTm5rKRytOnT+d0HHsCqlAokJ+fj+rqakilUoSEhLh0HMvxZWZmorq6mg20svd+Zo7h8h328/NjxykWi+2mXzlb8IkRYyZm4FypvsVZiM+Gi8G4prnsETMPq6caZbvDZLvJHZV3ZAQ3OjoaIyMjKCoqcinVwlNYWifextuLgclgbBcpS9G17KXN5ONqtVqcOnUKRUVFXhkvn89HTEwMkpKSPFI2kkGtVsNgMFjlGtuyZAUCAQoKClBeXg65XM4pUnkiAQ0JCUFmZiZqamowa9asCctWcgmGYhYLlZWVOHDgAHJycmzO50xMDNe5PiQkBFlZWWx5TX9/f5vHdHb+tMwxPleqb3ES4v7+frS2trKux7GFMs4k5HI55z1i4PTD4OlVtbN4yiK2Vd5RqVSyK08u5R2bmpq8vjAhFrHnMBgMNks8jt1mmGhfv6enx+vXwpOeq0OHDmH3jh040dAAmEyQBAejYN48LF682G4bRJlMhvz8fFRVVbHXa6KxOhpvbGws262ptLTUbvoV1yYUAoEAhYWFKCsrQ0tLC9LS0mwey9kqidHR0VY5xpb70MzndMVLxljGRqMRwNlffcuhEFdWVuJf//oXDh06BKVSibi4OOTm5uL+++93OkfNF+C6R2y5X+FtnB3HZJZ39JVrQgKlnGPsIowRXFteD6a+srMuRW/jiTHs2rULP334IeLUGlwSEQGpUIj2/gFUvf8BWhobkV1YaPc8Y/sO27NkmWfG0YI2LS2NTWuaOXOmzfvhTDtFpplDeXk5pFLpuD1oV1szJiUlWYkxs2hgLGx3mmQwsTpne/WtCYW4o6MDN998M0JCQnDrrbfi2WefxYoVK7Bv3z789a9/xYsvvuhwD8PXOBPLXNobhzfKO56LrRB9fQyWMBXIxrqVtVqt1SIsPDycFV93Kxp5+xowY3B3ku7s7MRPn3yC2XwBFlikEKWEhKBAq8EH+6uxV6NBfn6+3WNER0ezkdT2LFmuQVFM2crKyko0NDTYdCk7K56We9ASicTKcne1gQ+zD11bW2u1aOD6OR0dGwBbfYtpqXi2YfMbyNzcl156CcnJyfj000+hUCjwxhtv4IorrsDTTz+NwsJC7N+/H0uXLnW7yfVU4owQ+1IKExOVOrba1FSXdzzTWyGazWbU19djz+7dGBrogZ9/EEpKZ6OgoMClpg/eggmcGhgYgNlsRkNDA5RKJTQaDVt8R6FQIDg4GHFxcVAoFJNaY9vbk6MnhLiyshKKwUHMLygYd6wgqQyzIyLwUUODw/kjLS0NKpUKNTU1NoOjmOeWy3iZtKby8nKbLmVX5t6QkBBMnz59XGlNd+ZxpuDHvn372FrXjPHgrjZY5hgDOCtLYU4oxI2NjViwYAEUCgVomoZEIkFjYyPb97Krq2uqx+s2XKOmgakv6jFReUeapnH8+HHI5XIEBAQgOjoacrl8ykP8z2SLWKPR4N//egXN+7chSaFBQpAQ3ccNWLPrW0RlzMSDjzzusOLaWCZ7UTK2Ny5j5TJiIJFIQNM0W4HMGyU/fWFh5gkhPnn0KNLkCvAo28KRER4ONDehq6trwvaFjChVVFTg4MGDmDFjhtXYnLUUxWKxVVqTZbqQq+IZExNjldYkkUjcNqgsFw2HDx9GbGysxyzYs70U5oRLCz8/P/YLz0QcMwKs1+ttRsn5OlyDtYDJc01zKe+oUCjYyZXP56OmpgZFRUVetzx8xSJ2ZRzv/G8NTlZvxONLIpAV9+tE2t6rwuofd+Pf/1qNPz/9LOdr7Ol74ag3rq2CKMwe4pkYr+FJPBKsRVEwT/BMmWkaoLnteVoW6Ghra0NSUtKvx/lFiJ0Zr0KhQF5eHmpqaiCVStmymmaz2WULMSUlhRXjkpISp3J+7cHUui4vLwfg2VoUFEVBo9Hg+PHjSEtLO2O8sFyweQeZByQ7OxsHDx5EW1sbEhMTIZFIsHr1avz3v/
9FcXEx23HpTLogMpkMAwMDnN7rrkXMpbwjs083UXlHvV7PHo/rl7e/vx+dnZ2QSCRITk722D06U4O1urq6ULNnM35bEoSsOOtyfwlhcty1MBov/lyJQ4cOcc4FBVyzBp3tjTtV+dmu4gu59p4YQ1JGBmr3VeECsxl8G9+Xxu5u0P5+ExawsEQikaCgoACVlZWQyWRszWdmrM6ONzQ0lC2rybQkdNednJWVhf3796O2thbR0dEemSeYWtcVFRUefW4pioJWq0VbWxsSEhLOqupbNoWY+XAXXngh27w6MTERixYtwocffoj8/Hw8+OCDbKCWM3se3sbZYC0uosOlvCOXNJCJxgFwc0N1dHTgv//9L3bs2MG6ceLi4nD99dfj0ksvdfsenamu6ZqaGkiNQyhJTbX588wYP0SIulBdXc1ZiB2NgQmmG2vl6nQ6t3rjEk4vTk+ePAmTyYSQkBCPeGlmzpyJ8k2bsLmpCRekp1t9V7qVSuzt7UVcVpZTPXkDAgLYms9Mrq074hkXF8d2a5o1a5bbViyPx0NeXh4qKirQ0dHhsTk8ICAAKSkpaG5uRn9/v8eCepmAsrOtFOaEFnFJSQlKSkrY15944gk8+eSTVu+1fKh8YWXsCGeDtSwtYssOMVNZ3tFSiCeivb0d99xzDwwGA5YsWYKkpCSoVCrs27cPf/vb33Dq1Cncfffdbo/FF1zTzi4IdDodZEIKQoHte0FRFAKlp1fczowBsN7btxRdtVrNRq/L5XKPRq9b4iv3YyowmUzYs2cPDuzdC11PD2A2g1IooBaJER8f79Z2WWhoKC696SZ88+67OF5TjdyQUMhEQhwbGMRhgx7Rc+cgISXF6e9zZGQkVCoVqqurWfF0Z05IT0+HRqNBTU2NR1J6mPrRe/bscboN40T4+flBIpGwBT88UQSIEWJmHmJ6y/u67jhiwuX3yMgIWltbYTabodVqodVqodFoMDo6ir6+PuTl5WHu3Ln4/vvvMTo6imuvvXaqxu0yXPeI9Xo9TCYTBgYG2BQhpVIJo9E45eUduVaSevXVV2E2m/G73/2O7dkLnN4L2r17Nz788EMsWrQIGRkZbo3lTLSIw8PDMajjo3dEhzD/8ZONRm9C+xCNdBst40ZGRvDTTz/h8OHDoGkaaWlpyMvLg1KphFKpxI4dO0BRFCu43gym8wZTtRCgaRrfbtiA9q3bkBfgj4ykJIgEApwYGMD3NbX4+qOPcONdd7nUlpChsLAQoaGhKNuzB9urqmDW6xGYmorzFszHnDlzsG3bNpfuZ3JyMhtJnZ6e7nZKDxMMplarJywewhUmr7i9vR2dnZ0eaZRhMpnYFCnLoDB3j2kZAHa2VN+aMGp627ZtuOeeexAdHW3VmF4ikWBgYAA33XQT5s6dixMnTqCjo2PKBu0OcrncKmraVnlHpVIJg8EAPp/PuhBd7RDjCbgUFzlx4gQqKytx2WWXWYkww+zZs1FWVoYNGza4JcS+ZBE7M46ioiJ8HJKA9ZVt+M2S5HFf2h9qOqETh2POnDlWno8tW7ZgzeuvgKfuRVaoGQYzsO1rPsRBMfjN7+5HREQE8vLyHLZuJLhPS0sLWnfvxsXxcUiwcHWmR0aCTklB+bE27Nq5E5etWOHWeRISEhAdHY3RCy7AyMgItFotRCIRBAKB3cpajqCo060OKysr0dLS4tb4gF+DwXbu3In+/n6PBOuJRCIEBgaioaEBYrHYrQUN8Gt5y5SUFGi1WlRVVaGkpMStSoUmk4ndvjmbqm9N6JqePn06HnjgAYSEhEAqlUIqlVo1bmYiAa+88krodLopGrJrjIyMoLGxEXv37oVWq8XixYtx7NgxvPbaawgKCrIq75icnAy5XI7W1lZQFIVUO/uKU4kjIT569ChMJhPS09Pt/j6zZzOZ45gqnBVikUiEa2/+Dd7994vQbmrBRQVRiA+RoXNIg001p7DlGIXSC1agra0NBw8ehMFgwMmTJ/HuG6txQbwGv1
sUjWCFGHw+D13DBry0uQv/e+NV/Pb3jzrdi/ZsY6q2pOpraxFlNFmJMIOQz0deRAT2VldjdMkSzm7Qsfv4zN9Mq0amylhXVxc777n6WZl+vnv27PHId4gJ7Ovr68OpU6cQHR3t1vFMJhNkMhmioqJYd/JEda4dwexfUxSF6dOno6amhs3+cNUjMLZ2tWX1LeDMFeMJhTgtLQ2PPPKIw4N4wjUyWTz44IP46quvcPz4cYSHhyMqKgparRYrV65ETk4O5syZYzf4gs/ns6stb+MolYpZZWq1WpsWMXB6n9Rd19CZ6po2mUzIzs7G5TfejW+/+hTbvjgKGDUwgQ9hQDQWXHQRFi5cCIVCwW47/OmJx5EbosWfL00Aj/frlzsyQIS/rojFdf9rx86dO3HxxRdPxkckjGGwsxNpfnaEgaYRExQEuqMDw8PD44R4bOcoRnAt9/GZ1LCxZV/NZjOioqKwf/9+AO7th4vFYqSmpqKxsdEjLmCKopCYmIiDBw9CKpU6nQdvCSOc8fHxVjnGru4bW1bq4vF4nBpPOHNMBks39ZlaCtOmEJtMJpjNZgiFQuj1euzZswcbN27EsWPHYDKZkJiYiJUrV2Lu3LlTPV6nueiii3DZZZchKysLYWFh2Lt3Ly6//HI8/PDDDm+Wr5S4BBxbojk5OZDL5aipqcHixYvH/Vyj0aC5uRl33nmn2+PwZdc0U+JxbOAU0xs3KioK9z/yRzYXPiwsDNnZ2ePcZcPDw6it2IXH5vhbiTCDRMjDhRlivL9v96R9xjOJqZj4BCIRtBZbZJbQwOmfCfhsbMdYK9eyc5S/v79T+/h+fn7IyspCfX09VCqVW8F2EokEUqkUDQ0NkEqlCAwMdPlYZrMZgYGBkEgkqK6uRmlpqcseGssgsmnTpkGj0aC6uhrFxcUubceNDUpjGk+Ul5ejqanJrvduIuyV4WTmA6YU5pmWfTButMPDw9iwYQMyMzMxc+ZMfPrpp3j44YcRFRXF9vfcvn07Nm/ejJdffhnLli3z6Wjp8847z+r/7kRNexNHQuzn54eLL74YX375JRITE5GcnMz+TK/X47PPPoNUKsVFF13k1jg8YRHr9XpUVlaiu7sbcrkcJSUlLq3kdTodent7x0UqMyUeJ+qNm5mZOeGxVSoVQBsRGWDfgxARIIRWrfZqiVdf+N5N1cIsdcYM1B1owByTCUI+H2bT6RQW3S959nsbG9EXEozm5marVo3BwcFQKBR2UwZpmkZjYyN279yJE0eOgMfjITU3F3PmzrX6HjGev7q6OpSWlrpsKZrNZohEIiQkJLDiac+LxeVYPB4P8fHxVmlNruzDMsYX8Gud66qqKrZkpbPPmi3RtGw8IZFInN7bnqge9plcfWucEHd2duLZZ5/F5s2b0dHRgSeeeAK33347XnzxRfY9PT09eOCBB/DUU0/5vBCPhYma5jJmX6o1zWUsv/vd73D8+HGsXbsWCQkJSExMhEqlQkNDA/h8Pl544QW38/ksLWKaplFXV4eqqioYjUakpKRg/vz5E05QW7ZswZo1a9DX1wehUMgWsbjooovwm9/8ZtxK1l5v3NHRUYyMjLBuZMt8XIlE4vbzGBgYCL5IhuZuNfLjbbtDm3u08A+MPCvyGN1lMr//jJcjMjIS2+QyfLx7D0pjYkAB4PN5EAqEaO7rQ7tIiAtWrMCcOXM4T8I0TeOLL75A2fr1iNXpMDsgEEbajPqv1qP2559xya23Yv78+aAoil2UBwYGoqamBsXFxS7de0Y8ExISoFQqUV1djZKSEpesOMtFYEZGBqqrq13eh2WinBn4fD7y8/PZkpWOFq+2jmdrDHK5HIWFhdi3bx8kEglb6ITrMSdaZFjWpWb2j88Ext15oVCIgYEBJCcns8nYzz33HACwBeXDw8Nxzz33nJF7Y3K5nK145ciFcya5poHTq82XXnoJW7duxTfffIPGxkZIJBJceeWVuPzyyzlXBOIyjs7OTjz11FM4dO
gQZDIZRCIRBgYGEBYWhsceewyzZ88e97s///wzXnrpJaSlpWHlypUIDQ1l96LWr1+PkZER/OY3vxnnWh6box0SEoITJ04gKirKI5/JFjKZDHOXLMdXP7+Pi3KCIBdbT+w9I3psbjah6Lx5k3L+cxHLSnSWCy9LL8fsZctQsWULNvX1Ik0uh1QkwvGBQTRq1Ljgppswb948pxYF5eXlKPv8c1weEooii2CnJTSNn1qP4tt33kFUVBQyMzPZBSjTEclWHWmun5OprDV9+nTW6szPz3f6WJZCTFEUuw/b2NiIrKwsp45ny7PDlKwsKyuDTCZzyoK1tLDHEhgYiJycHNTX16OoqIizR8xRhyjLVM8zqeDHOCH28/ODWCxGdXU1srKyMHv2bOzcuROLFy9m3Sc0TWPbtm14/PHHAZxZJS6ZwCy1Wu1QiH3JIuYarSwQCLB06VIsXbp0UsZBURSGh4fx5z//GSMjI1i1ahWSk0+nA/X19WHjxo3405/+hFdeeQW5ubns7xmNRvzvf/9DamoqVqxYAbPZDJ1OB5qmkZubCx6Ph2+++QYJCQnIzMx02Bt3KhqO3HDDKjywZzse/uI47poXgrw4Ocw0UHZ0FK/vGoQsOhNz5syZ9HH4EkNDQzAajQgODrYq5OMsBoPBSmwt62rLZDLWtcy0a7T0cpSUlODgwYNoPXIEowYDoiIj4a9SYdGiRU4JD03T2PnTT5hBUVYiDJx+zs9PTsGRmmrs2rkT06dPh9lsZl2e+fn5NutIc8FS8JjKVq7um9rah2VqXB87dszKte7ssRhkMhkKCgpQVVUFqVSK8PBwTscba2GPJSIiAunp6axHgEuENpdWjZbpnmeKGI8T4sDAQMybNw+33XYbXn75ZZx//vm444478OijjyIoKAgjIyOorq7GunXrcPPNN3tjzG7BiK9KpXIY7e1Ni9hoNOLkyZMwGo0ICQnxmbQhHo+HHTt24OTJk/j9739vFWgSGhqK66+/Hm+//TbeffddrF69mk0N2bNnD06cOIFFixZheHiYndCYXO2ioiLs27cPAwMDVgJuD1fbIDpDXFwc/rb6P/jHi8/hwW8bIOcNwGQGtDwZMvLOwx9+dy/6+/sndQy+AE3T+Oqrr/DOmv+ivq4OABAZGYUbb7mNDf6zJ4Amk2lcEwulUgm9Xs+mB8nlcnYfXy6XO5w0FQqFVdU/g8GAXbt2OW1NDg4OoqelBedFRNr8OUVRyA0Jxc/V1aBp2iqH2LKOtEKhQFhYGOfzjhU8kUjEBjEx18LVYzFjY2o9y2QyREba/nxjmUjkgoKCkJ2djbq6OhQXFyMgIMDm+8Yez9G9jI+Ph1ar5RyhzbWkJyPGZ0r1rXFCLBKJ8O9//xsrVqzApZdeipCQEAiFQjz22GPw8/Nj87QKCgrQ1NTkjTG7BY/Hg0wm4xSw5Y1gLbPZjD179mDv3r0YHBwETdMQiUTw8/PDBRdcMGmuWK5QFIXdu3cjMzOTFWGapmE2m2E2m2EymVBQUICvv/4a69evZ2tr9/T0QCgUIiEhwW5rtPDwcPT09HAex1QECaWlpeGt/72H+vp6HD58GHw+H7m5uUhLS8PQ0BD6+vomfQzehKZp/OWZZ/C/t17HkiTgzYt5kIso/NjUgdf+8Ry2/fwT/v7Sy6Bpml102SrzyQhuWFgYEhMToVAoPBbZ6mqte7PZDNA0RHz7YiHi82AyGlkhtjxHQEAAsrKyUFdXh1mzZnHOubUVnyKXy5GXl4fq6mrIZDLOxTTsWbH+/v7Izc1FXV0dJBIJp8hsR0GHkZGRVmlNjjyKXEUzLS2NFePi4uIJnwuj0ch5//9Mqr5l8xNHRESgrKwMra2taGtrg9FohL+/P6RSKYRCIXg8HgwGg0frkk4VTJtBLmUup9o1TdM0vvnmG5SVlSE5ORkzZ86EWCxGZ2cn9u7di48//hjR0dFey9umaZpNDZk2bRrUajWb6gacXuTw+XxERERAIBAgOT
mZjbYcHR1l25jZK7YwOjrKeTJzVYg1Gg12796NA/X1MBqNiE9IwJIlSya0aJi9t7GWur0vNU3TbIqLJ2tK22MyFyTbtm3D/956A/9YysNvZorY8100jY8b8024bF0ZVq9ejauvvhoDAwNsRzEmPUihUEyZNeLsOQIDAyENC8OBk6cQJZZAJBKN29NsGhxETEEhmy0wVqiio6PZgKtZs2Zxut/2BC8kJITtrsQlDclRX+Pw8HCkpaWxY+NyPEcWbGJiopUYTxQ4xcWNDPxadYzpAlVQUGB3HFyPaXnsM6H6lt2lB03TSE5OttpjGBgYgNls9ukCHlzgahFPtWu6tbUVFRUVKCkpsarm5efnB4VCgZ9//hk//vgjbrjhhkkfi73euEz1nZ6eHvB4PHZhxuPx2Ad8ZGQEQqEQkZGR7GszZ86En58f9u3bZzPP+cSJE+jv72dbazrClTSqpqYmvPy3Z6HtacWMUCOkQgrb9wLff/EBrr31dy6ldpnNZhw8eBANDQ1Qq9Vob2/H0UP1GB7oAcUToGDWfKxYeQUnd7uvYTQaseat/yIvgsZtBQKYTEac1nwaAIXCaB5uzafx/s+bceutt2LmzJle2Ytz1SKur6/HkEqFbxsaIO/sQqhcjsCICETHREMikaCpvx9NNHDF/HnseWydIy0tDUqlErW1tZyilScSvLi4OCiVSk5C50iIgdPlOpme1Y7KS3JxJVMUhczMTE7R2VyOx8BUHauoqJgwCM6VblNnQvUtu0JsOdAjR47gf//7H+rr69lV47Jly3D77befUblawK8WMVfXtNlsnrL0rOrqakilUqSkpIz7mVgsxvTp03Ho0CEMDg66VUEHANra2vDzzz+jvb0dFEVh2rRpyM7OZnvl2uuNOzg4iHnz5mHjxo3Q6/Xjut2YzWaUlZUhLy/PqmqQQqHA5Zdfjg8//BAKhQKFhYXg8/mgaRrt7e1Yv349m7vOBWct4v7+frz0/J+RwmvDfdclIMTvtOWiM5jwecUpfPL2agQHB6O0tJTzMbu7u/HOW2+AVnZBDiWqmzohhQZLp4kwuzgGepixqW49nirbirsf+hMuuOACzsd2BpPJhBMnToCmaURFRTmdQ8qkB41NEdNqtdhXWY6Hixih44HHo0Cd/g8A4LLpIry+bxRdXV1eC4hxRYi3bt2Kii+/xDyKQn18PHZ2dyNXr0PQ8BCaTp2CKjAAdUYjMi9Yxj6TTLDWWJgmDEyaj6M2mvaOw8CkIdXW1qKwsNDudeUixBRFOXU8rhYsE53d0NCA7Oxsj4imZcGPlpYWpKWljXuPsxax5ZiZ3/fF6lsON2n6+/txww03QKlUoqioCB999BGuu+46vP766zAYDLjzzjvdKuLtDZwRYlt7Q5NFV1eXlRVpCUVRiIiIwOHDh9HX1+e0EDM1dUdHR/Hxxx9j8+bNEIvFiIqKgl6vR1lZGQIDA3H//fezVbps7dWMjo5i7ty5qK6uxrvvvosVK1YgPj4eFEVhcHAQmzZtQm9vLxtRb8mNN96I0dFRfPvtt9izZw/CwsKgVCoxODiI6dOn4+mnn+Y8mTsrxD/++CMEI214bFUypKJfv8hiIR83zIlFx8BRbPjqM8yaNYvTvR4eHsarL/0VgSMH8PvlqfiyfBTqIAP+doEMPLMBSlMvkpJTcXFuCP5v60m88cqLyM7O9ugev9FoxHvvvYd3334Tw8ODAE0jLDwS1666GXfeeee4raOx6UGWZR6Z9CCmAAbTrlEsEgGUxu7kx9wCb0alOvv97OzsROV332GBnz8K4+OxLHM6fjp8CPubW6BWKtF+vB06bRjmX3ABMqZPh0ajgUQimbDhg2W0skKhQHx8/ITjdSSeubm5DoWdixAzP2cisw8dOoTp06fbFU6u95ERzbKyMo+KpkQiQVFRESoqKiCRSBAXF+f2MRl8ufrWhK5piqLw17/+FVKpFBs3bkRYWBi2bduGN954A+Xl5bjvvvuwbNkyn2
iK4AxcWyEyD6UzLhZ3EAqFdptnUBQFnU5n9wFiJliFQsFatbaCZurr67F582bMnz+f3YPm8XgYHR3FV199hTVr1uCVV16x+5BS1Ol2f6tXr8af//xnrF27Fn5+fhCJROjv70dAQAD+8pe/oLCwcNzv8ng83Hvvvbj44ouxZcsW9PT0QC6XY86cORPuC9kbhzNCvG/vdsxLElmJsOWxlmaH4sXtB9DZ2cmpeP6WLVtg6jmMR5ZGIcBfjL2Hu3HPTAGSQ4SgaQGOdmvQ29uD+IRE3LkgGtuPtmPjxo244447OI95IkwmE+7//X3YuvFrrJhmxGWLZRDwKPzYdBJvv/oCyvbswupXX2O7i9lKD+JSBKWopBTf1G7Ew3Nti93XjUYE/FK9zFs4K8S1tbXwGx5BQUEBAEAqFOLS7BycNy0d66r2YejwYZgGh2CqqsIPdXXYGBaO865Y6TDPVyaTIT8/H/v372fz3W3BRfAYYWciqW3l73IVYuZ4jLUpk8lsplw5WyHOskoW00bRneMxKBQK9jpKJBI2foOJUXHHC+ur1bfsCjHjVti6dSt+//vfIywsDFqtFjweD/v378fSpUuh0WjQ09OD1NTUM6q6ljMWMeC4D7CnyMjIwI8//sjmvllCURRaW1vZko2MdbN//358+OGHKCsrY1uEFRcXY8WKFUhMTLTqjSsQCLBu3Trk5uZi/vz5Vsf38/PDVVddhTfeeANbt27FypUrbY6RSQmIi4vDO++8g6qqKrayVmpqKhYtWuSwXF9iYqLbguSsEGuUowiOsx9IE+InAkxKzuVP92zfjDmJQvhLBThyahRmowHzEiXs2EIUApwcOZ1zKxIIUBrPR+OBWs7jdcSXX36Jn3/4Gv+7XIiFSadb9JnNNGbHS3BxugHXfrYdL7/8MlatWsVuLTA52c5MjrfdfgduuGYT/lOmx32zrS3sve1GvFcHXLnqSq8GbjobrNZ9/ATi5bJx89WGA/U4ebQVN4eEAkYjMlPTIFYosKe9HZvffRdKpdJhKlBwcDAyMzNRW1trN+DKbDZzssYYYa+qqoJcLh8Xm+Os0EmlUjblSiaTjato5YrIMaJZXV3N9h5253gMwcHBmDFjBmpra9l0KcY76a54WlbfAuATYszJNmcedLFYDLFYjNDQUCiVSgiFQp+pPOUMXC3isZv8k01BQQF27dqF7du3Y+HChawYG41GHDt2DIcPH8a8efNQV1cHlUqF2tpavPPOOwgKCsKyZcsQHByMzs5OVFVVobW1Ff/973+tXGRNTU3o7u7GokWLbJ5fJpMhNTUVZWVlEwpxe3s79u7di+PHj0MqlWL27NlYtGjRlE7GzgpxaFQsWrrb7P68uVMJSiTjXAJ0eKAPsdOkAGjQ9PgFqFjIB2gjjEajR1xgZrPZqnvQW2/8B4sSTFiYKAJowGQys89raZIE18wwYeuenfjb3/7m1kQzf/583HP/Q3jqX6uxsUWLq7N4kAmBzS0mbDhCoXjOAtx8881ezXF31gjgCQUwGK2/0yeHhtDYchSXBwcjXS5Hw+AQKB4PcpEI56emAUdbsGfjRly6apXV7w0NDWHfvn3o6+uDWCxGTk4OUlJSMDo6ykYr2yrbylVAg4KCkJWVhdra2nEpUq5YnAEBAWxFq7H5wK5asCEhIZg+fTpqa2tRUlLCZkW460mMioqCTqcbF7jmCSEGPGNhewqHM0RmZiYaGhowOjrKuiDfeustHD16FKWlpWz90TPFGgZOC7FGo+H03qkUYqlUiksuuQQff/wx1q5di8jI03WMu7u7MTo6ipSUFJSWlrIP+pNPPomMjAysWrWKfZhycnIwb948/Pe//8Wf/vQnfPTRR+y90Wg0oGl6whQhhUJhNzfWbDZjzZo1+PrrrxEUFITo6Gio1Wps3boV7733Hv72t795pEE5F5wtcLJg8VJ8+NoetPWqkRhmbaVo9SZ8Vz+I3JKrOO+9+weGoHOoB4hTID3aDzyBEL
vbDLhkuhiggWG1HmqtCSdPdoDmCbGjWYMlq3IcHtdWbW2mzCMAtltQW9sx3Hu+CEKRkM2RtGTpNBE++voUent7ORd0sMfjjz+O3NxcrHn7v3jwx3IAQFJSKp545nbcdNNNOHXqFOfv02TACHF/fz/a29uh1+shlUpZj9BYUtLTsXfPXuiMRoh/Ecnajg74GfSY4e+P7pFR8GRSqzS7OQkJ2FFTjdbWVixcuBA0TWPz5s3Y/Nln4Pf2IVLAx6jRhF0yKRKKinDLHXdAqVSirq4OBQUFVvOjs4LHpEjt378fpaWl7ALdVeGMiIhAamoqu1CQSqVsUKqrwhkTEzOudaIrEc5jYdKlqqqqkJeXB8AzFixzP3yl4pbDqOkVK1Zg586dOHXqFNLT01FYWIjvvvsOM2fOxCOPPHJGpjJxdU0Dk5NLbKvaENOMXCQS4ZJLLkFbWxs6OjrA4/FQXFyMhIQEyGQyNiji008/hVKpxOWXXz7uwZTJZLjooovw3nvvYdeuXUhMTIS/vz8iIiLA5/PR0dFhtzLOyZMnkZiYaPNnH3/8Mb777jucd955mDdvHnvevr4+fPLJJ3jsscfwzjvvuNyGzRmctYjnz5+PHVvn4rlvt+O6mYGYlxECkYCH+uMj+KS8G32iJPzumms5H690wXnY+lEdFqUZkRwmRmlGBD6sPYH8aD6Meh16h7WQiXgQ6vqxdr8RjR0ChNZUYXDwV7E3Go3jIpWVSiWMRqNVmcfQ0FC2exDTfIDP58NgNgKgfvljjeGXtaOnJprly5dj+fLl0Ol0MJlM7FgYvLkQNxgMqK+rQ0dNDSRKFRQ8Hk7SZhz080fyzKJxzRlycnJQHh+HHxobcXFWFoR8PkZ1OoTy+FDp9OjUahCWng6BxffKTyyG9JeexgCwc+dObHrnHSyUyDA3Lx9igQA0TaN1cBBf7NiJ/xmNuOeBB1BZWTmudKUrApqWlgaVSoWamho2Tcydrl9MQxgmrYnBHZFLSUlhxbioqMjt4zFkZGRAp9PhwIEDbI1uT+DJY7mLXSFmbvA111yD888/n524n3jiCWg0GjbQ4UxELpdjaGiI03vdqa7lqDcuU23IVjPysb2eu7q6cPLkSfb/Bw4cQFxc3Lj0IYa4uDgYDAY88MADCA8PB5/PR3FxMWJjY1FeXo5p06aNs6KOHTuGnp4em/u3Op0On376KQoLC9nUI4bQ0FDccMMN+Pe//42tW7dOSTMQZ4VYLBbj8Sefwv/ejsRbe37C23uPgUfRMPLliE2fiyd+e++Eka5jOf/887F983dYvbkGD17sj3svSMUj7ytxw8c9mB1jwKxYHiAUY3ONGU0DEjx4QSwq2ivw5ycewx13/c5q4cU8B1FRUaz4TjSB8fl8zCyehW8P78DN42PiAAAbGvVITpnuVOlFLtjafvB2f+rK8nLoGhtRMnMmYhMT2RzzYz09qNu+AwKBgBUG4HQ8xOU33oiv1r6Ht2prkaFQ4OTQELqGhzFdKkVAfPy4Z0Gp10OD04tcg8GALRs2oIjHxxKLVEOKopASHIxVgnS8ua8Khw8fZiOLFQoFGzHvSjwNkyJlmWfrjhAzDSf279+Puro6ZGVlAXBv4UZRFLKysthjuns8y+NmZ2ejoqICgGvXz9dx6Jpmqif19vZCq9VCoVDAz88PBw8eRF9fH9LS0jhFmfoScrkcnZ2dnN7LxTVN/7JSthWpzEQZKxQKu71xuY7D0jKfSIg0Gg26urpgNBqRl5eHpUuXore3F5WVlRgcHIRQKMSHH36IefPmISkpCVqtFnV1dSgvL0dxcbHVpMVQW1uLwcFBXH311TbPGRQUhKSkJOzYscMnhRg47Xa//8GH0LvqRjQ0NMBoNCIxMRGpqalOf7GDgoLw4GN/xjN/ehxPfj+ASIkWfLEMzQMU1AY+yrpE4PGA3PhA/KE0GLlxCuTFivDcz7Xo6+tDYWEhFAqFy6l/N958K+777S68U6nBjfnWX+OvG7
T4oYXCn5+/7aybsMYyMDCAzoYG5IaEIs7CO8fj8ZASGQmTyYSGqipMnz7dylOTnJyM2x64HzU1NWiqq4dGKED7iRM4ZjYjpb8ftfv2ITgiApERERCLJag8cQJUaChSUlLQ3NwMZftxlKbYzhaJ8Q9AnMmE6v37kZeXh7y8PNTU1LD9sV0VUD6fz6ZItbW1carLPRFMWlNFRQWam5vZ19zBMlXKE8dj4PP5bJWwxsZGuylYzuIr348Jo6Z5PB6+//57vPXWW/D394dGo4FOp0N/fz8oioJarcYTTzyBm2++2Wc2vbnAtcQlYO2attcbl+lvzJT3UygUiIiIGNc1xh3GCnFeXh6+/fZbDA0NWdWRpWkafX19OHnyJHg8HhYvXoy4uDjExcUhJycHn3zyCQYGBhAcHIxvvvmGzamTy+W48MILcfPNN9u8j2q1GjRNw9/fn402HIufnx+USqXbn5ULrlTWYggLC7MbsMYVvV4PPz8/3PqbuzEyMoKDBw/i+KFDSI8dwtqbEkAJhJCLhZBJRRDw+eDx+SgNppG8/yiOHTuG8847z63zL126FLfddR+eevPf+OqgGiuyjBDwgE1NJuw5QeGyq27Adddd59Y5uOJJC0WpVKKvr4991hwFz7W3t0OsVCIy0PZWS1JEBA4eOYITJ06M62wUHByMJUuWoLS0FB+9/z6a9+9HWU83wkNCkSiRoP/wEXSe6MBocCDKTSYUXnwx5HI5lEolaIMBIRNswQSJRFCOjAA47TFiRKS0tNQtS9ay2UR8fLzbQicUCllxBzwjTEKhEFlZWaisrERbW5tTHaAmgqIoSCQS9PT0QCqVeuy4voBDi1goFLLNm0UiEZv+sHv3btA0ze5Z+sqmNxcUCoXDPWKmTZter8fJkyfR0dFhtzcus383mddgrBBfcMEFeO211/DVV1/hxhtvZC0rlUqFkZER7Ny5EwkJCVb7vQKBAMuWLcOaNWtwySWXICYmBidPnoRQKMSMGTPsurmB0xGMPB4PJ06csBn8Q9M0Tp48OWVbFlPV9IHZzx8bPMXUWufxeMjNzcXs2bPx888/Y/dnzYiNtt0mjqIohMopzvEJE0FRFB577DHk5OTgP//+F/66twVmM43c/AK8/NgtuPDCC6f0O+nuBK7T6VBXU4PelhZQo6OgAJgkUvgnxCOnoMBu0wK9Xg8ZjwceZfuzCgUCiGjYzc8HgI0//ICRykq8eN752NZ0BNtaj0E6OAgFTePQqZM4JZPhhgfuR2pqKn7esgUDnZ1oPXEclWIxclNSIJPJrY5H0zS6dTpEWzRuSEhIgFKpRE1NjVU5WFcICAhAdnY26uvrPRKPIZPJkJmZifr6evT09HBuczgRQqEQfD4fR48ehVQqtaqy5yomk4mdqyorKyEWi73eBMdTONwjXrFiBVasWDHu5ydPnsTq1atx9OhRzJ49+4zy28tkMjboYmRkhC3SbznhMm3azGYzJBIJoqKi7PbGnQrGBo3J5XL8/e9/x4MPPoiXX34ZM2fORHBwMJqbm7Fv3z74+/vj5ptvHndPwsLCEBAQgNbWVixYsIBzMZb09HRMmzYNO3fuxBVXXDHu54cOHcLAwIBL9ZpdwdNCzKQHjfV2aDQaCIVCdnshPDwcycnJkMvlMBqN2Lt3L9uPOSEhAd/r+ega0iEycPxeqt5oxrFBGotsTHQ0TaOpqQmHDx8GcDpAZdq0aRN+pyiKwsKFCyGVSrFw4UL2tTMNg8GA8t27oT98BDPCwxEZeXrR1z8yguYjR1A5MoLSJUtsNguRSqUYNRphtvMsaPV66CjKZm672WxGZ2cnarZuxSKZHDytFvMio5Aik6NlYABGAIvFYtSplKitq0PVph8RODKC/KgoHKdpbK2thamnB2kzZiDCopViy8AAuiVirLAIgmL2ZPft2welUml1n4aHh9HQ0AC9Xo/IyEhkZGQ4vI+RkZE4deoU+vr6oNVqJ+z7ywWmzkBdXR1KSkomXJ
RzgalpwOQCi8Vizh2lJjomn8+Hv78/6+5n0mldxVe+Ly4nOMbExEChUOCjjz7CjTfe6Ja7ZSrQ6XQ4cuQIGhoa8M0336CrqwtpaWno7e3F559/zlq2TCNymUwGoVCIgwcPQqFQeGRF5w620nVmzZqF999/Hx988AE2b94MvV7P1oj+wx/+YNOKYEq8uVI4/Xe/+x0eeeQRfPrpp1i+fDni4+OhVqtRVVWFnTt3YuHChcjPz3fnYzo1HleEmKZp6HS6cWUeVSoV66Jn9vIs9/ONRiPbXYphbOzAzJkzIQ2JxxeVx3HP+QnjvuQb63owygtiRZPh5MmTWP3S39B6sApi8+ktEx1PjuSsIjz06OOcVv3enFDcXRCdOHECquZmzE5KgtxCUEL8/REgl6OsqQlNTU02q7UlJiai0t8fJwcGMM3GsZs7O0GFhSIiIgL9/f3j7nlbWxu0nV1Izs6GVCJBQEAAYmJiMMviPp/ctg01VVW4NDwC86ZPR1RkJGIC/PHp3jLU9vRAWVuL2aWzIZXLUd/VhR86T2Ha0vPHucKZxgbbtm1Dd3c3goOD8cnHH6P8x80w9fWBR9MwSyWIyMjADbffzqaG2iMkJATDw8OoqalBcXGxWwYCU2QkLi6OTZNyR9wZ0QwLC0N6ejpqampQUlLCubvaRMcETrv7mdzl4uJitxcO3sahEJtMJgwPD7OVSIzG00UK6urq8MMPP7Ch776ysrDFtddeiy+++AIymQwzZsyAn58fJBIJnnvuOeTl5SE6OtruImKqOzDZw17ebFpaGp599lk8/fTT0Gq1aGhowJ///GeoVCqbQnz8+HGo1Wrk5DjOaR1LQUEBnn/+eTz33HN477332AlYKpXiyiuvxN133z1lizEuQjy2vCPzt2V6ENPU3TI9iMFkMmHr1q3Ysuk7nGpvASgKGTkzsWz5RVYBbYw3SCwW44bbfou3X3kOxk1tuHxmJOJDJOhXGvBDbQ++PmTC8mtvs1rU9fX14ck/PICA0cN4bmkoihJPRzlXtY3i7V3b8eQfuvDPf73h82mC7nz/T7S2IkIotBJhBgGfj4TgYBxpboE+O3tcxTl/f38k5OWh7ttvEdXRgcSwcNAmE4ZHR3HoxAk0qJQIzclBVVWV1T0PDw+HXC5HQEAAunbtRlR4uM3PQNM0Wnu6kS0QYHZcHAx6PWiaRn50DIyzSvB9XT3KT3Zg866dCIgIhykwELmXr8C1119v83gtLS3YsH49muvqMDw4CEqnxyXJSbg9Nw9yoRBtw8PYeKABrz3/PB58+mlMm2ZrefHr2JiYjQMHDiA3N9fl+8CIXHJyMtutyVFv4ImwNMzi4+PH5Ri7M0aGmJgYtrogkw99puLwKjc1NeHxxx9nA3H0ej30ej16enoQHx+Pe+65B4BvlAmzxxNPPIF//OMfiIuLA0VR2LZtG2666SZcdNFFDh/cqe5JbA9HBSz4fD7kcjlmzpyJxMREfPfdd1i1ahXk8l/3r5RKJX744QekpKS43JZv5syZePjhhxEUFITOzk5IJBLMnDnTbl7yZGEpxGazeVzwnFKptEoPUigUbH9cLtsLBoMBr6z+Jxr3fI+SaBori/yhN5mxu2kjXvvrTlx83V3slo3lgmDx4sXg8Xj4+P012PlVK3hmPcyUAOKgGKy87bpxbv2vvvoK6DuCf9wQh0DZrxHUM5P8kRYhxV3rjmD9+vX4zW9+46Er53nctYg1w8OIltrf6/SXy2Ee6IdWq2WFmMlUUCqViIyKQltODn44ehRUXR0kPD7MIiGEkZEoWLQQhYWFdlPCoqKiQPsp0NrfjxQbi51BtRo9Q0NYkZMLmUyG0dFR9Pf3IzQ0FDNj45AbFYUPqqtRZjLh2vvvR3Z2tt2UsR9++AHvvPwyAnt6cblcjhGNFsMUheqWo9AbTXiguBhJgYG4KzcXr9fW4ouPP8Yfn3rK7hzFlHucMWPGhB2LuMAIJ5OCVFVVhfr6eof1te0xVj
SnTZsGjUbDCrwremErIDg5ORlarRZVVVUO20b6Mg6bPpjNZvT29kImkyEwMBByuRx+fn5ITk7G0qVLkZiY6PP7w2OtP7lczjlgxpcsYpqmHW4B8Hg8PPnkk3jiiSfwxhtvIDMzE6Ghoejt7UVjYyPCwsLwxBNPuHy/mDKKOTk5Nl2Fk4llmlh/fz9GR0dRXl4OjUYDHo9nFUAXHx/vVnrQt99+i8N7vsMTS0OQE//rIuP87HB8u78TH3z8lt399YULF2Lu3Lmoq6tDX18fu6c1dsVuMpmw/cdvcVmGxEqEGQJlQlyQIcE3mzbgtttuszl5+fL3jitCiQTaUfvR9kq1Gmqtjs2lZxZcAFiBnT59OpIuugiDg4NsXEdCQsI4Cxo43b6yoaEBPW1toHg8KMVi/NzcjNjAQLbSFsOe1lZAJEL0L3WZpVIp1Go1RkZG4O/vDxFfgNyoaHQY9CgqKrL7vLW0tOCdl1/GUorCkmnTMDQ4CINcgXi5DEdNZrzW1oYNQYG4MiMTfB4Pi+Pi8E5NDTo6OsZ1IGJg5gKxWIyCggJUVFS4vI1mOa8wLnSm+5MjF7ktxpa3ZHKB9+3bh/r6euTl5Tn97NoSYmbvvaamBtXV1SgqKnJK5H3l++OwslZWVhb27t1r9wBHjhwZtxfi6zgjxHw+H3q9fpJH5BjmoeZifSQkJOC1117Dxo0b8dNPP6G5uRlBQUFYtWoVLrzwQrd6GTszDnewVeaRSRNj8if5fD5SU1OhUCggFos99qUymUzYuulbLEnhWYkww8UFkdjT0owtP21G0cxim9eC6XYzESqVCmrlEFIj7LvU0iKkUB8ehkql8ul9MHeufXRyMo63H0eywQCT0QitVgONVgutRgutVovatmPQpaSwdQxCQkIgl8vZTIWenh4cP34c0dHRDmsa7N+/H/s2boJieBiJfn6gQaNfpUJZfx9Gdu7A4rRpiPL3h1KnQ23nKbRJJIjNysIppRKREsnpfc/wcHR3dUEoEoEvFOC7Q4dwyGzCU/fcA1A8JOZkY8GSJVZis3HjRoSMKnFNfj76+vqg1+rgJxKBx+MjlaKwSCrF1pYWXJY2DUI+HwkBAaA7OtDf329XiC3Fzs/PD7m5uaitrYVUKrUbZW6PsSInFArZYiRyudypYjeA7V7ETB60qwJvL0XWsj+yMy56XxFhgGOw1thatsePH8eOHTuwfv166PV6fPfddz4frGWJXC6HVqvlVAvVl1zTAPeOJiEhIVi1ahVWjSlS7y7Mw+upa8K0bBy7l8sEnTFWLtMfl0mfY6yjydg77erqwkjvCczKsx3lSVEUZiUr8EXDfhTNLHb5PFKpFHyBGD0j9nPau0f04AvkPr3/5axHjOkcZrnAalaOouunzciIjkHALzEcMrkcg3odgnJyMOuCC+ym1ZjNZk7nb21txb7vvkO+UIQ8i8m6ODkFyQGB+La9HT8oRyFVq0AJBIicORNXzpuH5uZmVK1bh2S5HIFSKaQSCYKDg3GysxOfNDfhcOsxFEdGIH1gECYAJ7ZswdrKSiy9+WY2i+BARQVKAgLYc/IFAhjMZlB8PrpHVAjU6nBwZAgP/vgjlqWlIS04CBAIJrzvY+fcsLAwq3xlZ54ZW/O3TCZDQUEBqqqqIJVKnarSZm+eEolEKCoqQllZGWQymVO16U0mk939ZT6fz7Z5dNWK9yachFgoFEKtVqOhoQF79uzB5s2bMTQ0hLCwMLbS0pkiwgDYfVMuVoYvuaaBqWvJaA+mPutYK5CmadTX12Pfvn3QarWIiIjAwoUL2S+vZXqQpeBqtVoIhUJWcCMiItj0oIkCRRhX/WTAHJc3weTO41HAL+9zNA4mDSogIMBqQSsUClEy/3z8sHsdLs03Q8C3/g4ZTWZsPKjCrAUrfH7vy54QMvn4Yz0bTF9kJljugpUr0VRfj77e3tMR6ioVhmgz6NBQ5M2a5TC3lYsQ19fUIFKjRX6q9T4qRVFYkpUFpckEYU
kxZs+dC5FIhMDAQNA0jZiYGBxtbMT7u3ZhdmgYSqRSmAR8vN/YiMbmJlwUGIR0GlD090NPAyECPvw0Gvzw3nuYNm0a0tLSYDaZILDo+hMYFISO/j4M9/XCbDDC32xGAA2EjY7irT270W0wwj8yAl9+9hnOW7YMRUVF4+ZYW+LJ5CszNaS5BlvZM6SY7k9MWpOtFDJbTNR5aazAc81bdmSEMCLP9Ee2VzPfF+F0lyoqKvDDDz9g9+7d6Orqwpw5c3D//ffjvPPO80iLt6mGEWK1Wu1QiN2pNe1JGAH0thAD4wPHBgYG8M9//hOHDx+Gv78/pFIptm7dig8//BDz58/H3Llzx5X7DAoKYnvk2trHc8RkFvSIiIiAPDgK+44eR2aM7Ymn8pgSqdMXTnicuro6fP7pJ6ip2AmYjRDL/LF4+WW45ppr2AXKyiuuwOO7fsIL353AvUuiEaw4LbgDSgP+b+spdNLhuH/l+LxtX4LpE8sIraXw6nQ6iMVidqE1UV/kxMRENjeWpmmk+vsjLi7OYRoNF4tcp9Ohq6kZ8yaw6tJCQlDR0oLQFSusxiaXy3HX73+P/4pE2FJfj7KmJphoM/b2dGO+WILSwCAkhoX+UlSExohWB8HwEA40t2DXjh1IS0tDZFIS9jQcxBy5AlqtFuER4WgyGiHV6ZAhlWGzWgUpn4cTGg0CDQbM4PHgp9Fi8IeNeGnzZhQuX46HH33UakFmq68xs2fqbLDVRMLJdFlj0pq4RD078jYGBQUhOzsbdXV149oxTjRGR95AmUyGwsJCVFZWQiKRuN15bKpwqKJarRalpaUAgMceewxPPPGE1arIVhN7X0csFoPP53PaJ/YV1zTgO2NhFgQGgwFDQ0N4+umn0dPTg2XLlllZwAcOHMDWrVsRGRmJyy+/fFx6kLtjmCwhFgqFWLj0Yvz00asonaZEWqR17uPWhl40jchw3/nLMDo6anMcW7ZswasvPoU02RAeKvFDqEKIw10D+Hb96yjb+TP+8cp/EBMTg9TUVPzhqRfwyt+exY3vd2BG2On729DLgzAwFn946inORVemAiYP21Js+/v7YTQacerUKXah5UrAnEAgQLyNhgtcxuQIk8kEmE0QTWA4iAQC0HqdTevQ398fFyxfDvqCCyCVSnH8+HE03HcfzouMRJhUCoPe8ItAUfCXSJBGBSH2ZAfKt26FSqlE8/79ONrTg+9UasyQiDEw0A8hRSFKIkWbTofvlEp0m4xIoij8JigEUqkEJ2gaGcnJaNMb8K+v1uPvZjOuuuoqJCYmssWGbImnZb3nsZ2f7OFIOFNSUqzSmhwJIhfRjIyMtEprclQljOu2XEBAAHJzc1FXVweRSGS3kMgZtUfM1Db19/dHT08PXnvtNUybNo1NkznTRBg4fQO41pv2Fdc04HwPXk9hMpmgVqvZyddkMqG2thYGgwHNzc1obW3F9ddfj6ioKAgEAggEAlAUhcjISJjNZmzduhVXXXWVRx/8yS5xuWLFChw5dBDPfL8F8xP7UJgY8Ev60iCqukVYvOIWFBQUYMeOHeN+t7+/H6+99DyWxyvx0NKE025sALNS/HFZnhEPfH4Er7z8Ev65+lUAQHFxMd5+/xNs27YNhw4dAgDcnJmJRYsWcSqAMFnXgdm/H2vlMnnYjJXL7Oenp6d7ZXLj0kdXIpFAGhKKU11dSLATV3BqaBAByck2rUzmPEzGSGtrK2i9HqEKBWQSKdRqFfh8Pvu7fmIxeEYj9ldXI7C/H/fExKFWIMTWxoPoUo4iTqWC0GRCi0SCHTotRqUSRBgMuCs0HCFiMcQSMbqGh3GyfwCVGjV6O0/hq//8H/b/8AP8o2Nw3mWXIjMz0+7zIRKJ2D1TxhMxEY5ifCiKwowZMzhHPXPtRcz0G2bEeKJFmzP9DMLDw5GRkcH2XHankMhUwMmvXFlZie+//x7ffvstvvrqK/B4PMjlcqSmpqKkpAR33HGHT60uHMG4SLkIsa+4po
HJF2LL3EzL4Cm1Wg2BQMBaO3w+H0lJSYiIiEBNTQ0SEhLsFmAvKirC2rVrceDAAY+mO022m14sFuOxPz6JjRsLsG3z9/h513GA4iEubS7uWHUJ5s2bx753rBBu2rQJQm0v7l4Uy4owQ5BcgDtmB+Hpn/eitbWVvW4KhQKXXHIJLrnkkkn7TPawVd6T2b8XiUTsfbdX5vXw4cMQCoVemwO4LER4PB6mFxag7osvkKFSIUhuXR+6b3QUrXo9in9pPm/vPMxnlMlkMAmEaFKpkSRXQCKRQqNR/xLRzwdN0ygbHUWIRIy7p2chTCZHfmQkEgMD8GVNDXYPDmJUpUIyBVwwYwaODg1C1tePmICA04WTDEbozDTeaW6CWa/DzQo/iM1mxAUF48jwEL7/939Qlp+Hhx591O545XI58vLyUF1dDZlMNmGJyYlc05bXkElrcmRpm0wmTkYaRVHIzMxEdXU1ampqbO6FWx7TmdSkuLg4qxxjd8uATiachJjH47GThNFoxNatW7Fz507U1tbijTfewA033DAlzeA9iWW96YnwFXcw4DkhZrpI2SrzyKQHyeVy+Pv7Izo6GnK53Co9aGBggO2drFQqJ9zfYdq+ce12xZWpaPogFouxYsUKXHrppRgeHmZ7SFtaSJZ/Mxw+1IjCKEAutj1plKb4g/fjcRw+fNhjHWSGhoawfft2KJVKxMfHY86cOTYnLSYtbOx9BzCuXacz+/feLrFpef7+/n50d3eDoihERUWxqTy5ubloa2nBDzU1yAoMQmJoKGgAx3p70DA0jLCSYsyYMcPueSyjs0NCQhAUGYldfX3I9FMgXiqD2XzacySXK7BjoA/H9HpcERODsF+aQlAUhYUJiUjh8XFAq8WaPbtxfXAIlqSk4G9VVZDzeAB1emtEqdXgp9ERaM1mPBUdjQiBELXDQ4iRSpEbF4ecwUH8pbwC5eXlE25dhISEICMjAzU1NSgtLbU7T5vNZk5bCJaWtkwmmzC1iqtoWqYfNTQ0IDs72+bz5EqHv9TUVLb61tjgNV8yHp2OtBIIBFi6dCmWLl0KANi+fbvPR3SOhfqlCPy54Jq2dC9a/m0wGCCVSq329Lh2kbK0RkNCQtDW1mY3YKanpwd8Pt/tgu+2xjBVDel5PJ7N3Gt7X2Qejw/TBLfpdHCTZyYCg8GAdevW4Xd33Q69RgU+RcMEHqJjE/DwH/6IkpKScV2jmLQwhUKB0NBQNi3M1fFM1X2Y6PwURWFoaAhbf/oJnQ0NoEdGAQqgAgKQkJ+PRUuWQC6X49KVK1ERE4P6qirsPnIYBqMR4tBQFFxyMUpLSycMPrV0gaenpyNrZhGO/PgjXj3agiy+AClSGQwUcLirCy08CkK5DNlj0nOYsS5JSsLHdbX4sacHirIyGAYGUatVQyVXQCqR4JROhwadFpcGBCFSKMKIXg/wf01nyggKQh6fj93btjlMUYyLi4NSqZzQ/etM+qlcLkd+fj72798PqVRqM4XQ2XRWJu++rKzMboUwV4SYqRTGWNyFhYU+meHjlBBXVVXhwIEDMJvNMJlMWLly5bgC9mcKXIt6nCkWsdlsttrHtZUepFAoEBkZye7tuVqW1HIc8+fPx9atW3H06NFxK3OaprFnzx5ER0dj+vTpLp3LHlMpxM6OI2vGDHy84yuMaIzwl47/iu1sGgEt8pvQ+uICTdN44o+PY8ePG/DwHD5uzBcjRArUnjLhlT2teODeu/HYk0/hvPPOs+oadSZmOjhCrVZj/UcfQdDcjMWxsUhMSoaZptHa24vKrduwob8fV1x/PcRiMeLj49HR2oqh7m4ITGZIjEacam1Fa0TEhO5WS4tYr9dDB2BAq4MfgCajEXUD/VCbTRimeFAp5BAHBKCjrxfq+HjIfinhyTwrbcPDGNHqcFStAr+3B1kSKRpGjfiutwcZYgl6JRLwRWKki0QwmUzo1KghDw62armY4e+PL44d4xQ1zuyX1tbW2hQjZ0UuODiYbb
pgaw/WFdEUi8VW6Udj97Vd7XnPBK85sri9Cedv5BtvvIG3334bfX196OjowMyZM7Fhwwb84Q9/wIIFCyZzjJOCM3vEXEpLTgU8Hg9Go5Et82hp5arVanbvXi6Xu50e5GgczISSnZ2N4uJifPfdd5g/fz6ys7MhFAoxMDCAXbt24eTJk3j44Yc9fu18RYiB8Rbh0qVL8fF7b+OVnzrxxIUxEAp+/exdw3qsKRtC3qxL7Lr17MHk5DL3vaqqCt+u/wL/vpCPq7JFbIpbYRwfH1wjwk2fq/Hxh+/hrrvumtRa8N4ucWs2m3Ho0CGYmpqxYsYMyH553nkApkVGIjIgAF8caEB9fT2CgoKw/auvED2qxNykZATJ5RjVatHY1oZdHSehv+xSZGdn2zyPpUX8+SefQNbWht/OnYeatmPQDA9DbtCjZmAAeq0OiQI+RHIFdh49imSNFompqafzWmlAazTizYpyzKBpzMrKwmEa2N3TgxGREP9TqzDHaEK2nwKUQIABkxGHhgZhlMmRmZQMy8usMRohkHPzZDDuX6bgxdiFsSvzW0xMDJvWNLaZA5c9Z1soFArk5+ejuroaEonEytp2VYiBXy3u8vJyNDc3O2wxOtVwEuLt27fj+eefZys1XXLJJXjwwQexe/du/OMf/0BSUpLTKQfeRiaTcbKILStaTbUQ2yqG0NTUBJqm2U4ycrmcbd0okUim5OGydE1TFIUHH3wQa9aswY4dO7B9+3aIRCLodDqEhITg/vvvx6xZszw+hsks6OEMtq53YGAgHvvzc3jhqcdw6/vHcVGWDGF+QjSeUmNzsxH+Cfl48GH7QTbMnrrlfWcarjA5uQqFAuXl5UgKAlZmCSAQjon0BfDgXDEu/KAde/futQouO9swGo040XgI5wUHsyJsib9UilSpBA37qiDg85Co1mB+Vhb7c7FQiPkZmZC1HkXVli1ISUmxuZfKLDh6enpwaO9eXBYTh5yoKCxJTUV95ym8W1GOXLkC1yanYkivAz8iAl/zeNg70A9tox5CoRARERGo7OnByZ4e/DY8AoVp6bgkIAD9Wg2GdTpUdXfj+5Zm7O7qglbAx2aVCTfGxSEqNBRyizHRNI3K4WFkL1nM+ToJBAK2xKRcLreqauWqoZGamgqVSjUurYlr1LQtQkJCWGubKSJiNpvZJheuIhaLUVhYiIqKCkgkEo/FZ3iCCYWYWYH85z//wRVXXIG///3vAE5v2EdEROA///kPpk2bhoaGBsTHx/uE1cgVrq7pqahoZTKZbJZ5tJx45XI5JL+U1ktOTvbqdR4rgmKxGPfccw+uvvpqVFVVQafTISIiYsIi+O7iyxYxcLpX9Mv/twbr13+Fd7f+CKNei4CQJFxy20pcdtllCAgIsMrJteXdYO57aGgoEhMT2QA5hv7eHhRE0uMisxkKYvigaC1OnDgxaZ+dwZvWhUajgVmtRvgEdaYj/ANQfrQFEVIZ8lNSbL4nJz4BRxoPorm52ao7mWVZVx6PhyNHjkAwPIzpSaePI+TzQQOQ6vT4XWISwsViNPT2QCAU4vLUVHzX1oZPuruwtawMiQkJWH+sFdNFYuRMn84GOoZIpAiRSJEcEIjLklNwV0UZ4ubOxbHycvSEhCBOJsPw8DACAgNBg8anLS04LhHjtmXLnLpWMpkM+fn5qKqqYp8ty8/mLJbNHCzrPLtjvQKnrW3LtCbmWO56dhQKBVvVSyaTcer1PRVMKMTMBNPT04Pi4l9r6kqlUvT19QEA2xrsTMMZIWYeLHexTA+y3Msdmx4UFhZmc+JtaGiAUCj0+mLHXupQWFgYli9fPmVj8KQQq9VqDAwMQCqVIiQkxKlx2CMtLQ1/+MNjMD/yKDQaDdsfuaurCy0tLWxOtlQqZa1cZ7wbfv4B6Gyz/56uURo0KM5lCV3F3n2gafq0+/ybDTh5/BgkMgUWLFqC5cuXe3RMAoEAEPCh0unsvkel08FA01DQNPzs1GAWCQQI4vExPD
xs8+eMRWwwGCCiKAgsRKHu1Ckk83gI/8U9KwAFiViCqOAg/NY/AAdUyXj7yBEEFhYgUiZFbF8/QkNtV/kS8fkIkEqxdOlS9Obn4+O1a7GttxdpPB6oDgEOGHQYCgjA+VdfjYyMDK6XiYUpW2m5v+uOcDLNHMrKyli3ryc8iCkpKawYMwsjT8x9QUFByMnJ8alYCU4jiYyMRH9/P/t/Pz8/DA4O4osvvgCPx2M31X3J5+4Ipp0ZF5wN2GLSg8ZGKjN70kwxBCY9iNnHPVt6I08FnhLi7u5ufP7ZZyjbvgkmrRLg8ZE8vQCXrbwKJSUlnI5hOY6JamozvZHlcrlTvZGB01bfiRMnQFEU4uPjIRaLsfzCC/Hwxg043GNCdoy158FoMGJNuQo6A4V1776FpiNHcOlll7ncr9ZZjEYj/vLM09i+8Suk+OuQF8VHX6cZr/9tCz589228/NrrHhuLSCRCeFoaDh1txbTIyHHfI5PZjCMDA4ibngl9W9uE1p/ObLLrxWGCtSIiIqASCtE9OoqIXzo4DQwNIVCnRV9vL2iKwqBOiySJBCEhIejq6kJRcAh2RUXh/PPPR0BAABq/+x6jej38bLjSDw0OQC+VIjU1FRdeeCEKCwuxaeNGHKitxcDgIIrmLce1116L9vZ2l4UpOjqajaQuLS1125s5Nq3JHdc0AxPxvH//fhw8eBCA53oaRERE+FS2z4RCzHzo0tJSVFVVoampCdOmTQOPx8MTTzyB1NRUrFq1irWWzyQhlsvl6Onp4fTeiYp62EoPYqoPMZYO4wKybN3mCr4ggMw4vO0WZoTYnUChkydP4pknHoFk6AhunOGHjGgFBpQG/NiwHa88V4Ub736U7Z4zFmaxRdM0Ojs72f18ZnHnqZraarUa69atw9aNX0Mz3AsAkAVF4vyLLscVV1yB+KRU3La+Ce9fLUBm+Omvs1Klxmd1WrxWBizJCMYF0aew9ad3sW3jV7j7oSftfiZXsXUP3nzzTez+4VP8bZkc52UEsz/vGTXiofVH8fD9v8PHn3/N1n139/zTZ8xAQ18fdjc1YVZqKoS/iIDWYMCupiPQRkXivMWLsf2LL9DW14dkG40GuoaGMCqRWMW7MHuTQqGQDdbKyMhAQHIyfm45iqtmzMCRxkNQdXZCOTIKii9El16HPpMJukONGB0dgVQmQ0dPL7QA25u6qboan7U049bM6VbNRdRGA75qb0N8cQkbUJSdnc0GkPX19aGmpgZBQUE4duyYW8KUlpYGlUqFmpoaj1iwloFW7u7nMjARz2VlZey8cybpDFc4CfHKlSshk8mg+8X1c/fdd2PHjh1YuHAhLr30Up9aWXBFoVDg2LFjnN7L4/HGRawyf4+tPuSJ9KCJxuELQuwLzScsi2q4+sV8643/Q5CqCS9ckwQ/izSj2WmBeH/3SXz49msoLCxEWFjYuMApJhcbAEZHRxEYGIjQ0FA2F9sTk4VGo8GfnngMXQ07cHmWBLMXB4CmgV1Nndjw4atoOtyI1/+7Bjevug4L13SiKFqHCJkJlSeM6FJSuG5WBF6+NgUCPoVb59J4c3sn3nj5eSQnJ09qmzilUokNn3+IWwoEOD/T2gUd7ifAP1eE4ZL/HcOmTZtwxRXuN7SgaRpRUVEIu+oq7PjmGxw9UI9ovgA0gA6TEfyYGCxbuRJJSUlozc1F5c5dUEgkCLdo+DKkUmFn61FIMjPR1dWFTZs2ofvUKRjNZkTHxGDlypXss8bn87HyhhvwwWuv4Z8//ohEpRLzgoPxlUqJSo0afJqGlMeDqKcHg0NDGJTKsEk5ilPxcQgKCoJarcadDz2E1//6ArpqazA/PAIhUgmOj4xiZ38fDMlJeOL399l8hkJDQ5GWluYR8aQoCjk5OaioqIBOp/OItckUEDl48CDbP9pdhEIhpk2bhtraWhw7dsyngqw8BSfXdEJCAu666y4AwODgIBYtWoTrrruO/bnRaPQpfzsX7FXWYnqlWk68Op
0O9fX14PP5rOAGBwezPXKnqt42k77kbXzFIgZcLybR3t6OI7VleGx+CCvCNACzyQSjyYjLcgPx3YF2vPnmm5g/fz5bVWvsYmvv3r1ITU112MXLFdavX4+TB3Zh9eURSI34NWJ2WqQMs1JU+MM3W3CwqBjPv/gPaLVabNq4EVu3b0Wsvwrv3Z2K/PhfJ0Eej8Ldi6JQebwdGzZ87XEhthSNqqoqaEf6cbmdfs6R/kLMjjVj5/ZtHhNiputQXFwcGhsb0d3VBYqiUBITg8zMTDYKesHixdik1uCb/VUI0moho3gYVKnQrteBFxMD+eAgDvzf6wjWapAkV8AEoOXoUfy9vh5zzz+fFauMjAxccccdeOrBB3HKaESQTotOgRDvjAzjPIkUy/z8ITCZcEKjQaVyFE0mEzAwgC+//BKzZs3CrFmzEPjSP/DN11/j4927QatGIZIrMOumG3HZZZdN2DWIaXXY0dHh9veQ2d/dvn07urq6PFJ4JyYmBgcPHsTBgwdRWlrqkflRIBBAIpHg6NGjkEqliIqKcvuYvmRZc1bP0dFRfP7559i0aRN6e3uhVqsxY8YM3HfffcjLyzvjXAZyuRwajQbff/89WlpasGzZsnG9UhnRHR0dRWxsLGJjY736GX2lypevWcSu0NTUBLN2CDOiYzD6y1bC6WtLg88XQCDgoyBGgG6NCnPmzLG7h+/K83D06FGsW7cO2zZ/D7Vajbj4RFx+1bVYuXIlWznJbDZj8/frcV4Kz0qEGbJi5Jif0Icfv9+A6268FZdeeimKiopw15EavLgs0kqELce6ZJoUn5XtBPBHp8dtj7H3QKvVArQJQdIJ2uDJeOjXeKbsqWV+r5+fH7u3bxmr0dvbyy6sFYEBCM7JQX9fHwaNRvgHB+OSGTOg0Wiw55NPcHlUFHJjYtgF56hOh48rK7D9h40oKSlhq6zp9XrkR0dj1dx50BiNaOrrxQfbt6NGo8WBnm7IeTzoaDMUMjluj45G46gSW9avZ4OZMjIykPH442zApr+/v1UuLk3TaGpqQmdnJ6RSKXJyclhvS2ZmJjo6OtDU1ISCggK35iWJRAI+n4+Ojg6EhoZy7g9sD2aO8vPzQ01NDWbOnOm2tc3UrmaCzCQSic1qd2cqnITYbDbjX//6F1avXo2FCxeiuLgYBoMBP/30Ex588EG89dZbSEtL82kxrq+vx/79+9HQ0IADBw6gsrISw8PDaGhoQFpaGpYvX263V+rg4CD4fP6kf7bR0VHU1tbi1KlToCgKCQkJyM3NZYuV+4pr2hfGwVWImZzcsXv4jY2N0OkMUGn08JeJIJNKwRfwT99n/HJsngAyucJh/1VnFgM7d+7Eow/cg2BqCNdNFyDMj4/9J+rw6l/r8P03X+ONt9YgICAAw8PDGOztRNEM+5b2zEQ/bNndznpJjEYjQJshFdmf9GQiHoxGA+fxcsXyuxEXFweaL0H1CQ1Kk8fvAZvNNGpOmZG1zHYakauMjo5abR8wJT25RKWbTCa8+coryOILkD+m0Iq/RIIbS0vxws8/Y+vWrbjpppsA/NICFkDsL4JQ3taGUrkCaaLTz4uIxwffaMC0iEiEh4ZimmoUlc3N2LlzJxITE9lUH5lMxhZKUigUSE1NxYEDB7D2rbfQ0dAAWqMFJeBDGhmJZVdcgWuuucbqM9uqaucsZrMZmZmZqKurQ0lJiZWHh3GBc53/mLkhOzubrcaYk5Pj1vzJjCEsLAzp6emorq5GSUmJz3dV4orD9CWKorB792588MEHePHFF1kXNQAcOXIEd999N9544w2sXr3aI5Fyk8Vf/vIX9PX1ITs7G1dccQUWLFiATz/9FHv27HH4u1MRrVxXV4f169dDo9EgNDQUJpMJ+/btw5YtW3DdddexucPeFkDAN4V47JbC6OgoampqcOzYMYjFYhQXFyMmJobNyU1NTUX51u9Rd9KIC3LHr6xHNUbUdlG4/NLccT+zNQ4u9Pf34/GH78eCiG
G8cnkIRILTv7uqCDjSrceNn5TjxRdewN/+/vdf4i4oaPX2PSBqvRmg+OyiMSIiAlL/YFS19SE71nYQVGWbGolp9qPBtVotampqoFKpEBsb61Jrw+nTpyMlMxdryipQGC+FSGC9MPiuYQQn1FL88dJLnTouYN0PmfkzMDDA9kNmBDciIgIpKSmcYzVOnToF1YkTyLYoq2imaajValAUBYVcjhSRCI11dejo6EBsbCxCQkKgFQjQp1IhVC6HVq+DWadHtFCADIUfaABdKhVEv8TQREnlCOLzcbK5Gf39/WhtbYVarcbH77+Pttpa0DodIBBCEBaKvo4OFNLAIwkJSAsIwJBejx0nT+Lb//wH/f397Dycl5eHffv2sVsmrsAEPYaHh8NgMKC6uhq5ubn46quv8OmHH+J4WxvEYjEWL1+OW265xSrH2hYmkwkURUEoFLJpTe4uFizTq+Lj461yjB0tlM8EOAlxV1cXxGIx7rrrLtA0DaPRCD6fj/T0dFx44YXYsGED+35f5csvv7T6/8aNG/Huu+9ysuIn2yXc2tqKzz//HJGRkSgpKWEfLLVajZ07d+LDDz/E7373O58QQMD7rmmj0YiRkREAQEtLC5suxGwptLe3Y82aNWhvb2d/5+OPP8bll1+Oxx9/nPUwzJy/FB9t/xiZMQokhP6aW2owmvHaT8dB+8Vi0aJFDsfD9blfv349oOnHCxcHsiLMkB4hwu/niPHCpm/Q8/DDCA8PR9qMAvx8eDsWTx+/b0fTNH4+MorswuXsBCWRSLDogsuw4YvXcf50HWKDrSeoitYRVHUJ8MCdl407ntlsxtq1a/HZurVQDXQBtBk0X4TUrHzc/+AjyM/P5/z5KYrCg488hofuvRN3fdKJ20sDkR8rRZ/KiPW1w1hXb8byK262W0qSgSl0Mzo6Oi4jgUkBVCgUMBgMrCXpqtWl1+tBm0yQi8Qwm804fuIEuk6cgEGpBAVA7O8Pzego1CoV/vmPfyDIzw8h4eEYFouxraUFV+bkwF8mxyGNGgGS0/dLqdOBFgig+CUy/JRWC6lEApNOj6SkJGzatAnbv/4aGWoN7omLQ5yfH3rVajy6ayfiDUbcUFKCmF+s7RCJBCtTUhDZ2Yl31q9na/z7+/sjJycH9fX1kMlkLsUqMN9lHo+H5ORkdHd348oVK9Dd1IQlEimuVygwZDTih88/x3UbNuDFV17BZZeNf4YYLIPILKtZyWQyRE9QdGUixuY5T5s2DRqNZlxFL2fwJe8tJ9e0XC6HTqc77YoRiayipJnIUcC3PpgjFAoF59Z8k92TeNeuXZBIJJg37//ZO+/4turr/b+1JUvy3nvPxCPTmUAGJIywadgUWtrQFlpayrcttEAKLatAS9l7rzBCQggJ2XjG205ixxmO9x6yZMvW+P1h7kXe8ghxf/C8XnlBYuvqXunez/mcc57zPMsGfYYuLi6sWrWKjz/+mOzsbObMmTMjAvH3RdYaamThSJ4TCCBSqVScydVqtRQXF/PQQw/h4+PDrbfeKooC5OTk8NFHH1FXV8ezzz6LTCbj57f+ggdqq7nz40yWhEK8v5Y2Yz+7j/bQqQzkd3+6d0yLR5jYPHN2ZgZnhdlxG6V3elGSCxt3tZOXl8fatWtZuXoNjz2wmye3n2LDiiBUioHX2e123slq5FCHlj9edPFAT/ZbXHfddRTl5/CbD3K5PEXDoihXzBYbuw538PkRGwtWXDpsc2G323nkkYfZ9sErXDNbyqUXuuOjk1NU28MrWZn87lc/4/H/vDCmn/TQZz81NZV/Pf0CTz3xGLdvyUdiNYBEitbTn+tvu4Gbb755UFWjt7d3WFm5p6dnkGHJaH7IgiDOVNYfd3d3JGoNte3t9DU20nXqFH4KBZ5aLRIk1HR0sK20DFl1NWeFhuJqs2NSqzH0mHjHZMLU30eUpxe1djvHzL1oJBJMdht6bx/kcjl2u52MtlY0ej1oXdDr9eTu20d0ayu3z18gZs
1NJhMeEikX612pPXECHy9vlMrv1ttF/v58Xl/Prl27xGqFn58fkZGR5Ofns2jRoglniMLaJpSfP/vsM9qPHOG5wCASHe7/a339+Gd1NX/+/e9JS0sbVdZ4aGVUr9eTmppKQUGBqA44UQwNxI6KXsXFxaSmpk7o+59psWrMQCycbHh4uNg/mD9/vvihdHV1odFoWL9+/aDf/1+As1rTcHpL00ajcUzChVwuJzIyUvzsZ0IgnmhG3NPTw759+ygpKcFisRAZGcnKlStFBStHUs1Q1TGJRCIG2aFM9d27dxMWFiYSnAAeffRRvLy82LBhg8jk12q1nHPOOQQGBvLyyy+zZ88eVq5ciV6vZ+NDD7Nz5052bf+C/aVVuLjoWHDBSs4///xh7i9ThcXSj2qMJ071bZbc0dHBQw89yJ7tn2Po6uTfO7p5ZlcNy2I9WBLjRsbJXk71unLNz29n7ty5g9orrq6uPPL4U7zx+uu8vX0zr+a3g0SCq08YV956FT/5yU+GZQ+HDx9my0dvce/Zai5N/W7hXRypZX6YC7/6oJ5/PfpP3nr3gxHv0dE2Iqmpqbzy2ptUVFRQXV2Ni4sLycnJWK3WgVKww+ZKqGZMRuhmOrgpXl5eBKck89Vnn5Fi6mG2pyd6tRrsIJHA1sqjeNjtXKbVsTgqGrlMhsVi4WKNC08X5LGjq4salYpmjZpX21q5ytuHBQEBuLm50Ww2s7elmRMKOR46HarQMI4fP05vXR2XRkbR2dGBt5cXEqmUpp4esFpJ93DnUGcXLS3Ng7JIiURClFpFY00NCQkJ4nVHRkbS3d1NQUEBCxYsmBA5SniWZTIZra2tfPHxx9zq6UWkUkV/f7+YeMkkEn4fHMy+yqO8//773HXXyHrpI6l0eXt7ExcXR0FBAenp6ROeHx/pmALjOysri/Ly8kmpjM0UOBWIY2JiuPfee0VZOuEDcXV15ZJLLhH/fqalFycCnU7ndCA+naXp3t5ebDbbmDemTqejp6fnjJeEBUykRH748GEeeughWlpaCAwMRCaTsX//ft544w0uv/xyUlJShgmgCKQaQQBltEV2aDZaWVlJfn4+11577YjjdHFxcYSEhPDRRx+xcuVKYKCce+GFF3LhhRdO4pMYPSM2mUycPHkSGCAv6fV6kman8OXbe+mz2IeVpgF2He3BJlHy7puvoeo6zoY5ahZFBtJu6OHzgmY+LGknq0HJlVdfx4aL1pGYmCjO9jvCzc2N39x+Ozffcgs1NTVIpVLCw8NHnff/7LPPCNL0cnHy8B6jQibhZ4vc2fBZqUi6Ge1zcISjnKtCocDX15fu7m5ycnJEhTGdTkdgYCB6vX4YQXIimC6S6NkrV/KnDz+kv7WVYFdXrH1mcptb2VFziqLmFta7uRKmUNLc0EjS7Fk0NjaisVj46azZvNzYwOqbbmLBpZfywmOPsbWzk6KODpQdHfRKpeg9PJjt50uu0ciC+fOoq6tDb7MR5+dPe3s7HR0deHh4opbJQCKh22ZDK5WMOGLZabGiGvJ5SSQSZs2aNSmrP0ExTCKRUFhYSH+3kdURESjlCsy9vUglUmTyb9sfUilLVWqyDxyAMQLxSN9laGjoILemiYw1jSbB6ajopdFoBhlZ/C/BqdK0cLEwsFu3WCxYLBYkEgm1tbX8+9//5vHHH8dgMJxWoYDphE6nw2KxDOjGjnNDyGSyQSX46T4PpVJJc3PzqP2T5uZmPD09Z5TE5XjlWLvdTlVVFX/5y1/Q6XRcf/31uLi4YLVa6e/vJzMzk3feeYeAgAAWL148KQGUoUGwpqYGm802YDc3CkJCQqbVBGHoYtfT08NHH35I1p4v6Tc0A3ZkLp7MW3Yua9eu5Z3XXuTp/Z387my3Qa9tN1n594EedB5BKLqO89I1vgS4fRs4fVTMjXRnVUo3v99qYu7cecNs7EYKRlqtdkx/XQE1p06Q4icZ1TxibqgGLB2cOnVqxEAstBBqamoG9XPtdrs4Au
ju7n7abDmnKxB7eHgQHh4BCiVvNDVxrL4Od8vABjxOpSRdpaHbYKDF0k9MXCze3t4DHAVTDx3HK3npmWe4bP16bvu//2P3pk0Yq2vwdHEhWKOhBzgsl5O4ejWzZ8+mpqYGs81Gv92Gu7sbra2tGLoNJHl5odKo2dfVRZxUimZIQGs0mThis3LdvHnDgp1MJiMtLY3MzExOnjxJRESEU9ftKG85sL7YkUkG7gelSkWf2YxKqhJ/RzGO9v5YpN24uDhMJhOFhYXMG+EaRoMwvjQStFqtaOSg0WicGr+aadVbp+eIy8rKePnll6mpqRH7N319fTQ2NnL8+HFyc3Nxc3OjoKBgRo8xCRAG/Lu7u8ftWZzOHrFKpSI1NZW8vDzi4+OH9Xe6uro4deoUF1544Ywlaw3V1Rb+/8svv8RkMnHdddeJfT25XI5UKuXSSy/FaDSyd+9e1qxZM+nzcAzEWq0WiURCZ2fnqKSVrq6uaR95EM6hr6+Pfz32ME0lX7MuScfCGF8kSMg73sbWna9z8ngFv7z99zz35MMcamzjqlQN3loZ+TVm3szvw6QJQmaz8JMU9XdB2AFLo3XM9evi888+ZtWqVdN2/iq1Cx29o99XbUYrSKSoVKphCmNCz95gMODm5ibKeur1+inJuU4E07XeSKVSXF31LPeYxbaCfJZ6ebPG35+8zi6O1FQT5uJCD3aO9PRw6tQpkEhoOlmFvLcHL3MfNDSQ/9rr9Pr5suKKKzAajRwvKaHVaiU4NpY16elIpVLMZjOpqam8rXclr7GR9IAAPDw8aGltxVUuZ2VEJJ+XlNCpUnGxTk9jYxNtrS00m0y819aGPSmRtLQ0Kisrh12DWq1mzpw55OTkiAYy48Ex20xKSkKqVrO/s5NLvL2RyaQoFArMZjNqtRor8I25l9Xz5jl1vKEQ1LwmmrmPZ0rh4eHB7NmzKSoqYsGCBeNyO2YanArEvb29bNiwgYaGBlJTU0XShF6vp7e3l2effZaHHnpI7NXN9CAMiIuxyWQaNxCfbtb08uXLOXLkCF988QVz584lJCQEm83GyZMnycvLIzAwkHnz5tHf339GBT2sVismk4muri5Ro9ZoNNLX14darRZ7uaGhoeh0Ol577TXmzJkzopuRRCJhzpw5bNu2jZaWlkEG4M5iaCBOS0vDz8+PjIyMQbOWAgwGA4cOHRq1tzUZON7rX3/9NbXFe/jrBcGE+37Xajh/TiBzInv422dZeM1dyD+ffJbXX3mJ32wtALsFhUrDuRdczbnnncdf/7CB9MjRNwqLI1S8Wn542s4fYPlZZ/PEni3UdfQT6D6wAbDb7dhsVqw2Gx/mtWGVu9Df309ubu4wdbkTJ07g7+8/aUbsVDFdgVilUhEQG8vuL7ejMJm4KjIKvUJOXX8/GTYbXVYrcomEAL2e2pMnkVosRLtokX/r5HZxZBTLIyL4+NAhdn38Mb/fuJGrrrpq0HtUVFQglUoJCgoi7Zyz+eCzzfhptUS4uuLu7k5HeweLfX15W6Pmnb4+DmRnEWqzYbHbKbdaMSsVBLR38Px//8vyFSN7Ebu5uZGUlERRUZHorjQWHDPiwMBAzlmzhlc3b2aRqyt+SiVyhRyb3UZvby9vtLfTKlcMUlYc63gjwdEX+fjx40SNYks59JjjVcz8/f0HjTWN5Ck9U+FUIJZKpWRmZlJXVzdsh9Xb28u//vUvkbD1vwKZTIZKpXKqT3y6S8JeXl7ccsstfPLJJ3zzzTdYrVZRaD4+Pp5LLrkEjUaD1Wr9XjLioSxWIQvq6elBJpOhVCrFuUMh+I7UkzUajbi7u4/6PoInr9FonJZALJfLueWWW3jooYfw9/dn6dKl4sPb1tbGG2+8gaenJ5dccsmE38uZc9i3azvpwZJBQViAv7uG5ZEq9n/9BY//5wVWr15NQ0MDRqMRPz8/9Hr9gPa5RIrRPPp33N1rQ6GYvtKuzWZj8eLFvOATym8/PsYjF7jjo5Ngs9ux2+
CLIz28mNvH+VfdyNlnnz1iz17ohZ9JTNfmf86CBXz9ySek9PaiVygAO8kenmzRqPmys4NzPTzw9/CksuokoS4u+Gi1bGlqRKJ1IUyhQCqRcFliIsfyDnLgwIFhgdgxSG349a95uLOTfx74hjiphBCNC7VdXeQZu4lbtoyu7m7aS0qQuLrho9GwNiCAhYEB1Bq6+c++/XzV18fq1atHvA7BXSk/P3/cfuzQwPnX++7j2rIybqys5FK9njSdjo5+C5+2tnLQZuWP998/ZsvDGUtFtVo9aKxpPMlKYWR2PISHhw8Kxv8rPghO94h9fHxErWNh0bVYLHR2drJs2TIsFgtSqfR/hrAlKNo4M8J0useXAHx9fbn11lupra2ltrZWJNg4bnxOR2laMLMYWlZ2ZLE66iurVCqam5upqqoa11Tb39+furq6UX9eV1eHQqGYtFTdSOS166+/nsbGRl555RX27dtHWFgYJpOJ48eP4+fnxwsvvDDt0nh2u53+/n5a6qpImjv6HGdSiCtfnmyiq6sLDw+PYYtPWFgYvkERfFF6hLlhw3fzNpudLyv6WLBm/Nnmkc7RsYUg9HKNRiNSqZRbb7ud/z71OJe/VcPiEDt1HRbyTvVgtthQqV0oLjjI1q1bufzyy6e94jVR5aahEMhG04H4+Hgi5syh44ttFNfX4aXRIAHitTp2dHTQ39/Hsj4zff399EokvFdfR5HdxqVz5qKRK2hva8PTy4sUN3dKc3NhjECs0+n46wMPkJ2dzb7duymrr0fn7s7a0FDsdjs5mzbx1Dkr8B+S0Ua6u7PO35/X8vPHrCbFxMTQ3d09bj92aOD08/Pj3U2beO655/jo/fd5vbkZJFKS0xdy25IlnHXWWWN+hs6aUej1elJSUpySrHRWLEqQ/8zPz6egoGDU655pVVune8RPP/20WGLs6enBYrGg1+vx8/Pj+eef/58zfQCcDsTfV29WIpGImtYjQSaTTdr6z3Em1zHwms1mVCqVWG4cTebTEc7OEa9atYoXXnhhxMXCbDZz8OBBlixZMmnDhJEYyxKJhLvuuouLL76YDz/8kGPHjuHr68tNN93EhRdeOO3lKuF7kMlkSKRyjObhLFcBJrMVpLJRd+lSqZTLf3Itzz/2V+aVdrJ21nd9LqvNzqNfNdLY78Zll1025jkJLYSJyj2uWrWKLVu28K9HH6avvZpL4mFxmBIkNr6pzuGJjSUUFxVy/wMbB90bk7kfjUYj77//Ph++9xanqqpQKZWcc+5abrjhxnGVm0bCdC6sZ59zDrsPH0bq5k5dR8dAGyU1FXVQEMd6e3iq6hQ1nR1EyKQE+vlzVUICC4KDsVgs1NfXo1Qq0SjkWPr6AOjs7CQ7O5uqqioaGhqIiooiPDwctVqNQqFg6dKlLF26VHx/q9XKn//8Z3xNPfiPMk2R5uPD26dOUVxczIpRStSO7kpHjhwZRvATMFIp2cfHh3vvvZc//vGPNDU14eLigpeXF0ajkaysLLRa7ajrlDMZseP7xMbGimNNoz2fEzmmRCIhJSVlUgzyMwWno+dll13G0aNH+fDDD8nPz6e7uxt3d3fWrFnDTTfddBpP8fRAIpGIxg/j4fvIiJ2BI7NxtJtSkAAcSqoxmUxIpdJh/T2dTjfh8o2zY1SrV6/m66+/5p133uGcc84hISEBmUzGiRMn2L17NzKZbMRe7kTOY7QNQWxsLH/5y18mfeyJQDAdmD1vCQeKNrE62W/EB3//kTYiE1eN2bO74oorOH78GPd98jbvF9SyJFxFT7+NHUf7abG58Yd7NhIbGyu+rzC+VFVVJWa4JpNpkFOYr68vkZGRo7YQBAgqVXJzKw+uhmRfOy6KfqQSWOgP83zNPPTRGyxMXzTpcS8YIMzdfNP1VJYe5KJYuG2FkjaTkY/2vs/1X2zmwUee4KKLLnL6eNNNDk1OTiYjPIJOcy/LZ89G+u3MsEtVFZeEhfHv/fto1GhYHhHBT5KTRT9huV
yOj7c3Tc3NHG5uISA1laysLN574QUkDY2Ey2TYurvZtWcPRZmZ/PyOO0aUfZTJZAQEBNBitWA0mtDqhgdjpVSGQiIZ141NLpeLTGqdTjeiCMdYPV2VSkWIg/a2VqslNTWV/Px8XFxcRuTXTFTqWKhajVVOdrY0LUAulzN37lwyMzOprKwkJibG6deeCTgdiI8dO8YNN9xAS0sLKSkphISE0NzczJ/+9Ceampr47W9/+73ZAU4XnBX1mCmuR0MDscViGVZSdpQAdFyIx5vJneh5OJMRu7i48MADD/D000+zfft2tm3bJr5/dHQ0v/nNbwY95BPFRFStThccP8/V563h8axdvPvNKX6yOASZ+H3Z+TyvjtJOHb9cc8GYx5NKpdx99/9x1lln89mnH/PpkVJkcgVzLljCmjVr8PHxoaKiYpDcIwyw//V6/aAWwmSqJu+++SpnBRhID5Lgq5cj+3akyQ64qi18VWnguf/+Z1ggnsh7/eOhh6g7ksvm63Uk+H236P5ikZ0/fdHJPXf/ntTUVKfvjem+B1xdXVlx2aV8/e67tJYUkxIUjEYmo7ylhf0dHeiSk7ly5UpObvsSg9mM27eSqQBqjYYmq5ViYzfn+/ry9r//zbx+C+enpKBRKGhpaaXHZuPLk6d47tFH+ePGjeK4TX19PQaDAVdXV0JDQzmkVNHS0Y5cIR82TVHV1YlZIXfKDtDFxYW0tDTy8vLQarXDyJMT9TUWPIcLCgpYtGjRsCx2Mj7J8fHx5OfnU1hYyNy5c4e9fjI+BiqVinnz5okzxo4Z/EzLkMcNxMJu829/+xtKpZLt27cPMmZ++umnefTRR7niiiv+5wybJ9IjPpNjQzabjZ6eHgwGAzAwSmYymejt7R0kAejok3s6zTcmIizi7u7OPffcQ21tLcXFxdjtdiIiIoiPj5/ywzATAjF8J5pvMpnwCInnP3t2selgMxemeKGUS8k/1UOTzZOLr7t1TJlI4Vi9vb1ER0fzi1/eJm6wTCYTnZ2dWK3WQd+1QqEgMzNzYOxkivyMtrY2qk8cZcNSO/6uShy/HgngpZVzToSVe/cXDVJcmsh30NrayhebN3F3umJQEAaQSSU8cJ4b2ys7ee+995xmtzvaIE4X5s2bh16vJ2PvXrYcPYqlp4cmSz8rLl7H8rPOQqVS8VRtLc8WF7PUz484b2/6rVYK6uvI7u4m5qyzyMnIIKSri8vSHFXz7LipVdyYPJuH8/PZt28fcXFxfPLBBxzLz4e+PlCpCEpMpFWj5mBnJ4ulMnx8fcRqhtVm44tTVejDwklKSnLqejw9PUlISKCwsHBY8JxMkAsJCaG7u3vELNZms02qypaSkkJ2djaHDh0iKSlpmDvWZNY0nU5HWloa+fn5qNXqSZFCvw+MG4iFLykrK4uHH36YyMhI+vr6sFqtqFQqfv3rX/Pggw/S1NREZGTk/8QMsQDtt2MH4+H7LE2PJvUIiOpber1eLCufiSrEZHrmQUFB45K7nIFAMnJ1df3eNK/HgkQioaamhv/7v/+jvLx8YF7axZ2ShibKGpqYNyeFReeczS0rVgzbqA6taExG7rHv2z7kdF1Lb28PGoWE0R5hlVxK/7f6AZORAC0sLMRiNnFhon7En6sVElZH2snN+gZwPhCfDsTFxREdHY3BYKCjo4OjR49ywQXfVTRu/+Mf2fzZZ+zYt48vjx0DqQRdUBCrr7uOefPmccd117HS3WPQdyasj0qZjLnubmz58EO29vYS3mXg58HB+Lm40GA08nVuLr1mMx9LpTQajcw3mYgKDKC6u5uva6o56urGZZdfNqENSHBwMAaDgby8PBYtWiQG9vHGjUbDaFnsZIOmYzn5xIkTg56XyR4TBjL4xMRECgsLWbhwoagQOZPgdGnaz8+PnJwcLr/88kGL/86dO5HL5TPW/nAsTISsNTBXObkbdiQIzjJDGcv9/f3iTK5Op8Pb21ssK0ulUvbu3Yu/v/+EtVqnE2ciAJ
aXl/Ppp59SWFiIzWZDqVQSFBTE1Vdf7ZRowemCwWDgsccew263c8MNNxARESE6lm3dupUT1fX8YelS/P39aWpqGhRwe3t7USqVYgvB0bziTEwfeHp6glTJnpMmLhkl0dp1wo5cqR4krSkEl/7+fvbs2UN5eTkKhYJFixYNI8rYbDYkgHKM5UIhk2CbwMb3dG/+PT09USgUwyYA3N3dueHGGzFcdhlNTU1IJBJCQkJQKBTU19fjrtOhstno7u4WeQF2Owi7HBeFksPZWVwbHsFNDqYF/lotyT4+vFxaSqZCTmN4OP8tLkbWWI/Oy4uoZcu4ackS/Pz8JnwtcXFxGI1GioqKRH37ya5rQhablZU1iAw2lXXSUZDExcUFf39/ce2dSowJCgoaNNY004LxuIFYuDl+8YtfsHHjRkwmE8uXL6e/v5+qqir++9//cskll4iC2/8r2TAwIbIWTO4GEzR3h/ZyBccYYeF1llAzE9S1vm/N69zcXJ588km0Wi3Lli3D3d2dpqYmMjMzeeSRR3jggQecEgU4HdizZw/t7e387ne/w9XVVbQJ9fT05KqrruKFF17gH//4BzfddNP3Ivc4FUgkEiKi49haUcCF8VZWRQ1e+DaVWfmmGry8vIfJCObm5vLU44/Q2lBFgA56+u08/5SC2XMW8s9HHxd7mbNmzQK5iq8qerk6bfhm0mK18/UJOOeK+U6f90QC8cmTJyksKKDx5EmkMjnhCfGkpaWNW7Ic6z30ej16vR673Y7BYMBkMuHm5oZCp8fU10d7ezsKhRKVSgnYkTBwnP2nqtBbrVwcFTXs2FKJhEuioigqL+fSq6/G69e/5pNPPkGj0TBr1qxJq5ZJpVJSUlLIzMykoqKCuLi4KWWbjuIcWq2WsLCwKR0PBvQFUlJSKCoqQqPRiEnHVJM9wYktLy+PpUuXzqhnb9xALHzZN9xwA93d3Tz33HPs3LkTvV6PxWLh8ssv5w9/+MOM22E4A61W63RGDANZ7FhBUpjJdcxyh87kurq6ispkkyHUzIRA/H1mxEajkWeffZagoCAuvPBC8WEMCwsjNDSUL774gmeeeYbHHnvse90ECj3hjIwMEhMTkcvlGAwGccGWyWRiVvj111+TkpIyKfu3iZzPdODWDb/i3rt+w++2mTkr3MaKCClWO2yvtJFTC3Klhosuu2rQ815RUcHj/3yAZf5G7rrRlVhfJTabnf3He7l/x35+cctNvPXeh+K9f/aq8/jPN59xVqSaQLfvFle73c5T+w009SonzKZ35rvfuXMn+V9sw9NkJFqnx2KzUV5aQuGePay9+uqBTQLftQwMBgMGgwFvb+8xn1WbzcbevXvZ89VXNFUeA8A7IhyVrw85xSUkx8bS0tIsZncSCbSaTBR3dRHj5o6nA9nLET4uLvhKID8/n9oTJziWl0dfayuZGg39Wi2Rc+bw1/vuG1M0ZyQoFAqxBKzT6aZc6RPIYAcPHkSr1U45EMOArkJMTAx5eXkir2Kqx5RIJCQlJZGXl0djY+OUiKLTjQkN/95222388pe/pKioCLPZTEBAwP+s2wVMLBBLHITOh87kCgFXmMkVslxnZnInipnA4BZIUt8HHyAjIwODwTCifZ9SqWT58uVs2bJFnBc8HXDcYDl+3wDt7e3MmzcPuVyOUqkcmCd2+EwCAwOx2Wx0dXVNayAWXLume37/wgsvZMeXX1CWvZOilj72nbIgYUAZzNNVhsI/gZtvvmXQazZ99AGJrgaeudIHhWzg2qVSCWdFa3jdS8Halw/zySefcOONNwJwz71/47qry7jw9WNcmyxlYZiKVqOND4p7+aZWwe/u/otTZhUCnLkPCwoKyP9sM+e4u5MsjH8Bi/r62HGojPeffY7mKy5HrVbT09MjOkSp1WrKysqIiYkZ8Rm22Wy8/NJL5H/2GckyOef5+iKRSCgpryDXaKS6vw/NsUoWe3gibW7GarNR0tzMruZm3CIjUTQ1j3r+drudlp
4e8t55h4jOLs6XSgmWyjGbeqmxWDmw82setNu5/x//mPB8vDCGVFBQgLe395Tn6z08PEhKShLFOaZjvQsLCxPL6DA97n5SqZS5c+dO2LP5dMPpp7i/vx+DwYBMJiM+Pl5U6qmqqqK7u5ugoKAJ78zONDQaDc3NzaP+XJjT7O7uRiKRUFFRgdlsHjaT6+3tTVhY2KRmcieKmZIRw+nvzQGcOHECb2/vEWdvJRIJAQEBKBQKTpw4MeVAPJLoicFgGFVL28XFZUAfuKNj1DJXS0sLUql0WkTo7XY7n376KS+++CL5+fnAwLz0kiVLWLJkybTwNNRqNY898RRPPfkke7/6HI2sA4AOmZbZCxdz5x/uHqQpXV9fT0VpIY+v1YhB2BEhHnLWxMDnn3zEDTfcQG5uLrt372bugkWcOOHPi2Vl/CfXCBIJyWlLePIvt44q2zgaxrsP7XY7uQcOENbXR7heT3NLC319ffSZzdjtdpK9vKk8VsmpqiouWrduUMvAarWi1+s5fvz4iN9xRkYG+Zs3c0NgELMdyvWzfX2Z19rK0xUVnPTzo8FgwHjyJBabFbW/P6lr13DxwoU889e/UtnRQcwIqlLl7W0Ut7QQYjRxjbs7wRoXNAoFJrMZ9x4THlIpH+7bx+7duweRyJyFt7c3MTExVFRUTEt2KMhqnjhxYloqNIJKVk5ODj09PdPG0ZmKitvpgtPjS1u2bOH1119HrVaLzFWDwYBer6empoa7776bn//859NSlvi+4Mia7u7uxmq1Dst6hJlcGFikgoKCBqkRfd+YSYF4OslrY73XWNdrs9kmdR5D2enC9+24wfLy8iIsLAytVjvqBmvRokXs3r2b1atXDyPQWa1WsrKyWLRo0biymq2trezcuZPq6mpUKhULFixg/vz5gzY9f/nLX3j55ZeJj49n/fr1yGQySkpKeOWVV6itreWVV16ZlgxZr9dzz7338ssNGygsLMRqtRITEzPieGJLSwsSu5UYn5HLqwCxPgp2FNWw/qrLKSs8SJDOhodGwtEWGyhdue23d7F+/foRzUGcgWMgdtw8C3/q6uooy8hgnbsH3UYjSqUSV70epbc3SoUCiUTCgv5+ShsaRqxahIaG0tQ0IE069F7bu3MnCRLJoCAsXreXFwt1emp8fLjxL3/h+PHjFBcXs2DBApYvX47dbufz1FTezsrmN4mJeH1rmgPQ0tPDa0crMXZ2co5WT4qnlygcolMo0MtlqIxGwjq62Ll166QCMQxknYLiV0xMzJTvn5iYGE6ePClKyk51fZBKpURHR5OXl8fhw4dJTEyccUF0OuD0+JLBYKC2tpaIiAi8vb0JDQ1FKpWyZ88e0ZQapqd8cDphsVg4evQoJSUl7N27l5qaGmJiYjCbzbzzzjvDGKyCfV9mZia+vr6ntc/nDM70TDN814/7Ps4jPj6eL7/8kra2thE/+5MnT2Kz2YiMjBw02yrAZrONOCLkKPeo1WrFrHuioicrVqwgOzub1157jUsvvVTMFtvb29m6dStdXV3ccMMNYx7j448/5rXnn0JmaibctZ+uXjtb39MQnjiP+zb+A39/f7Zs2cIrr7zC+vXrB8khpqenU1xczMsvv8wLL7zAbbfd5vS5jwdvb+9x7Rbd3d2xS6ScarcyaxTzpWMtfTQ2NaGzNPHWlS4sDh8Yw2o1Wvn3fgPPPvUoYWFhE1brEiYPLBYLNTU1HD9+fJCgjTB5EBYWhr+vL5GBQQSOUplQyeVYRxgFk0gkSCQSgoKC6OzspLy8XPRc7+/vp6a8nCu9Rid6zfb1ofBkFX5+fkRHR6NQKOjt7aWjowN3d3du//3veeSBB3igpJRUtQo/jQv1JhNFZjMmP1+0lZXMdXcTg7AAmVSGv8YFH6ORb0pLJ/S5Db0+vV6PwWCgpKSEVAf29mSPJ5fLsdlslJWVMWvWrGnRC1CpVDQ2NqLVasf0G/9fxbiBWNA3vuGGG0ZcUAoKCnjrrbdoamoCvp
9y5WRxxRVXsGXLFqRSKUlJSQM7Y1dX7rnnHubMmTPmnOtMCIAwszLi74OwtWDBAvz8/Ni2bRtXXHHFoN5Od3c327Ztw2Aw8PDDD4tiIfPnzyc8PFxkp0ul0kH6ys6w052Fp6cn9957L//5z39EQwm5XE5TUxOenp489NBD4iZ1JHz11Ve89OSDrAlqZWVqHxrZQP//WHs3Lxdu5/d3dPHiq2/y8ssvExUVNSgIC0hKSmLu3Lm88sor/PKXv/xeN8MhISGERyfwTn4JaxK0SKWDn/2WbivvF/WjsFt5/SpPor2/2yh5aWXcd54bzcYO/vvvf3H++eePeO6OhhVDZVsdZ/xHE7Tp6+tD7enJqba2UQPxqc4OvGNHZ2oLa9rmzZv56O23USsUeAYE0NVlwOY9er/RareDozDKt3rygiqVr68v9z/8MPv37+fArl0ca23FNT6O9eecQ11dHc9kZNDDwIbSjh2p5LuyqlqpoNdqpWeKs+Q2m42QkBCqq6s5duzYiLKbEz1eQkICJSUlnDx5koiIiCkdz2q1olAomDVrljjWNJS1P1HMtBjl1EoknLTFYhmUDclkMtLS0njuued47LHHuPjii7+XcuVk8Yc//IGHHnqIqKgoZDIZ7777Lo8//rhTurYzgSQlnMeZDsTfZ0asVCr57W9/y8MPP8xLL71EYmIiSqWS7OxsiouL6evrIygoCDc3N/z8/KiqquLll19m9erVXHHFFej1+tPeRggLC+Odd94hIyODgoICrFYrcXFxnHPOOaJH90iw2Wy8/frLpOrquTjSgp9ehruLCokEQtwteKn7+fW2A3z88cfk5ORw6aWXjnqstLQ0cnNzqaurm5TQxlSw7tLLeebJSv5vSzt3rXDDRzcQBA819PF/W7votctZEy0ZFIQFSCQSfjrfhaveq6SoqIiUlJRRKxhClqvVavHz8xMnDw4cOEBoaOio5iFKpZLE9HSKNm0i0d8f1yHfSVVbG1USCReMYXZ/8uRJPn3zLdy6ugjo7yfYz4+6w0dobmhgR2MjCwIDR1z3ipqaCEhJcZgjthMQEIDFYqGwsJAFCxag1WpZs2YNa9asGfTarVu3gkrF9uZmIl3NA2NPEgnKb8lQFomUvL4+NF5edHZ2TpqHYLPZUKlU4hiSoNw2WVitVjQajTgPLIxnTuV4MpkMNzc3Zs+eTVFREQsWLJgW3sVMwYRSAscMwnHHefnll3P22WcP+52ZhvT09EF/d1ZrGn7MiEc6j9OdEQtyjx4eHmzYsIEdO3awd+9ejh07hlarZdasWQQEBNDe3k5paSn9/f1cd911FBcXk5mZydKlS6e8cx4Pju5Ly5YtY9myZU6/9siRI1QdKeS6ef1EeqtQO6hcuGsVLIqQkR5o5K3XXx30XmOdx5lQGktOTuaP92zkP088wpZnW0nylWDqh8p2CcERiUQqTUR5nRz2ugHmvY1QNwk2Sz+ZmZl0dHSMWMHQ6XRjGp2Mt9FaunQpVRUVvFdcwlwfHyK9vbHYbBxpaKCw20DkOeeQmJgoPls2m02cDDCbzXz61luEt7bys6VLMRkMGAzdrI6Lw12p4LV9+9lSWMi6OXMGvWdBQwNldhvXnHfusAQmKSmJ7OxsDh8+PKpMpflbMllhTw+fS6VcondFAfQZjbTa7bzXa6JWqeCOK64gPz+fxYsXT4oNLCRPgi1hUVERLi4uk3JFEz4zwXRECJwLFy6ctMuaI+/I39+fnp4e8vPzWbRoEepRRr/GwkzLhmGCgXg0nHvuudNxmO8dAlnLmQd5JjkwzZTzmM4NwUhyj0ajEavVKmZCF110EYWFhSxbtozZs2ejVqvx8fFBJpNRVVXFli1b2L59OxdeeCHl5eV8/fXXJCcnT9s5joSp6F0bDAZ6TQaivSSDgrAAuUxKlKeUXWXHSUlJobi4eFQv2MLCQgIDAwcxmr8v2O12zj33XC644AK2bt1KRUUFcrmcDYsXs3z5cn5+y0+pqD
qG1WoVA9zAvTPw3JU3WUA6EJwWLFgw4QqGM8+vVqvlmp/+lD179pCRnc2BkyewI0Ht78fcC85n0aJFw5yMhP5wbm4u9poaLoiJQSWXo/TwwGw209TUxMVJsyiurePVE8epxs4cXz8kQElrC+USCQsvu2xQO0HwThaqiRkZGbi6ug5jLbe3t1O0dy9J/gGYmhrZ39dPbmsLMXI5fXY7xT091EulpJ27muuuu45Dhw5RUFDAggULJlyRFDYHZrMZhUJBSEiIGOgmGtiFtckxcBqNxkkfTzim4yZMaDvl5eWxcOHCGZ38OYv//SuYApzVmob/fwPgZDFZda2RlMa6u7vp6ekZZGAxlCwH8Pnnn2MymVi/fj0nT54cxFIOCwtj/vz55OXlsXLlSmJiYigrK5u26x3vmobCZrORl5dHTk4OVquVyMhIVqxYMWhe09PTk94+K41GGK2LXNM1IIt42WWXcc8995CTk8OCBQsG/U5FRQUHDx7k7rvvPqMTC66urlxxxRXidyroGkfHxvPmnq8oqe0lKUCJVCpFLpeLwfP1fBORsbNYvnz5pLOVsV4n3KcajYY1a9awdOlSWltbkUql+Pr6imNJwliL43/tdjvlZWVEK5Rovl3wJQwITtTW1tLR0cFvli7lr7k5tCUk8HFDA9jthKenc+PKlaSnpw/TmhYCpUajES0F9Xr9oPHPgwcPompp5a8XXMDTu3fRUleHi8VKtd1Oj81Kr1KJ0lXPht/+VuyfjmaY0NnZyf79+2lpaUGv17Ns2bJBlaLq6mp27thBaVYWtr4+FC4uRCUnYzQaWbt27YQCu/BZO74mMjJSDMYLFiyY8D06NBBLJBISExPJy8sbJNX5v4wfA/GPpelJn8d4maDFYhlxRMhms4nsdDc3NzHojrdbzsvLIzQ0dFAAdjyHhIQEsrKyOH78+NQubgIYaQGoqanhnnvu4ejRo+j1epRKJa2trTzzzDPcddddnHPOOcCAFSQqVzaVtbIiZnhWd6TJysF6CR4eHlx88cWUlpby5ptvUlhYyJw5c5DL5RQVFZGXl8fy5cv55S9/+b1cM3ynLNbd3U1fXx8nTpzgyJEjoqiNXq8XZ+w3bNjAwexMbv0sn7+fK2VljBqpVEJtp4Un9nazq1rFE0/fNenF1DEjdiwpC38cIZFI0Ol06PX6YUF36DEFx7Pmhgb8LP0gkWBnIBBLpVL8/PyoravD1cMDf3d3rrv5ZlFjYaQsbSS9ei8vL2JiYkTyllBqra+pIVSpwN/VlbvXrGVfZSWZlUfpMHTjqVEz182dKplUnBKQyWTMmTOHjIwM9Ho9YWFh2O123nnnHT54+WXMDQ14SCSYpDJe8/Bg9RWXs2HDBkpLS3nlqacIN/dxlb8/vh6eVHd3s+urr3ghOxupVMqaNWuc/m6EZMXxGoWpmpycHEpKSkhJSZnQdz3SSKxUKiU1NZXs7GyOHDkiMtmdwUwM2j8G4h9L05OCY0YsLMpDs1zB1EDIcqeqNCYIayiVSpRKJSaTaRAZSgjkVquVY8eOEfutgtLphrDYCwpad955J0ajkZtuugmVSkVtbS09PT0cP36c++67D51Ox/z585FIJJx30WVs//Bl7tvZyy3zVYS6S7FY7ew9YeHpLAsWiZK56QO97ieeeIIFCxbw0ksv8frrrwMDjjrr16/noYceOm3auY4bKoPBMEhZTKvVYrfb0ev1REVFjSpq8+Irr/GH3/+OX2zeh6eqEze1hKoOGxpXbx585IEJi3g43nuCvrejE5XwPEul0mHBdui9Z7Va6ezsFK9N+K/VakWr1aL38qLRZsfN1XWgPPHtsZVKJT7e3mRWVGDVaPD19R0z2xPuk6FrTVhYGF1dXSJ5SyqVIpFKsVgHrlGvUnFBUhIXOPSSK0+c4J36+kHXolarRalJnU7Htm3beO7++1lhsbBQqUIrkdJns3KkvZ3tL75Ij8lE2cGDpBhN3DlnDi7fPj+p3t6sCArikZ
ISXn3uOeLi4py2uBWC5jDtbKmUtLQ0srKyJszMHk2bQqFQiAQzFxeX/2mVxx90INbpdOLOdzw3o5mUifb395+x9xfkHq1WK1VVVVRWVg5alHU6HR4eHqfF1CAsLIxdu3Zht9vx9vamurpafE+AU6dOIZFIaGtro7W1VZRUPJ0wm83k5uZy9FAR7U21nKxrpqGlk5tuuonPP/+cyspKcQGWyWTI5XJefPFF5s8fGJXZsGEDeZl72Vxxkn1VZrxdwNQPJosMnYsLrn4BXH7lemDgu7/22mu55ppraG9vx2azodfr+eabb6ZF0U14FoZWMXp7e0XpVp1OJyqLCcYDwoz9WKIlXl5evPraG5SVlbF7927MZjMRERGsWbNmXHnF8bJctVpNS0sLYWFh42a5gtiHoCMtbCqEtoherxdFe3Q6HVKplKCgIF44dJjSpibmfMtIFwKNQq2mqKsLaWjIuKItwnkPPS9BA9mRvBURFcXXO3Zg6uvDZYRn6GhrKxIP92HBx8PDg4SEBA4cOMCTf/87a0w9XOXugYdKhUwiwdDfj85sxmqz8dEbb+CmUvHr4GCU0sGBTqtQcE1kJA+eqmLnzp1ceumlTrmcWa3WUTfZAjM7OzsbrVYrGoE4c8zR7m8XFxfmzJlDbm4umm83Q/+L+EEHYiH4mkymcQOxTCY7owFQwPe1IXCUe3TMdIXSo6BzLBhYTNYNZiJYuXIl27Zt4+DBg8ybN4+2tjYaGxsxmUwolUr2799Pf38/ZWVlrF27lrS0tNN6Pl1dXbz+8gv0NZQS5yWlvaWTI4cbkSg0/Otfj+Pn588VV1xBTEwM/f39FBUVsWvXLr766itOnDhBREQE4eHhPPjoU/zrob/R0XAClBY8tDJkFgV9al9u+vntLF68eND7SiQSUdxksvfkUHKcoJbn2DZw1iVqImS1pKSkMc3sHYPuSPe5EGSFQCuVSpk1axZ5eXl4eXmJn4vNZhsUbIX/F8aghNJ5QEAAer1+TFOHkJAQFlx0IV9+9BF1XV2kBQWhVak41dFORl0dktmzWH7WWZSWlo5Zdh2pfypgKHkrLS2NfSGhbDt8mItnzULukBG2dHeT2dxM0jlXjigrHBwczNNPP42tsYkrg4IJcKgaeapUuCkVWLoMbG5rw67X46lUDZp1FhDr5oab1YpEIqGoqIj09PQRpWaHXuNYVQGBmV1YWIhGo3FKFtlqtY7JjnZ3d2fWrFlTZmefSfygA7GgomQ0Gsfd7c2UkvDp6FWPJvco9NMEuUchE1IoFOTm5uLr6/u97kCjo6O5+OKL+fjjj2lsbCQuLg61Wk1FRQUFBQU0NTWxevVqrrzyymEkGZPJxO7du9mzZw9dXV14e3uzcuXKKdmhvfvO20gaCwjT2Xl+Zy1avTuzU+fTb7Vw+PARWlpasNvtqFQqVCoVS5YsITg4mP/+979s3ryZO+64A4CFCxfyzCtvs2PHDkqLC7BaLMyPTWDNmjVTngkWRsCGfr+OxgbT0TaYKEYaE3KEY2Y7VpYLA1lRcHAwhYWFeHl5iVm9MAal1+vx8/MjKioKvV4/YbKQRCLhoosuwsvLiwM7dlBUVQU2GzKdnti1a1lz/vm4u7uTkZFBVVXVqMpPYwViGFiPIiMj2bRpE9HR0aQsXULerl08n59Pkoc7OqWKuq5Oys192ONiOX8MJbJjR4/iLZXgP0ImKZNI8Vep8LZaaejrx87orTm7fSDLDg0NJT8/n/T09DGfl7EyYgE+Pj7ExMSITOqxZu2FY473nQUEBGAymcRzHCtw/9gjnmGQSCROzxL//0DWcpR7dMyGHE0NBILNeHKP36cVoiNuuukm/Pz8+PTTT/nwww+x2+24uLiwePFirrnmGuLj44edc0NDA/fccw/V1dVERETg6elJQ0MD//znP0lKSuL+++8fcxctyLvK5XIiIiKQyWS0trZSnLWLEI2VV/fVs2rVKlafs4zW7n7MFhsXX3wxW7Zs5d1338
Xb21ssw/n5+REXF8e+ffvEQAwDpdv169fD+vXD3t9ut3P48GFRLCQyMpL09PRhhCBB8lHIAkcaAXN1dSUwMFAkkU0HxuNYOEOgcgy6wqI7UmnZ8fqELNdsNqNWq5HL5XR2dhIbG4urq+uE5UrHgkQiYcmSJaSnp1NbW0tvby8+Pj54eXmJ7yGIqri6uo4oxzpajxgGKhsffvghB3fswFRTQ1lPL2pvL9TBwbinL6SkuZk+kwmPhATOXbyYjo6OMbPT7vZ2+iRSui0W9A6fqQAXhZwWq4127LSYzfiMcE6HO9oxqVUkJCQQExNDd3c3hYWFzJs3b9RgO15GLCAsLIzu7m7y8/PHHUFy1r8gMjJSDMYLFiz4nxpr+t8509MAIRA7Y4X4v0TWGksS0FEswcfHh4iIiEnJPU52fGmqkEgknH/++Zx33nns3buXrq4uFi9ePGpmbrPZ2LhxIx0dHdx6662DFsj6+nree+89nnjiCf72t78Ne21raysvvfQS27dvp7e3FxjYeV955ZUDrFRTKwdOdRATl8CaVWcDoFJY6e234eKi5aqrruKxxx7jwIEDXHnllSKpLTw83OnRqqamJv754EaOl+XgLutBJZfysUmK3i+Cm35+m5h95ebmjjoCptVqv9csVygrjxRwAZHMM1aWa7FY6OrqGlZehoGWkl6vF0059Ho9CoUCq9VKdnY2BoNhSspQY0EmkxEaGjroWoXrcnd3Jy4ujsLCQhYvXjwsKxNmiIcGYrvdzqsvv0zF1q2c5+vL3IXpdHd2UtfVRVFdPRVdXdx8112D3MW2b98+5ncaEhxMUWkZe3t7WDvC55xtNNIolRAaGcn71dX8OSAQtcPPDX19vHOyirCFC0SjheTkZJGlnJiYOOL7Ohs0hRGkgwcPUlxcTFpa2qibpokcMykpyaljzjT8oAMxDJS1enp6xv29mVKaHpoRC1nQ0CzX0dRAUCiaTteoM5URC5DJZOJ84ljl8by8PCorK7nuuuuGZSkBAQGsXr2abdu2cerUKUJDQ8Wftba2ctttt9Hc3Ex6erpoDFJQUMBTTz1FWloaxh4zFQ0mblj9XS9ao5TTZbJgtVpRKhUiOeXSSy8VSW42m80p/1eDwcCf7/odsqZC7l/hwezAARP3E809vJFVzCMP/Jnb7vyzyFh2c3NDqVRO6ft1VqJ2aJZrsVgGCWIIAae4uJj6+no8PDxYuHDhsBE1xzEhxyxXKJ0LvdyQkBD0ev2YpXOZTEZKSgrZ2dm4ubmd1raJ8BwODa4hISF0dnZSUFDAwoULB53raJ9tRUUFZV/v4rqwMBJ8Bs5Z5elFf38/57u6Yqup4bMPPhANFITPfKSKwYkTJ6ipqcEvLAypiwsfGbtRAMvUGnQqFVYGgvAzra24hoby90cf5S933MFfSoo4y9MTX42GU93d7O/sRB4dw313fTdWJpfLSUtLIzMzUyTtDYUzpWnHzzA1NZWsrCwqKipG9aCeiKOf4zHLy8uJj48f9jszMTj/oAOxRCJBq9U6nRGfydK0I+Ozt7eX0tJSMcuVy+ViwBV6YUOF76cbZyojHnoO420GDh48KJKORkJCQgLbtm0jNzd30MLywgsv0NzczC9+8YtBbNjIyEiioqL46KOP8NTKsNpA50D0U8ikKOVS+vv7kMvluLi40NvbS1tbGzKZDE9PTw4dOsTFF1886DxGqmJs3ryZxopc/nulF4HuA3Z9SqWKxDAX/hHmze/eryInK4OVq8/D09Nz0qW4lpYWPvnkE3Zu+4zOtlb07h6sXLOOyy67DG9vb/H8RiJQCTwCIdgK2e6uXbt49J8PcqziMNisIJHi5RvAjbfcyrp16waRxITSuV6vx83NjaCgIJFANVFotVoSExMpKysTe96nC47PgHDdQqY3knzlaCX87KwsfHt7iff+jqcikUrw8fGhvr6eBT7evHWkXBz7GanXfPLkSV5/6SVOFhRg7zbSY+kXy9Jv9/bwscGAv1
RKO1Bns9Kp03LPn/5EYmIi12/YQFdXF19s305fdzcubm6cffXVrFu3Tvz+Bbi4uJCWlkZeXh5arXaYdaWzpWkBSqWSuXPnkpWVhVarHfE5nai1rlKpZN68eWRmZuLi4jLihmGm4QcdiMF5venvMyN29EV2zHItFgsqlQqr1SoKswtCGN/3Lm8mjHM5k5UL/e/RIJPJUKlUg2ZQu7q62LFjx6g+wqmpqWRmZmLo7MRitVF5ooqoiIExEokE9Bo5bd199MnMVFVViaQ2qVTK+++/j81m44ILLqC+vn5Ua0adTkflkRLOS3AhLsSHkWitl6a68/CBb5i3IH3Yz5zFiRMn+L/f346ttZI1sQoi4lRUtdbw5XtPsnPbZ2z857+Iior69tqGE6hMJhM6nY6ysjK8vLwICQlh+/bt/OoXN7M80Mzfr1CR5KegptPK63mneOTvf6WyspJbb72VgIAAYmNjp33T6O/vT0dHB8XFxcyfP/+0bUiFz2BoMHZkQLu5uYnBZbSMuK2picARKlVyuRwfHx/6Gxro7zbQ3t4uHge+C8TV1dU88te/4lldw61hoSTExWO2WnnHRctrJcV4ymT4eHrQ1NtHm9WC1i+I9TfdxBVXXIHRaMTPz4/rrruOX/7yl/T19Y27nnh6epKQkEBhYSGLFi0atNmZjB+9VqsVFcZcXFyGVa4mc0xhrOngwYNoNBqnRq/OJH4MxGewRzwWo9Wx1yeMCLm4uNDV1cXhw4fPuCfnmS5Ng3MZcXBwMF9++SU9PT0jsjObm5sxGo2DtH5PnTpFT0/PqKUyGDBALy0txT8gkN37M0lLScHdTY/NbqffOiB6f/JkFYWFhWIGXVpaitlsJjk5mc8++4zly5fj4+MzqjWjydBJeLSGEWdLgDAvNVg76e7uHueTGoDdbufIkSPs3LGDhvoaNC46sjP2E2I/xRM3BOGm+e69r023cNemEzz0wF959c13xTlOYfFvaWnh3Xff5cDOLZi72+mz2HjHw59l56zmlRefZVVID89d4oJcPhCc3F3kPL5OTZiniUc//oA77rhjTNvRqSI2NpaDBw9SXl4+aj9zOuAohemYDWo0GlJSUigoKBAz/dECsYteT8coVoZqtRrUarpMPeJrhwbiTR9+iP5UNXekpqL+9v5xkUr5WUoKaX5+3FdchDU2lgB3dwJUKlafey7r1q0TM3pHoRNnTRSCg4NFCdNFixaJ9+1k3fe8vLyIj48XFcamGtxhgO2dlJQkjjXp9XpgZpamZ6Zf4feI74s1bbFY6OjooKamhvLycvLy8ti3b58oyWgymXB1dSU2NpYlS5awdOlS0tLSiImJEWcdZTLZGS+RC5iu0rTdbuf48eMUFxdTVVU1oeDuTCBesWIFKpWKffv2Dftdm83Gjh076O/vZ/v27WzcuJHPPvuM3t5eJBLJoCx5KMxmMy4uLtz/94ew2SX85/lX2Z9TSHNnDx3GPgqLivnggw9ENnZ+fj52u52UlBQ0Gg07duzg0Ucfpb29XbRxHFpadvXworbdPOo51LabQaYQZ+A7Ojo4evQo1dXVg9S+bDYbvb29PPboo/z59psp3/ES/o07qD7wNtVH8rgy1oSL/Ntep1SCVCrBXavkrnP9aKmuIC8vT6wItbW1UVhYyG23/pSsTf/mooBa/nYW/GW5lFRlJW+98CQdLY3cfY4etVqFXC5HJpN9uzhL+PlCF1zlfXzwwQdjfm9ThVQqZfbs2TQ1NVFXV3da38tRXtPxmfD29iYqKoqCggL6+vrEfvJQpM2dywkJNI6yoSprb0cbEkxPT8+g95BIJLS3t1O8fz/nBASIQdgRc/39WevnT9KsWTz90ks8/MgjaLVa8TOZSE93KOLi4tBoNBQVFYn322SDJgz01wMDA8nLyxs0Hz+VcwwMDCQ8PJy8vDzM5tGfpTONH3xG7KzetLOl6bHUiRznNgX1nonObc6EkrBwHlPNiLOzs/nss8+orq4WH+DIyE
guv/xyp1yTxgvEgnTh2rVr2bRpE52dnSxcuBAPDw8aGxvZuXMnRUVFBAUFYTAYsFgsvPLKK7i4uKDRaCgoKBixv2SxWCgpKWHp0qV4eHjw97//nVdffZXPNm8Ry7YqlYrzzz+fG2+8kT//+c8sW7aMdevWiYHZYDCwdetW7r//fp566qkR5TiXrziPLa8WcaOxHw/t4HlQm83OZ0XtxCavwmKx8MS/Hqcgcxd2sxG7VIZ/eAIXXHy5aKTw5ptvkvvVu9x9litnxfshkUh4fnctps5W4tx6qD5VRVR07ECgsNuxWa1EeMrwU/WwZcsWrFarqLD10YcfoDMc5dGrw/F103xbkoVzkoOoaS1gb0c/MV7DLgcAjUJCit8Aqeh0Q6PRMGvWLIqLi9Hr9WJGdDogrA9DyVsRERF0dnZSVFREeHj4iM96amoqO1JTeSP3INfExxP07T3Sb7WSWV1NVp+Zy2/+JVKpVKyGCe/R0tKCvaeHSL/RWeIRej0ZtbXAwHonCGrodLoJ93SHXnNKSgqZmZki2WoqQRMgPj6e/Px8CgsLmTt3rrjhn8ooUlRU1KCxpulQoZtu/BiInQzEMplsmGi7IPc4tJ9rt9snrE7kLGZKIJ5qRrx7925effVVAgICuPDCC/H09KSlpYWDBw/yr3/9i9tuu22Yy5Cz52C32/nyyy/54osvaGhoEIlGhw8f5ujRo0ilUsxmM83NzZx99tlcddVV4sNpMpn4/PPPOXXqFAcPHiQyMpL4+HisVitWqxWLxcIXX3xBV1cXCxcuRCaTERUVxQcffEBTUxOHDx9GLpczZ84cvLy8eOSRR5DJZKxfv37QYqLX67nyyit59tlnef/997n33nuHXcf555/Pji8+5c+fHOHO1f7E+A2U65oMZl7e10BZlzs/XX42rzz3byJUbfwsxZ2EQHc6TBZ2luXxypNltLX9mgsuuICvt27i2jQNK5IGImS7sZ9TrT1IJRDsoeRYSzfNzU1oXFwGNpx2kMmkyKUDAS0pKQmdTkdnZye1laXclu5NoNfgOVaJBJYmePNVaQtthh58PUYOfK29EmJOI4nKEd7e3oSGhlJcXDzivKrFYmHnzp0cPXoUrVbLmjVrJm0lOVK/WCKRMHv2bDIzM6mpqRkxSCkUCn55++08/5//8ExxMf59/ehkMmotFno93Flx442sXr2a3t5eMjIyUCqV4nHUajXIZHSazQSMMlfcaTajdvmOUOjj40N0dDT5+fnExcVNKXAqFArmzp0rMqkHpgUmv85JJBJSUlLIysriyJEj4gZ1KucomE7k5uaKvIGZhh8DsROB2GaziWWNY8eOidKPgtyjkOV6enqedrlH4WF3xqjidGIqGwKDwcA777xDTEwMK1euFK9Dp9MRFhbGli1bePPNN0lNTR3zoR4pI7bb7bz66qts3bqV2NhYLr30UjQaDSdPnuTgwYPY7XZuvPFGPvnkE+Lj47nmmmvE41gslgEjhvPO4+TJk3R3d/PWW28RGhpKdHQ0/f39HDp0CLPZzMaNG1m7di3Hjh3DYrHg4uJCeHj4oN691Wpl7969zJs3b8QdvUwmY+7cuezfv1+8lxzh4eHB3/7+CP/YeB+/2lRGiK4RtRyOtUtQuAdx+//9gV07txMur+eBdeEDgU8iQQLMifTgo+x63n7nJeRyOf2GJlYnBVDXauT53TVkVLTRaujD2NvP/sp+Al2ldBuNuLt7fNsCkVLd3ketUck16ekigaaqqgq72cDCqOEZWGt3H156FWYrfFLSy63LdMPu0ZL6fsqaJGxYtWrMe2Q6ERUVRWdnJ4cOHWL27NniOW3dupXf3fEbausb0alk9PTbQCLh6vVX88STT06YcT0aeUsY+8nIyBiVCe7p6cnd995LWVkZxcXFmM1mFnt7k56eLo5hCbaJeXl54jUEBwcTEB/PgZIy4r2GlyF6LBbyjN2sXr5s0L+Hh4djMBg4duzYlNcqgWxVUFCAh4fHuE
pZ40HYyGZlZYmf11TFORxNJ1paWvDz85vS8aYbPwZirZbW1lbx70NHSIxG4yAyV19fH56enoPkHr9PCA/NTAjEQ43UnUVWVhYmk4nFixcPuwaJRMKyZct48803Rbm60TBSID506BDbtm3jnHPOITU1Vfx3Hx8fEhMTeeuttzhw4ADV1dWce+65tLW10d/fLy6YcrkchUJBamoqVVVV3HzzzWzevJkjR46gUCg499xzufTSS4mJiRGPPVp53Gw209fXN6aeroeHhzgLrlAoho0JBQUF8e9nnic/P19U1jonKopzzjlnQHDkyb/z+yWByOz9WCwWFEolfLupWDvbnU8LjrN582Z6TUZa29q559NqZNYeNsyTMydQzYZPbXx0yMov5lixdXejUA6MSfVbbPxnVyN6v0jOOuss8XxlMhlIJJj7v9uENXaaeXjLCXaWNH1bnrXz4F4r4Z4dnJvkLn7HJ9os3PZZDxExiaz6HgOxkJVmZWVRXV1NaGgoX375JevX/4QLYuDziyAtwEqXGV4rtPOnD9+loaGeTz79bMJBajTyljB3K2zwRlLFEvrajsIdQyEw06uqqujt7UWtVnP+pZfy8qFDfHH8GOeGRyD/9pw7zWZePXQIWVgYZ5999rDzTEpK4sCBA/T39095PfH29iYmJoaKiopp0XoWxqRyc3PF850qlEolixcvPq0jbZPFDzYQ9/X1UV5eTmVlJa2traxatQqr1cq99947qtzjnj17iIyMnPKObypwZE5+H2pJo2EqpemGhgY8PDxGfSDc3d3F2dTxzmFoEPz6669FYXlHoQnhT1xcHDt37qS/v5+Ojo5B/qkeHh74+/uj0Wjw8PCgsrKSFStWsGLFigmdgwCNRoOrqyv19fXDet7Ca2pra1Gr1ajVanFjM9KY0JIlS1i2bHBWU1BQgN1sIDU0GJvFTGdnJzKZDMu3Qv1yuZyUABm5ff0oXVx5KbMTqc3Mf9dp8NEN3Dv/WOPCH78w8Y8DVhaHmmiTNtDWp+CFA21UGTVc/pMlnDx5UiwRxsbGonT1Yc/hdq5ZHEBzVx/XPVOEpbebuxdLWR4uo6LZxu+/gvXvm5kX1MK8UDXVnXb2VkFwRCyvvfrG9y4/qFQqSU5O5uuvv6a5uZm77/o9KyPgk6vsyL59jFxVcPtCiPG0cf47u9ixYwfnnXfehN9LuCeEZ1QIInq9HrVaLTKDJ/sZ+Pj4UFNTI9omLl26lLYNG/jklVfYl59HjFKF2Wqjwm7FJSKCO+6+e9i8L3ynElZRUcGpU6embCMYFhbGyZMnqaurIzIycsrfsYeHB1FRURw9ehSj0Tiu4YQzmKmylzPzrE4j7rjjDvbs2cPhw4dRqVR4enri4+PDBRdcwJw5c1i+fPmoX9ZMkLkUgq/Vaj2jN9VUyFpyuZy+vr5Rd+FWqxWz2TxutcFRZUgYAyspKcHHx4e2tjaROCJkuhqNhqCgINra2mhra+PLL79k7ty5JCcno1ar6ejooLOzk5iYGGpqapyyaRtrpy6RSDj33HP5+OOPWbx48bCFxGQyUVBQwJo1a1CpVKKbEIyss+wogmEwGCgtLcVo6qGlvRNPvRrpt6VQT0/Pb9n1do61nOJU5ylaG1soPtTGyig53X12hKnKOUFyHjpXxQu5/TyXb8ec34zFCloVBHnY2f/pK2z/9B1SFizjr/dtxNfXlxVrL+WjD/5LcqiBD7IaMPd08+F6Bf46Ce3GfhL8lOT83p/nMgzc92UHjYoA4hIS2fjri1m3bt0ZyUhycnJ4+cXnOZi5j+7ubmqau3jxRsQg7Ig10TAnSMYbr782qUAM321ULRYLcrlc/LuLiwsSiYSSkhJSU1MnlenZ7XbUajU2m00UDVm3bh3z5s1j3759nDp5EoVCwfrkZJYsWTLm5y2TyXBzc6OiokI0d5ksJBIJGo2Gvr6+KV2fI7y8vDh+/Lg4JjUdPJuZOL70gwvEc+fO5ZxzziE5OZnw8HCeee
YZPv30U/7whz+M+9qZIHPpOCpxps9jsueQkpLC1q1bqampGTS/K+DYsWPY7fYRS3SOkp7Nzc2YTCb2798vqjMJv6PX65HL5YMCWlZWFps3b0YulzN//nykUimHDh2itLSU8847j/j4eOrq6igoKODYsWNs2LDBqetx1FYeamywbt06du3axWuvvca5555LTEwMEomEY8eO8dVXX6HX67nmmmsGLTD9/f0jGtUDopuQt7c3F1xwAfu2f0ZRQw8XB7tht9loa2unv68PVGr+/N5hdpU0sCiym3kL5dQ12smv7+e696zcd64LZ0XIaTX0oZTZufeiECKKbPx3TyMXJOm44xwvEgM0WG129h3t5l97vuKOXzXzwiuvc+ONN1JddYI/fLqNI5U13JkOWpmVhi47VmR4enmhUiq442xPyhr6KZd48cJLr5yxStL27du57093MtvdwKPnaSmokvPXbTBvFE6WRALz/KwcrJo8s9uxXyxkxnb7wHz57NmzycjI4MSJE0RGRk742ELJ29E2URj9WT+CachYECwGBfeqoTO8kzm38PBwjh8/LiqBTQWCF7GrqysFBQXic/v/G35wgfiGG24Y9HetVuuU1jSceZlLGE4ImU50dHSQn59PXV0dUqmUqKgoUlJSRhzyn8o5JCQkEB0dzVdffcVll102SL2qqamJPXv2kJycjJ+fHy0tLcOMKwRJTyHTTUlJEY0Njh8/zqeffjpM5L6iooIvvviC6OhoQkJCiI+Pp7a2FqvVSllZGdu2bcPFxYXm5mZ27txJUlLSqCXpoZ65AptagGNp2d/fnyeeeIKHH36YTZs2iefU2dmJBBt+3h7c8atfkjJ3gViuFMaEhKArGBsI2ZQjlp+7jve3vExcYDfxgTpcXfV0dnXx0YEGtudXc98KFevPjsBqsXDsaCe/V1v4d5aNP31h5IFVSoK9NPgE+uHv78/u9wtJ8bHwn6sCUcoHzlMmlXBOnJ4YXxXr3yxi8+bNXHvttfzt/o28+WYsD957F4m+NrqtctRal2G8icURar7JqqOkpGRM157Tha6uLh687y+cH2Zk44V+SKVCK6GNU52QOIrg0qkuCW7Bwx2UJgLhPhA270KFRqlUkpaWRk5ODq6ursNkJMeDMCIlkLfy8/PR6/VOefuOdCypVEpwcDBdXV2iRvZkq202mw2VSiWSrQQFwMlCqPzNnj2bnJwcysrKRM3tyWAmZsPwAwzEQ6HT6ZxS1oKZUZqG0zPClJOTw6effkpfXx/e3t5YrVYOHjzI9u3buf7664mIiBh2DpMtTUskEn7zm9/w+OOP8/bbbxMYGIibmxtNTU3U1NTg7+9PUlISH330EbW1tchkMoKDg1myZAk+Pj6iBF9bWxvl5eWD5kNXrVrFF198wdatW7nwwgvFBeXAgQN4enoSExMjlqlDQ0NpaGggISGB48eP8/zzzxMUFIRUKmXNmjUolUox6I7mJuTm5kZ1dTV9fX3odLphGwAYECp48sknKSwsJDc3l88+/QSroZFkXxtpbi109dr56r2DfPn5Ju578FHOOussp0twP735Zurrqvnz57uY699IQqALtc0GnthexTWzZVy6KFxkQas0WqyYuHeljLzGPvI6vVi9PAqZVEpjZx/ldUY2rtWLQbirx8rm4k4O1feikEmI0Pex9bNNXHvttcjlclasWMG/H3VF5aogIHDkLMrQa0On12O1WqmsrBxxXvp04osvvsDa3cydK7yQSiUU1fTyz+3NKKTwTC48ff7w1xxthe3H7Dz9u6um/P5Cn9/RIALAzc2NxMREioqKJpyFOvJDvLy8iImJEfvOzipjjXSs+Ph4Dh48OKWysqAHIPA0ioqKcHFxmTSBSziewKTOzMzk5MmTw9aj/3X84AOxs8paMHNmeKc7Mz9y5AgfffQRYWFhgwbejUYje/fu5bXXXuP2228f1D+abGlaMK7o7u7miiuuICcnh8LCQpqamvDy8uInP/kJSU
lJbNq0icrKSpRKJSqVisLCQvbs2cM111zD8uXLxXMYGhx9fHy44447ePLJJ3n++eeJjY1FKpVy8OBBli9fjkajEZmsSqWS0NBQzGYz8+fPJzs7mzvuuIP3338fpVI5apbr+P9+fn50dHRw5MgR5s+fj0Qiobe3d1hZWWBFV1VVYag9zH8u0bMi3nXgOICh18pdnzbx0P338P7Hnzvdq9NoNPz1vo3s27ear7/6ko9PnaCnzwu1qolL53s6LPASPDy9aKo34aK0c260jG+qe5BJpVisdh7bXo1EKiElfCAL/Ly4k/s+r6W/38IsXzD1Q2mTne6K/VRUVBAbG0tISAihkbF8WlLG8ujhgcRqs7P5cD/L154nWuh5enpOOAOcCo4cOcIsX/DSyTnaZGb9y6eIdrfw87kS/ptrJ9wdfrMAVN+uhMWNcNUmGWEhA3aX0wFh3RgqdhEUFERnZyeFhYXiTLozGErUDAsLo6urSyRvTaTq4KiEJTgXZWZmTrqs7HiNgnRrfn4+ixYtmpSJh+P5qdVq5syZQ05ODlqt9rS6a33f+MEHYmGO2Bn6/v+vGfHevXtxc3MbNk6k1WpZvXo1H330ETk5Oaxdu3bQOYyVEdtsNkwm0yCT+qHGBh4eHlx++eXceOONoj1jf38/Dz74IKdOnWLt2rVEREQgkUgwGo188803vPzyy6jVanHBGekc5s2bx6OPPsrXX39NTk4OXV1dqFQqYmJiCAsL48SJE/T19YlZZ09PDzU1NTQ3N/Pyyy/T0dFBaGio2GMeKcsVrtFgMIiSgRkZGVgsFvr7+0U3Ib1eL0qUWq1W/v7XP/HTeWpWJ7oPOpZeLePhi31Z80IdW7Zs4cYbb3T6+1MoFKxcuZKVK1cCUFhYyB23rEejkIhkIRhgofaZzdS3NdPbZ6GqpZenv65l77E+mq0e6N0stJgkHKjs5k8fV7Mu1sYflsjw1g7cE9mnLNyzu48NP/8p72/6DG9vb66/6Wc8eM/vmXuwi2vm6sX7p99q5/5trdT36XnkmmvRarXEx8dTWlpKenr6hDO3yUIul9P77X7qX1+34qm28N6VUrRKUCts3LXDziPfwMIgqO+GvHqIighm82efi9KhE4VAHhzqpdzT00NERMSgtSY+Pl4suTrOOI+FoYFYGEUayfHJmWM5bgCUSqVYVtbr9ROetx16vMjISLq7u0VVq4mqeA2VzHRzc2P27NmifvREM+0fS9MzFBPNiP9/C8QdHR0cP36chQsXjniTKhQKwsPDKSgoGBSIHTPiobPXQgYolUoH+SGPZGwwFAcPHuTo0aP85Cc/GbTjFTYFJpOJTz75RMw+R9sMBAQEcM0113D11VdjNpu55ZZbaG9vJzk5GZVKRVNTEwEBAeTl5bFjxw6sVivx8fHiWNN9993HAw88wJw5cwZdo7C4GgyGQdfo6elJc3MzCQkJ+Pv7j7jgZGZmYupsYl2y+4jn7KaRcXYY7N+7a0KBeCjCwsKQa/QU1xsJ9TCh038XIP38/dG76inYewqjypvc/tksvmIxF110EX+//298WLCbTqOZuf42HlolQyr9lhxohwA9PH+FO1e8X8WHH37Ihg0buPzyyzl+7Bh/f+153ilo4exIGWaLnS+P2uiw6fjbg4+SkJAADOj+trW1UVpaKsoXnm7MmzePz997maKaHraVdXHPMgk61cD7PnaejBtTbDyXa6PKIMdotRMe6k9eQZHT+gA2mw2j0TjovhCsHbVarWj4IKjryeVyMbgIG7zU1FQyMjLEGWdn3nPoxnAk8paz5z/0WvV6PbNnz6a4uJj09PQJSYMOzfoFVauJbjYcjzf0WfL398doNE4p055p+MEHYp1O53QgnglkLZjeQGw2m7HZbGPu/nU6HXV1ddhsNtHEXXAtOnDggGg1ONLs9UQX28zMTPz9/UcsO0kkEubPn8+nn35KZWUl/v7+w5jKI3nmKhQKVqxYwbZt25gzZw4REREcP36c/f
v3s3fvXtLS0gbmY781ond1dWXz5s3ceeed/OlPf0Kj0WA2m1Gr1aJRvY+PD3q9ftA1Hj9+nKqqqlHJKb29vWC346YZPStwd5FxrMe5+3E0eHh4sHzVBbz/9ZssjVQjk5twcZA4zKiyUN3vxdPPvsjixYsxmUxkZWURHhXDewd2Yeo08MK674Jwv9VOi9GKRKYg1NeVC+M6+GLzx2zYsAGJRMJdf/wjK1au5P3332NHaSFSmZxV6wekQ4eygoUM8Pjx46K94unEihUreDo4ir99cZh+q51Zft/djzabnSC9nb+fq8LFRcv7JRb+tLtn1I2iIGnrGHC7u7uRSqWDqh9jWTs63qOOJVdBMUuv149ovemI0TQEHMlbQsVpPIx2LH9/fwwGAwUFBaSnpzvFWRh6XQKETcJk+rujmUhERkaKwXgymfZMww8+EGu1Wsxms1NzuTOpND1d56HX61EoFDQ1NQ0qQ9lsNvr6+ujv7+f48eP09PSwb98+AHFnL5PJmDVrlvj3iaKjo4OTJ08CA+L4bm5udHZ2jrmACEpUnZ2d+Pj4iOcpBMPRerk/+clPyM/P56233mLp0qVERkayefNmAgICxIzN03OgpyqTyVi3bh3PP/882dnZ/PrXvxY/p7EQHh5OS0sLR48eJT4+ftjPIyMjQa4m+6SJcxOGZxl2u53saivR5wx/7UTxyw0b+FXhQX79yWEuTZSwONqG2Sbjy9JOPiuHleuuZeHChXzyySd8+NbLmNtr0SlsKKR2zP02FNhp6JJiB/qsIJMr8fX1RSaXE+qh4Iva79ToJBIJ8+bNY968eeOelyMD1sPDY5j37HRDqVTyyL/+zW23/pRucxlHW+yk+kmw2MBsAalMgaurGwBVrT3o9QP3nrDhHFpaVqlUYtD19vYethkbD45iH44tD4FIWFhYyOLFi8fM8sYS8xHIW8Io0ngtgLHckqKjozEYDBQVFTF37txxe8+O0p5DMdn+7mjn55hpl5SUkJKS4tR38GNpeoZCEFkwGo24ubmN+bv/P5amNRqNaPItBOL+/n6xt2gymaivr+fCCy9k/vz54giNwFiezMhEV1cXb7/9NllZWQNZIgMP6pIlS1Cr1bS0tADfqU85lp9bWlqQSqVotVrUajUymYzm5maR7Qwji2Go1WruvPNOXnrpJbZu3YrRaKS2tpZ169bh7u6Or68vSqVStOyTSCTMnTuXwsJCp4OFVCoVe3U+Pj7DCFdhYWGkzF/Cy1lfsSRSi1Y1+Dw/L+niZLeKuy69bAKf5sjw8/Pjv8+/zPPPPccL2z/jqcw2VCo1XgER3HT7tVx77bV88sknvP3sw1wSZ+fSCwPx1iupqO/m3H92cKLDRkKACrVGjV6tGTQ6dbS5Hx//8cVORoNerycuLo6SkpJpE2kYC4mJibz74adcdsnFvFZ0mPOiQSGX46JTfxuoJHSbbXxwyE7KkgXs2bNnxNKyXq+fNkEJx2AsfK5hYWEieWusednxVPUmQt4a61gSiYTk5GSysrKoqKgYcXPpCEeVupHg5uYmumGlp6c7pZQ1VoLkqB89HTPLZxI/+EAslGRNJtO4gfh/vTRtsViG9XK7u7txdXWlt7eX7du3M3/+fMLCwpDJZJw4cYK8vDySkpJYvXr1oF36ZMeXjEYj//jHP6ipqWH+/PnExcVht9spLy9n3759KBQKjEYjDQ0NI+6a8/LyCA0NJSEhAZlMRlJSEiUlJfj6+qLRaLBarXR1dQ0rHwqCHzfeeCPr16+npKSE//73v8yaNWtUUQVXV1dRSMNZaLVaYmJiKCsrY9GiRcOy6N/fdTe/vKWUm96u46YFOuaFamg3Wdlc3MUHZXDBlT8dpJE9Ffj5+fHXv/2N2++4gwMHDtDd3c1ll12GUqmku7ubD958kSsS4afLg8XXxAboWDXLh80VjayOseLn5j5oIazp6GdbpZ0Nf5wao1hQOCstLSUtLe20ZyoBAQE88tjj/PT69Wz8xs
w9KzXo5APkwFMdNu7+0ky3xI0LL7wQd3d3kpOTT1u5c6goj6NTk7CRKy8vFys1QzFeIJ4IeWu8YzmODen1eoKCgsY8lnA9oyEgIACDweC0UpbVah2zOiDMLGdnZ6PVap1Sw5uJ+MEHYqVSKWZ+40EqlQ4yrD5TGC8QO+uJLOzyNRoNCxcuZNOmTeTl5ZGTk4Pdbhf1eS+55JJhD8Nkx5e2bdtGVVUVV1999aAS9Ny5cwkLC+Pdd99FLpfz2WefsXLlSiIjI5FKpXR3d/PNN9/Q2NjI7bffjkwmEx2xNBoNOTk5KBQKTCaTOMeo1+sJDAxEr9cP69nFxsby1ltv0dDQMGogrq6unpQlXnBwMM3NzZSXlzNr1qxBP4uOjua5l9/kqSf+xb279oKlHaRS3H1C+NnvbuKGG26Y9qDk7u7O+eefT05ODidOnCAuLo4DBw5g727k0rnDF9bfnBfODc+086ev+vj9yi7mRXlitdk5cLyHjTu68Q1P4rLLppa1SyQSEhISyM7OpqqqapBr1VRht9sxm83DCFS9vb387Je/4ZUXnmVzuZF5gdBvk1DQAO7eQbz+9suiOURDQ8OYQWeqcMapyc3NbcT7zxl/XmfJW2OVpgW4uLiQkpJCQUEBWq121CqYQNQa7/6NiYmhu7ubwsLCcUVenDk/YWa5sLAQjUYzqSrdmcYPPhBLJBJcXFycEvWYST1i4QG2WCyi5KPjH4GA5awnsr+/P7fddhs1NTWislZkZOSo86zjbQaEnzmKYdhsNnbt2kVcXJwYhB17u76+viQkJNDc3ExYWBg7duxAJpOhVqtpb29Ho9Gwdu1apFIpe/fupa+vD41GI46g+fn5kZaWJo5CjQVXV1fOPvtsMjIySEtLGya/2NTUREVFBXfccceYxxkJQpB58803eeutt6iurhbZseeffz4JCQnc89e/sWXLFiorK3FxcWHt2rXMmTPntNpnzp49m+zsbLy9vWlubsZLY8ddO7zvnRKq59lbkrn+2SKufMdEmLeN3n47nf1KZs9ZzsOP/WtaHHYUCgWzZ8/m4MGDuLu7T1oZypG1LPRzBWvKoaXl1atX84tf/IJNmzZRXFyMTCbjikWLuOiii8R7QBiPcXV1nRBjeKJwdGpyHGkSAl9hYaG4oRx6zc7cJ86Qt5w9luCuNJZwiLPHEkrezmbszlQmfHx8iImJEZnUo8mp/tgjnsHQarVOB+IzVZp2NDYQFp76+np6enpQKBRilitkgC4uLpOycAsJCXFq9MGxND2azrLjcaVSKUajkc7OTrFvNdJDERwczJEjR/jd734n9mgNBgPx8fHMmzcPLy8v9Ho9UVFRg0hiLS0tlJSUEB4ePubD1tHRwebNm/nyyy+pra2lpqaGBx54gMsvv5w5c+ZgsVgoLi4WNwznnz+C9NI4sNvtvPHGG2zatAmdTkdKSgpWq5Xs7Gx2797N7NmzMTUdR2dtJdJdQme7nTf/nc32qDR++evfTkl4fyxotVri4uIoLS1Fo9HQ2Qu9fVbUyuELXXqUG4uSgvGavYbw8HDkcjnp6ekTHj8ZD25ubkRHR1NSUkJ6evqYhDhnWMt+fn5ER0ej0+lGXcA9PDz42c9+Nur7eHl5ERYWJs6qnk6r09H6xT4+PkRERJCfn8/ixYsHncNEnNfGI29N5FhC71mQwRz6OmeyVwFDS96jjW1ZLBanjxkWFibOLE9FpvNM4H/nTE8jXFxcnNKb/r7IWo7GBo5/hD6noOcaFhYmZrnfx05vaJZrs9kwm82DslpAdBFyZIUKgVsqldLT04NEIsFqtdLf3y+Sw/r7+6mrq8NoNNLV1UVkZCQpKSnodDpRIcrRzcYR3t7e+Pr6cujQoVF7jnV1dfzud7+joaGBxMREYmNjaWxsJCMjg+eee04kbCmVSpYvX87vf//7SQng79y5k08//ZTzzjuPmJgYbDYbbm5unH322WzevJktn37E79aEc8vqeOTf2v9UNRl4eU
8Gz/7Hzt1/+dtpW/wDAwNpaWnBarXSK3dj56E2LkwdLricWdlBu1XHn2699bSTYEJDQ2lra6OsrIyUlBSAEUvL08FadhaRkZF0dnZy6NAhkpOTT+vz5djmEUrUAFFRUXR2dlJcXMycOXMG9ZYnsskei7w1keAp9J5H03yeyLFgIGNPS0vj4MGDaLXaETegzmbEwvklJiZy8OBBiouLvxfuwXThBx+IJ1qans6M2G6309fXN0x9ytHYQBBNF7JcmUxGZWUlNpvttGVOMH6Wq1AoUCgUtLS0EBAQMKYCldVqpaOjA4PBQGBgIDk5OeKsrVwuR6FQoFQqcXFxoba2llWrVjF//nzx9d3d3XzyySfs2rWL1tZWlEolixYtYs2aNYN6i7GxsWRmZlJXVzesv2e327n//vsxGAz8+te/HkTMO++883jppZfo6enhzjvvZM6cOZMmfdjtdjZv3kxERARz5szBbrfT1tYmGjnERkdw/HAgTZ29YhAGCPPVs2FVBBu35lFQUMCCBQsm9f7jQVisMjMzSVu0gpf3fYqLUspZ8Z7IvjVEyDneyVN72khdfMlpD8JCadnb25ujR4+SkZEhbs5OF2tZgNVqpampCRggto0kRJGVlTUtXr1jYbR+sVDCzczMpLKykpiYGGDigXgs8tZEj+U4Ezy0tz9UzMMZeHh4iFMb6enpw/QMJhrchRaQwPSOi4sb9POZGph/8IEYvpO5HA9T6RELC46jp6zRaBTlEB2Drk6nE40NRsJ0k8aGugmN9H6Oc7nCw5aQkMChQ4dEJamh0n7CBsNkMqFQKNDr9Zx11lm8+uqrlJaWcvbZZ4vlI6vVyq5duzAajaxZs0Z87/b2dv7+979TU1NDbGwsSUlJGAwGMjMzOXDggBg4YWBzkJCQQGlpKV5eXoPKcKWlpRw6dIirr756GDvexcWFK6+8khdffBFXV9cpMS9bW1s5ceIEF1xwATDw4Ov1erq6uujt7cXW18u82XHkHswcJqsa4OlCnEc/+XkHT1sghoHPadasWZhMJqRSKY/v+5LXs44T6jYg81jf60LyonX8/q67p/V9LRbLsCxXKC0L6mQtLS0kJibi5+d32ljLFouF1157jXfffI262lMABAWHcfX1N3HjjTeK96RAVszLy8PNze20koAcg7Hj/LtCoRBHdNzc3PD19Z1w8ITRyVuTOZYgQHLw4EH0er2YEEwke3VEcHAwBoOB/Pz8Ye2JiZSmBSiVSubOnUtWVhZarZbg4ODxX3SG8WMgZqBE4kxG7Gxp2tHYwDHLFRYcvV4vBtzRFHjGO4/JZObCaxwDryNGE8MYTWdZrR6Yw8zNzUWlUokkGYEk5urqSlBQkJjJCLO5KpWK1157jcrKSsLDw7Hb7aKwx89//nMSExPF93nppZdobGzk+uuvH7QQLly4kM8//5z//Oc/PPXUUyJ5yMfHBx8fn2El6vz8fNRq9agM6YCAADw9PcnPz2fZsmUT/mwFCEYRjpmbUqkUCWfYbbiolVhtI49+eWoV1Bo6J/3+zsLT05OoqCjc3Ny48qqfsG/fPtra2pjtJb0iZQAAlKRJREFU6spty5aRmJg46exhNNbySKVloe3gqE52/Pjx0ybob7FY+PWvNpCxcwsXx8N56wY2a18eOcpTD93DwZxs/vPfZ8Rg7O7uTkxMjDj7ejpnnh2dmhz7xXq9Xpy/XbRo0aSCJ4xM3prssRwzWcE9ajIZsYC4uDiMRuOIZfjJBHetViteq4uLy2kXjpkqfgzETCwjdgyAgrGBY5Y71NhA0FnW6XROsXmdgTOB2FkClfDAOzqwDIVQPndkpRqNRmQymdhfF0ghY5FkBJx77rkkJSWxa9cujhw5gkQiYe3ataxYsWLQuEZ9fT15eXmcffbZw7IRmUzGeeedxwsvvMC+ffu48MILxZ/FxcUNK1ELJa7xZianygHw9vbG3d2do0ePDgr6Wq2Wrq4urHY4fOwU0X7aYfeC3W7nVF
sfvnETE9qfLKKiomhra8Nut49JXhoLjuYejn8mW1qOiIigvb2dw4cPT8l3djS8/fbbfLNzCy9fpmZ51HcVk1Wxai461svPPt7Cu+8u4/rrrxd/FhISQnt7OyUlJYOCxOmA8GwPDcYBAQF0dHRQUFAwqCo1UQwlb0209OuIoZnsVI4llUpJSUkhMzOT8vJyUTxkKsf08vIiPj5eZHqP5Oc9U/BjIMa5QCyI/vf393Po0KFBxgbCguOsscFU4RiIRxoTcoRw4zmKzI+V5TpuKIRFVRgTEnSWfX190ev14saipqaGEydOEB0d7fRDExQUNGixGwlHjx6lr69vVEUfjUZDYGAghw8fHhSIRypRC7OL9fX1g7Sghc+no6ODpqamKfvlyuVyzjvvPN5//31SU1NFtTKJRIKPjw9FRQWUHCrn8rk+PP/lYRQyKWlRXqRGeFF6qp1as46L0hdN6RychVQqZdasWWRnZ+Pl5TWuPeFIpWWj0YhEIhErPc6wlseCY292pF7/VGC323nvrddZG82gICzgrCg150X18u5br3PdddcNIiEKPdbvQyN7NPJWXFwcubm5GAyGKQUUR/IWjK6E5Qzi4uJED2N3d/cptRMUCgVz584VmdSBgYFTCsQwsInq7u4mLy/vtFc0poIfAzEDgVhgTVutVnFMyDHT7evrQ6VSiSXZqRgbTBaOgdZqtdLX1zfo5xMpLQ8dBRGuFRAXVWGEQq/Xj7mxCAoKoqGhgcrKynFl8CZ6vcJ1jYbRFpGhJepFixbh4+PDpk2bRD9jqVSKu7s7np6efPnll7i5ubFixYopn/fll19OQUEBb7/9NqmpqcTFxWGz2SgrK2PPnn3UNHXySb6UsGB/bFYr72SW4Kq04eXlTfKKq6b1MxwPwkiToASmVCqnVFqeDqhUKmbNmkVRURFubm5OSSE6g66uLk6eOMada0dfjFfHKtn65VGMRuOg95XL5SQnJ5Obm4ubm9tp9VQejbwlEJH27NlDS0vLqOYizhw/KSmJrKws8fiThaOHcX9//5SdkISSckFBgcjxmCpXID4+nvz8fLEKMBMNIn7Qgbi9vZ2ioiKOHj1Kf38/8+bNw83NjXvvvVfscwozhTqdDqvVyjfffCN65J4uOGa5IxGoNBqNWP7TaDRjBly73S6Wz4eqDAmLqk6nIywsTGRmT/TaBAGLrKws/P39p43UEh0djUKhGNVEwWw2U1VVhbe3N9u2bcPX15fU1FTxQYuLiyMjI4P6+nokEgk9PT0cOnSI9vZ25s6di4eHBxUVFZSVlWGxWHjkkUcmNa40FC4uLmzcuJH333+fr776iry8PGBgMbfb7axctZqkxHh0KjkSrDQ0tbA/I5f2LiUPXn3N91o+s9lsuLq6olarycnJGXRvnW7W8ljw8vIiNDSU4uJiFi5cOC2L58AxJFhG6c8DWGyAQ0nYEXq9/nvzVHYU+3Dsk6pUKpRKJXV1dQQGBk669ykYtggtHGfsF0eD4GGckZExLb1YQTxEyNin+t1LJBJSUlI4ePAg3d3d40oZnwn8IAPxNddcw4EDB6iuriYkJASlUklwcDC/+tWvmD9/PklJSWMuhs44NTmLifZypVIp3t7e+Pj4cPLkSZKTkwed19As12AwiCpbgsVaSEjItC+qWq2WyMhIDh06RHp6+rQoRAUHB5OcnMz+/fsJCQkZNNpgsVj497//TXl5OZ2dnezduxeJREJAQAA/+9nPWLp0KQqFgsTEREpLS/nwww+RyWT84Q9/IDs7e0Di8dvP22q14uXl5ZR7kLPQarXcfPPNXHPNNdTW1iKRSHjqqafw9vZm/fr1dHR0iJwD/7A4Zs9J59VXX2X//v2sW7du2s7DEYLWuKMWt1Ba1mq19PX14ebmRlpa2qRLy9OJyMhI2tvbKS8vH0Tgmyx0Oh3xSbPYevggl84eecO19bCZpNkLR7UFDQwMpKOjg+Li4nHlGaeKoWIfju8VGhoqOjVNdkMgPP
/l5eVO2S+OBaFC0tLSgsFgmLIimWCAUV9fP2nCliPkcjkLFy48rZunqeAHGYjXrVvHrbfeSnJyMp6enmzcuJHi4mJ++tOfjvk64UGY7CzxeGNCQ8vJY7kJhYWFkZubS2lpKTabDYPBMGhMSKfTiYxlrVZ7WhcMAWFhYTQ0NHDy5MlR2ckTxc9+9jM2btzI66+/zuzZswkMDKS7u5v33nuPjo4OLrjgAs466yx0Oh319fXs37+fhx56iD//+c8sXboUHx8f0bf0mmuuIT4+nvj4eLEULwikPP300+zatUscO5ouqNVqoqKiqKmpobKyknXr1olKaALJzVHAYffu3VMOxGOVlgXPZb1eT0RExKDScltbGwUFBURERJzxIAzfyXJmZWXh4eExLYL+1994M/f8IZ9NxSYuTx4cjDcVm9hTJeWhO8ZeB4Re7dGjR4fNqU43RlLestls+Pv709/fP6rKlTMQWM4TsU0cC2q1Gjc3N1FmciobfYlEQkREBPX19ZSWlpKamjrlStFMJWrBDzQQr1+/ftDfXVxcnDZ9EBShxsJ0jwkNnbsUSodKpZKmpibCwsIICAhAr9ePOX98uiGVSkVlG4EpPlX4+flx//33s2XLFvbs2UNRURFGoxGj0chPfvITFi9eLP5uQEAAV155JR999BEvvvgi6enpyOVyMetzFB8QGO2Ory0pKZn2QCygo6NjkAiLRqMRCYBC9uDl5UVVVdWEjjsR1rIwnz4aPD09CQsLo6SkZMaYravVatFhy9XVddRM1Vlceuml5Oflcde7r/FJaTtr4gaCxbbyfjJrpFxx3S1cfPHFYx5DJpOJWsnu7u6DfLynG8KzLARjoXcsk8lITEx0Sq95NAjHmYht4njH8/LympCH8Viw2+0oFAq6uroGCZpMFj8G4hkOZ7WmYfgs8URKy2NlucAglS3H0qGgpavT6QaxUqVSqeiU5OMzXKbwTEBY9A8fPsy8efOm5eb38vLixhtv5JprrqGrq4t33nmHXbt2kZ6ePux3JRIJy5Yt4/XXXx/k7qJSqcSMcKTPXsg8Thc8PDxE72QhGOt0Ojo6OjCbzahUKlpaWsZUSxNKy0M3ZsCEtJbHQmRkJK2traP25c8EfHx8CAoKEjcIU1ncJRIJG//+dxYtXszbb7zOX/fmAhLS5qTz2P/dxNq1a526Z11cXEhKSqK0tFTUAzhdEM7HarUOCsiOIh3CczcRCMcZS3lrIhBadoKH8VhWjhM53pw5c8jKyhL1F/5/xI+BmMGs6dHgyGDs6+sbkbEs/Hy8LNdut4/oGGM2m1Gr1eKi6uPjM66WbkJCArm5uQQEBJzWxWAiiI6OJiMjg5qaGqcMJJyFQqHAy8uLtrY2fH19R12Q/f39kUgkonyh4HlcW1srloUdIYw1XXXVVdN2rkMRFBQkZjCxsbHiQqrVasXv//jx4/zmN79xurQ8FYLdaHAsB3t7e59WdvBEEBMTQ25urlMG9eNBIpFwwQUXcMEFF4ib6slsWnx9fQkODqa4uPi0VxAcmdSCZjsMVFYEi0Kh+uEsHMU8HKUrx7JNHAtCqdvR0EEQ9ZkMhNElweawqKgIjUYzI8lWU8WPgZjhc8RjZblKpZKuri5cXV2dKi2PlcUIpUMvLy/Cw8PR6XQTFvsXbvQjR46cdrEBZyGUzYqKivDx8Zl2goQQvEaDYAPpaGsXExPD/v378fHxEY0dYOBh/+KLL9DpdKxevXpaz3Morr32Wu699142bdrEypUr8fT0RKlUcuTIEXbu3ImXlxdubm7s3bt3UqXl6YKLiwvx8fGUlZWRnp7+vbzneHC0cfT09Jw25a2pBs/o6Gg6OzvFTHK6n7+hm7Kuri5gcEXN29ubqKgoCgoKWLx4sdO92aEzukJQH8s2cSw4kqqc9TB29vwEjQZBnGMy9+RMWBtHw4+BmIFMq7W1ddQsVygpS6VSoqOjKS0tFdnWAux2Oz09PcOy3KFZTGho6KRtCkdDdH
Q033zzDQ0NDdNCaJkOeHl54evry+HDh6eFaOGIxYsXs2PHDurr60e8XmEhEVjQEomEP//5z9x55528+eabJCUlERUVRUdHBwcPHsRgMHDfffedVu9ZgKSkJH7zm9/w7LPP8vTTT6PX6+nt7aWnp4egoCBuuukmMcs906zlgIAAWlpaOHTo0LR/f5OFi4sLCQkJlJWViZWiMw3HDUJtbe2UdI2FUUPHTXtXV9ewTVlISIio7S58LxEREXR2dk6oNzuSvOV4toljYajEpTMexuMdz/EZiIyMFG0OZwqHYbogsZ/Oxtj/CN555x2uv/56srOziY6OHjfLLSgoEJWSHAOv1WoV54+FwPt9zV42NDRQXl4+zLv0TKKvr4+MjAwSEhKmldBisVj47W9/S11dHZdffrkYjG02GyUlJWzfvp0rr7ySW265ZdDramtreffdd9m8eTN9fX24uLiwePFi1q9fz6xZs6bt/ByzGMdqiMlkEnWnjx49Snt7OzqdjvT0dLy8vMQMdCYEGBgQfcnKyiIsLGxKc6bTDUHZ7nSPD00Era2tFBYWMn/+fFH3fCw4qtgJAbe7uxu73T5s/dDr9YOCztA+sRCMLRYLmZmZ+Pr6OsXmrq+vp6qqahjXwm63U1JSgslkmlBPPiMjg6ioqEHPunAso9E44eB56tQpmpubmTt3rvhvVquVnJwctFrthL2xpVLpjFkbh+LHQMzAzfKLX/yC6upqPvroo0FMxdF0lgFx9k54cAQC1Zm6hoKCAjQazZQIEtON+vp6KioqnNog2Gw2jhw5QmtrKxqNhsTExFEFNpqbm7n//vs5evSo2EtvbGykp6eHc889l9/85jejznobDAa+/vprZs2aNWWbv/FYy8K94erqOm5p+dChQ5hMJubOnTulDLSvr48DBw5QWVkpZmzz5s2bVAbR3t4uZiCnu2LgLITFWMi4ZgqOHz9OXV0dCxcuHOYg5BhwHbXahwZcZ0cNhZaZEIwFdHd3k5WVxaxZs8YlNtXU1FBfXz/IclSA1WolOzsbNzc3p8lb+/fvJyEhYRivYLLB88SJE3R2dpKamjro33t7e8nMzCQ8PJyIiAinjgUDbYjTKT08FfwYiL9FbW0tCQkJ3HzzzbS3t9Pb28tNN90kZk5Dd6nV1dW0t7czf/78GVG2AzCZTGRmZooKYTMBdrudwsJClErlmA90YWEhH3zwAfX19VitVlG/eNWqVVx88cWj9t9zcnL45ptvMBqN+Pn5sWrVKrGqMRaampooKytj8eLFTvebnGEtC38mU1q2WCxkZWUREhIyaf/b/Px8nnzkQQyNxwl3s9FvtVNrUuIbnsj/3XP/hBYuAZWVlTQ1NU2bwtV0oLu7m5ycHJKTk2cMocxut5OXl4fVah1ULRsqDSpszKZqAuNIIHV8PhobG0WnprFGCEfKOB3R09NDZmYmMTExTpG39uzZI2ozDIUQPCMiIgaNEY6FyspKenp6mD179rCfdXZ2kpOTQ0pKitN8gR8D8QzFSy+9xDfffENRURFlZWXAwNzq/PnzmT9/PjfccAM6nW7EL89qtYqlGEfHoDONY8eO0dzcPOUxj+mE8ECnpqaO+JAWFhby3//+Fx8fHxYuXIivry8mk4mioiKKi4tZsWIFN9xww7SfV0lJCRaLZVgPdLzS8tCgO52s5Y6ODvLy8li4cOGE57ArKir4y523McetmVvPCiDQY6AnV9lo4j+7GmhQRvPYv5+bMNHJZrNx8OBBXF1dZ8xIE0BdXR0VFRWnXW5yJDhyQhyVyvr6+kSlMn9/f1xdXU9re0royzqWqGHgXmhsbGTRokWjBp+TJ0/S3t5OWlraqMdvbW0lPz+fefPmjUve2rVrF3Pnzh01Cejo6CA3N5e0tDSnNk/l5eVYrdZRVdXq6+spKytj4cKFTlVrZnIgnpln9T3h+PHjBAcHc8EFF5CamkpISAhpaWnMmzeP22+/fczXymQyYmNjOXLkCD4+PjOm9x
AREUFDQwM1NTUzpq+n0WiIjo7m0KFDw0TXbTYb77//Pj4+Plx00UXiYqLValm8eLHIIj7rrLMmnSXCwI48JyeHwsJC+vr6CAwMJD09nVOnTnHy5EnUavWYpeXAwEBRMOV0wt3dndDQUEpLSye8mfrw/fcIljVyz0URyGXfvS7az4W/XxLKz9+qZOvWreMqyA2F4NKUlZWFl5fXjJlZDwgIoK2tjdLS0imX88eCzWbDaDQOCriCdKxwf3h7exMZGYlOp8NoNHLw4EHc3d2nJBvpDEYyh4CBca/Ozk5KSkpGJds54x88EfLWeE5J7u7u4jSFYEs4FsY7XkBAgGjDOFUlrzONH3RGPBK2b9/OVVddRUFBwbg9FrvdLjJ0T7fU3UTQ2toqlqZmiraq3W4nNzcXDw+PQX29Q4cO8eijj3LJJZeMSOiy2Wy88cYbnHXWWVx99dWTeu/q6mqeeOIJ6urqxE1TbW0tPT09pKSksGzZMlxdXXFzc5tSaXm6YLPZyM7OxsfHx+ketsFg4MarLuJXc/s5P2XkQPnSnhp2toXx+rubJnVeQgY62fGR0wGLxUJ2djb+/v7TYk84ktVjd3e3KKojlJXH6+dWV1dz/Pjx72X8azTylkCWDA0NHVFy9ujRo5jN5nGJis6Qt+x2O9u3b2f58uXjBtjDhw/T2toqKt+NhpKSEnETP9a5FRQUYLFYxiXvyeXyGdNaGYofdEY8Es4991zOPvts7r//fp555pkxd9kSiYS4uDiys7MJCgqaNru2qULwlq2oqBhkCnEmIZFIREELPz8/kVna0tKC1WodtVwqmFy0tLQ4/V6OJLvGxkYefvhh7HY75513Hm5ubigUCqRSKYcPHyY3N5fg4GAWLlxIfHz8ac2qmpubRZLNkSNHsFqthIaGEhMTM+h9hQxUICQ5M4NpMBiwW8yEeI6+CAZ7quk63jZo7GUiCAgIoLW1lbKyMtLS0mYEN0JQcsrJycHDw2NC7j8jiaY4th9cXV3x9vYeV1RnJAQHB9Pe3k5paelpn+8fzalJqVSSlpZGTk6OeC2OGGl8abTjj6e8JeRzzgS6uLg48vLyxszWYfyMWDg3QW50KqpgZxo/BuIhkEgkPP744yQnJ3PLLbeM68ij0+kIDg6mvLx8xghqAMTGxpKRkUFLS8uMIbMIdouHDh0Sd9bCqI7JZBpVGcxkMo060uOoUubYzxVIdqWlpRiNRm688UZRZlLA4sWL6e7uprS0lJiYmNMyh22xWNi8eTOff/45tbW1tLQ009fTjb9eTri/O1aFjsCYFG659bZBWYterycqKorS0lKnPFRdXV2RyFVUtfYyO2Tkftmp1l7cPQMnfY9KJBLi4+PJysqiurp6xrQ+9Ho9sbGxoj3h0BLl0Bl/oa/b19eHRqMRM9zpbD8IG8+cnByOHTs2ZXa+M+/nmBkL37Gbm9uo5WBnAzGMr7w1EYUyqVRKSkoKmZmZY342zgRiYJCSl6DV8L+GmcHmmWGIjo7m9ttv56677nLKaUkYNBckFWcCVCoV0dHRYuY1UxAZGYnNZuPUqVPAgMiFXq+nuLh4xN9vbm6mra2NOXPmYLFY6OjooLq6mkOHDpGdnc2uXbvIzs6murqa/v5+fHx8SElJYcWKFSxZsoTW1laio6Px9vYe8aFOSUmhtbUVtVrNkSNHMJvN03atVquVf/7znzz33HPodDrmz5vLuWcvYfHc2UjlcoK0fdx1tgva+m94+IG/iJ+JgLCwMFQqFRUVFeO+l06nY/6y1Xxe3EmfZfg922nqZ8dRM2efOzVTC4VCwezZszl69CgGg2FKx5pOBAcH4+bmRklJCV1dXdTV1VFeXs7BgwfZs2cPGRkZnDhxArPZjJeXF8nJyZxzzjksXbqU5ORkIiIi8Pb2ntYyspCtnzp1akIVnclCCMYWi2WQGmBQUBABAQEUFhYOWgucDXQCBOWtI0eO0N7ePuhnwnGdDeyCh/GJEydobGwc8Xcmcn
4ajYa0tDTKy8tpbW0d8XdmSpI0En4MxKPgL3/5CzU1Nbz77rvj/q5CoSA6OpqKiooZFfSCg4NRKBScPHnyTJ+KCKlUSkJCAseOHcNkMuHi4sKKFSsoKiri0KFDg5yrampq+OSTT0RJy927d1NUVERzczMKhYKwsDDS09NZsWIFCxYsIDExkZCQENzd3cUH2Gg0jsmo1Ol02O12NBoNXl5eHD58eNrMH7766iv27t3L5ZdfzrJly/D11HNWagRXX7ic9ZdeQHZVH8cajPzp4mh8+06w6aMPB71eIpEwa9Ys6uvrnVrIr/rJehqkQdz/2UlONg9op9vtdg7VdvPnj0+h8E3gwgsvnPJ1ubu7Ex4eTklJidP3e319PZ988gnvvfceWVlZk7YSdYTVahU3ZocPH8ZkMtHW1kZOTg61tbXY7XYCAgKYO3cuK1asID09naSkJEJDQ/Hw8PheGLQ6nY6EhARKS0vH1bOfKoaWqB3v4/j4eKRSKWVlZeK/TyQjFuBI3urt7RX/fSTP5PGg1+tJTk6muLh4xE3dRDcKHh4eJCQkUFhY6LSJz0zBj6XpUaDX6/nnP//J3Xffzbp168alxwcGBlJbW8vJkyenhTQyHZBIJKIphL+//4wxhfDw8CAwMJDDhw+TlpbGqlWrqKmp4auvvmL37t14eHjQ3d1NW1sbQUFB3HLLLaI06EQzFn9/f44cOTLqz+vq6pDJZPj6+uLj40NmZua0lKjtdjtbtmwhKiqKmJgYjlaU46YCtXJgYYkI8ScmNpbP8yu4bGEQa5M9eSl7D+3tNw9i2mo0GuLi4sSZ57HY+VFRUdyz8VGeeORBbvuokgCtBYsNmntVBEYv4P6//G1Md6eJICIigtbWVioqKsYUkDEajTz6yMPs+Woz8j4DKrkEo1WOb2gsv7/7LyxcuNCp93MU1hHYy0PHyYQZaaHVMBl949OBgIAAOjo6KC4uZv78+ad1rFAIho4ZqvBvqampZGRkiG2FyQRiYETbRGcY2CPBz8+PiIiIEZnPEw3EMJB8CEzq9PT0GTPNMh5+DMRj4Nprr+W5557jkUceYePGjWP+rkDcOnjwIIGBgTNGpnAmmUI4CmJYrVba29vZtWsXEomE9PR0EhMTKS8vx2g04u7uzrx580hJSZlS5rJ06VIyMzM5ceLEMDELq9VKbm4uiYmJokNMfHw8hw8fxtPTc0plyr6+Pk6cOCEaSfSZe3FXD15U4qNC2VpeRlePhUg/Hfb+Vtra2oaNvAQGBtLU1MThw4fHJd8lJyfz4mtvk5WVRWVlpUhmmW696KEuTSONNFksFu6+6/eczNvB/y3Vcl5SIGqFlMP1Pbx4oIw/3fkrHvvPC8yZM0d8jd1up7e3d9h8rtlsRqPRiEF3LP/t3t5eSkpKZtRCHBsbO23uUeNBMKMRslTh81Gr1aSmppKXl4der59UoBOOL5C3Dh06xKxZswaRxCaKqKgoDAbDINtScG68aiTExcVhNBpF3W3h+mdyafrHQDwGpFIpTz75JGeddRY33njjuIQLNzc3/P39KS8vHybLdiYRFRVFRkbG92oKMR4jVa/XExISQm1tLYsXLxbHrKbbASklJYUFCxbwxRdfMG/ePGbPno1Go+HUqVNkZGTQ09PDlVdeKf6+n58fjY2NHD58mJSUlCkRm2AgGAHIZHL6rb2Dfsf6bXlWKoFWgxmkihFHPwTiT2Zm5qhGF46Qy+UsXbqUpUuXTurcnYVGoxFdmkYaadq7dy+l2bt57gpPUoK/u66EAA2PXh7Er9+r4emn/sXf//HIoPtE0GzX6/V4enqKRhjOBtWwsDDa29s5dOgQycnJM2IBlslkpKSkkJWVhbu7+2n31RXmi4cGY09PT7G0PBXjGUfyVnV1NVqtdtKBWCKRiJs6Rw9jwY94ohDIYMLxZpIIzWj4MRCPg3nz5nH11Vfz5z//mffff3/chzomJoZvvvlmRrGVFQoFcXFxlJeX4+3tPa1Zwk
iOMY6sZSHojsZINZvNp3XMSiqVctttt/H++++zZ88esrOzsdvtyOVyIiIi+PWvfz1Mrzg+Pp6MjAwaGxsnvWAqlUpmzZrFoUOHBtSGPDxpr+8iwN2OVDpwD5WVHyfGV4VWJWNnSRM90iCeeeYZ1Go1V155JbGxseLxVCoViYmJlJWV4eHhMWPmwwWXppFGmr7Yspk5fpaBIGy3Y7XZsFqtWK1WbFYrlyYqueurTLKzs4mPj8ff35/Y2NgpLerwXcaWlZU17Z7YU4FGo2HWrFmUlJSg0+lO+7ijkBXDYLGPsLAwOjs7aW5untImxdE2MTo6ekol96HM5+Dg4Eln7DCw5jkeb7KeyN8XfhT0cAKNjY3ExcXxxhtvsGrVqnF//9SpU1RXV7No0aIZIzMpiI8IVnKTgdVqHSZ0IJAsxnOMGQ1ms5mMjAxmzZp12hWbDAYDhw4doq+vD39//zE1qYWseCriFfv372fjxo2sWrWK5ORkjpYfRoOJUC8NZRUn2PbV1/xhTRD7DrXw0t5aZHIFCoVCZL4mJiby7rvvDtrQlZaWYjabz3ibwRGCS1NoaChhYWH09/fT1dXFrTdfxzm6cn6xxAObbUA/XCqTIZPJkElltPfYufjVVh7896ssWbJk2s9rJhpWwICQRnNz8/ei3T2WU9OuXbvw8vIaVWvaWZw8eZLKykrUavWUqzCOkprZ2dlOCYSMhZaWFgoKCpg3bx5+fn4z5pkZih8zYifg5+fHPffcwx//+EeysrLGlVILDg6mtraWU6dOOS1wfrrhOAMaGBg4rinEaKVlhUIhzl2GhIRM2VtZpVIRGxvL4cOHTzuTVa/XO00OEkrUR44cmXR5c+nSpVxxxRV89NFHlJSUEBkZSXNjAx8dq6Sp9iQx3lKe+bqOjPJmoqKiWLVqFfHx8fT09JCXl8fXX3/NqlWryMjI+H/tnXd8U+X+xz9J90oHnXRBV7r3ZshQUQRFZKm4BRRRwYFXxAlurgw3qDiu46qocAVFlE33SPeiezfdK2nW+f3R33NMmpSmbZIe6nm/Xn15L03Sp+nJ+T7Pd3w+9M2Iz+cz5qSnXM91cHBAeXk5ampqIJFIYG5uDmMTC7T2KWBubjYcBEZcI21CEcA11luQtLe3x+zZs5Gfn4+EhATG6Az7+vqip6eHrq9OhdiHsbExzM3N0dnZiaampknp5Xt7e6O1tRW9vb0TbgAjkK7s3Nxcep2TwdHREQEBAcjNzcWCBQsY07szEvZErCUSiQTh4eG477778Oijj475+M7OTggEApX6JxMgphAJCQn03KE2qWVlb2Vd3ziIa421tTWj6jlEIpCkTScCRVFIS0vD//73P3rcx97eHk5OTvD09MAnn3wKGxsbbN68We3kXV1djY8++ggbNmzA888/T/87ubYSEhIM1gmvLJyiSZObx+NhaGgIg4ODiIuLg7m5Ob7//nscfGsHfrzPGS489XLIy/9rQo4sEN/9eFRvmSOSCTIzM0NISAhjTkRDQ0NIS0uDr68vPDw89P7zNDk1nT9/Hu7u7rQU52Q2RLW1tSgvL4ebm9ukvb0pikJ+fj6am5uxcOHCSc92UxSF5uZmeHt7MyZDORI2EI+DEydO4I477oBAINDKwSY/P5+WK2QCcrkcPT09yM/Pp09Yo6WWR3Od0hcDAwNIS0tDTEwMY8ZOAN2kqAnko0aCQUFBAZYsWYK1a9ciPj5e43M+/fRTtLe3QyAQqPx7WVkZuru79TIOI5fL1eweyXVCrg2SFVHW5FYoFPSGKigoCH19fbh3/TrY9ZfglZuc4eM0/P6JpQr8J60Dn+YC23a+jltuuUWn6x/J0NAQUlNTERAQwCintM7OTuTm5iIuLo6WfNUXRFqVw+HQKeqzZ8/SgjZNTU1ISkqacP9IbW0t2tra0NfXp7Vt4pUYGBjAhQsXMHPmzHF5GI8Gh8NhtCkEM3I1Vwk33ngj5s2bh5deegnvv//+mBcHkZns7u42eH
C5UmrZ3Nwcvb29CAgIQHBw8KRSy7rCysoKPj4+KC4uRmJi4pSvh6CLFDVh5HOLiopgZGR0RVcpDw8PVFZWqv27n58f0tPTUVNTo1HQX1ukUqma9OPg4CCMjIzGXYIY6dLk7OyMdw58gGeefBx3fl2MEEc5eGZAYRvQx+Hhvi1bcfPNN0947dpiZmaGsLAw5OXlgcfjMUYT3sHBAT4+PsjLy9P7qBW59pRPxqQZiqTK8/PzJ9x7IJfLYWJiQjdvWVtbT8p5iqIocLlcdHR0oLa2ljElPn3BBuJxQHSoIyMj8eCDD6rMP2rC3Nwcs2fPRmlpKZ0K1jXj7VomqeWCggJ0d3czSpeV1Jqqq6sZI4oC6KaLWhMODg6gKAo9PT0anaeA4YyFpsyEkZERQkNDkZmZCUdHxzFPVMoey8pBVywWw9zcnL5OnJ2dYWNjM2HTejLSVFxcDB6PB29vb3z17Q84f/48UlJSMDQ0hJu9vbFs2TKDjdIBw7VHT09PFBQUID4+njEuPLNmzUJ3dzcKCwt1Pus9ElIvVlavI4If4eHhSE1NxeXLl9WmCLSBBPXx2CZq83pRUVHIzMyEtbU1Y6ZQ9AEbiMdJQEAAtmzZgqeffhqnTp0a8+Tm7e2NxsZGnTTXXCllSFLLTk5OtC/qlVLL5LTe0dGhM7WlycLlchEcHIzMzEy4uLgw5uRiamqKwMBAlJaWwsHBQWcprkWLFsHc3ByZmZkqo0qEwcFBFBQUIDk5WePzeTweZs+ejcLCQpUO3JGbMyKMQeq5REzF2toaMTEx43Is0gZll6bo6GiYmJhg8eLFWLx4sU5/znjx9fVFV1cXysrKRjWbNzRExjQtLQ01NTVqojP6+HnKzVvk/mViYoKoqCikpaXB1tZWq9KbMsqNYJqUt8YLCcTj9TAeDab0BowGWyOeAL29vQgMDMTu3buxbt26MR8vFAppiUJtb+IjTy/9/f0YGBiAiYkJbdFGgu9EU8v19fWora3Vyt3HkJSXl9P1TyZ9gPLy8gAMi4ToihdeeAGfffYZrrvuOixcuJBOT3Z3d+O7775DVVUVTp48ieDgYLrxKC0tDTKZDIGBgZg/fz4EAgFMTU1haWlJXysURanV/YeGhvDZp58g4/wfoIZ6AXBhynPEohtW4N777tNpU6FMJkNaWho8PDwYlVYUiURIS0tDUFCQ3kU1xkNPTw+ysrIQFRWl840R8HdGhGzKent70d7ertYM1dzcTAu0jKcRsLi4GFwul262lMvlSE9PB4/Hm1CPjFAoRGlpKebNmwdAew/j0eByuYxRWdMEG4gnyOeff47nnnsOubm5Wp3ccnNzYW5urjbDq21qmdxUNUn6TRSKomjPWyalguVyOVJTU+Ht7T3lIzrKkC7qoKCgUVPJ40WhUODBBx/E77//To/biMViVFZWQi6XY+/evbjtttvQ0NCAJ554AkVFRTA3N4eJiQk6OzvB4/Fwzz33IDAwkNbL5vF4apuzjo4OPPPkY+C05GFtjC0S/XiQyCicLenCD3kizIpdgld2v6bTm1V3dzeys7MN0ow0Htra2lBUVISEhIRJzajqmvr6erqDeTKNgcq2j8qBV7nDndxXiDWo8j2ltLQU7e3t4wp6hYWFMDMzU0lri0QipKamTqh5q6WlBVVVVXQ2iDQCknT1eO+BbCCepigUCiQnJ2P+/Pl46aWXxnz8wMAAUlNTERwcDIVCMWpq2dBdy729vcjMzERiYiJjTCGA4cCRl5fHuPGvlpYWlJaWjiu7oQ0XL17Evn37UF1dDWNjYyQnJ+PBBx+EhYUFGhsb8cwzz0Aul+Pmm2+Gv78/jIyMIBQKceLECTQ2NuK1116DpaXlqE0/77/3HjKPfYR313nC0UZ13SVNA3jqlw5s2P4abrzxRp39TgBQVVWF5uZmJCYmMirrUlZWhq6urgmnTvUBRVEqgi3arEuhUNAbeWVtbrlcrtLdTjJoyn8D5dliZQ9jhUKBzMxMmJ
mZaS3zmpeXBxsbG7XGQWWBjvE0b5FynvLcv0QiQWpqKmbOnDnuOraRkRFj5sg1wQbiSZCWloZFixYhIyND7QIcLbXM4XBgb2+vEnSnumu5tLQUAwMDjFJrAoa7iiUSid6bWMYDmXEkDS66eL2xMiInTpzAN998g6eeekrtZiaTyfDuu++Cz+djw4YNtLSmMiKRCPesW4Hb/bqxLlFzOnbX0Ro02SVj33sfTvp3Gvn7ZWVlwcrKijF1WWA42GRkZMDe3h58Pn+ql0Mjk8mQkZEBJycntWCjUCjQ39+vEnCVx8qUg66VlZVW95TRgjFRvJs9e7ZWpYWcnBw4ODhofGxNTQ2qq6vH1bxVV1eHtrY2xMbGqvx7X18f0tLSEBYWNq7SAtMDMXNXdhWQkJCAVatWYfv27bjxxhshEAgwf/58ODs7q6WW3dzcYGlpSbszGbJjdCymwhRCG/z9/ZGamqrzbuXJQBTKyLrGk6ImN9KRQVehUFyx2W7Hjh0IDw/XeKIgBg+//PILduzYgcuXL6utq729HZKBHoR5jl5CifSyQnr+5XG8E9qh3Izk6Og47iYgfcHlchEeHo709HTY29szZl3Gxsb0ukxMTGBkZEQH3v7+fhgZGdHBloyVWVlZTcqcRJNTk5mZGd2xzOPxxqxbX8l9aSLNW6PpTCt7GBNjkOkAG4jHSXV1Nf744w8IBAIIBALk5eVhaGgIly9fRkhICOzt7RERETFqajkgIADl5eVwcnJizA5Nn6YQk8HU1JRe14wZMxizLjMzM9ou0d7eXmOKWiaTqQVcciMlKUN3d3f6Rnqlm5NQKLziqc3V1ZUO8kFBQSgpKYGdnR1dZzQ1NQU4RugTyUd9jV6RDObm+pH/s7CwQFBQED3SxJRSA9FdLy4uho2NzZTJHyrPcpOgq1AoUFFRAXt7e9jZ2dEjahMdK7sSozk12dnZgc/na6UQeCWDBk22iWNxpde7kofxaDAlozYazIgEVxE5OTk4cuQIIiMj8eijjyIyMhJHjx7FV199hc8//3zMYOHq6oqGhgZUVVVpHFmZKlxcXNDY2IjKykpGyUy6uLigubkZ5eXlCAkJmerl0CgLffD5fLX5XJFIBFNTUzpd6OjoSN/sx3tTcHBwgFAoHPX7bW1t4HK5sLe3h6urK9ra2lBcXEyn9J2dneEVEIo/CtOQ6KeuMS5XUPizTIS46xeN+33QFldXV9qliUklEFdXV3R2dqKwsBAxMTF6LxFJJBKVBqq+vj6IRCJ6lpvH48HV1RU2Njaorq5GT08PZs+erff6+mhOTZ6enujp6UFubi4SEhJGfX/G8g4eaZs4VvPWWM5Lo3kYX62wNWIdMDQ0hLCwMGzYsAGPPPLImI/v6+tDRkYG4xqkiMxkbGzsmKYQhoR0X0ZEREzpzLNyN2pfXx+6u7vR1dUFYPjUp9yNqsnycaJ88MEHOHjwIJ555hm1Dn2FQoH3338fXl5e+OSTTwAMn7BSU1Ph4+ND6xj/9ddfePe1f+HBGCPcGuNMWzFKZAq8d6oefzXb4e33PtVr9zxTR5rIqI2zs/OYnuPaomlcqK+vD0NDQ3TJSvl60XSqI01TPB5vwo5p412zJqcm8v7Y2tqOuhm+ePEi+Hz+mA5q2jZvjRyH0gS5nmbMmDHm+2NsbMyoZsGRsIFYR/z666+46667IBAItLLzKy0txeDg4IRa8fXJSFMIplBXV4e6ujqDzTwrFAoVk4Pe3l709/fT3ajkBiqRSFBfX485c+boTcu2vb0da9asAUVRWL16NX2a6Orqwq+//oqKigocPHgQcXFx9HNI13liYiIsLS1BURS++uor/PTVx3A16UW8lwkkMgqXamUYNHHGY8+8gGuuuUYv61eGzMsybaSpv78fGRkZE9rsjSagIpPJ6DqmctAdT0lKJBIhPT0dfD7fIP0bRJOaBGPldaSkpIDP52s0qTh37hxCQ0O1eu+0ad4qKCiAubn5mN3Rg4ODSE1NHX
VdBDYQ/0OgKAo33XQT3NzccODAgTGDmFQqxaVLlxAcHMyYRhHg7xleLy8vRslfUhSFzMxM2NnZ6TylL5PJ6CYq5cYYLper5j5lbW2tkgbTdRf1aFRWVuLxxx9HTU0NHBwcYGxsjLa2NvB4PLzyyisafbJLSkrQ39+P2NhY+nosKyvD77//jssl+TAyNkZ4dAJuvPFGgzbpMXWkqbGxEZcvX77iHC8ZFxp50lUoFPQ1oskQYzIIhUJamtMQanOanJqAv7194+Pj1TJmZ86cQVRUlFaa+hRFoaCgAIODg6M2bwkEAvB4PK101LU5ZZuYmDA6fc0GYh1SWlqK6Oho/PXXX1qpLzU0NKCmpoZxylYdHR3Iz8+flFasPujv70d6evqkTlP9/f2or6+HSCSCkZER+vv7MTg4CFNTU7Wga2lpqVVWgLj76FLoQxNyuRwXLlxARkYGpFIpAgMDccMNN4xa3pDL5bT/tL6lE8cDU0eayByvRCJBdHS0Wpc7yYpwOBy1oKvtuNBEId3whvBV1uTURKiqqkJdXZ3aHP2ff/6JhIQErbuYx1Leys7OhqOj4xUNUZSpra1FVVXVqPcsNhD/w3jyySeRnp6O33//fcw/PEVRdG1qMg46+iA/Px8A9HrKmwgkdT7WGAQxrScnltbWVpw+fRqFhYWQy+UwNjaGu7s7Fi9ejIULF066G5U0lGnbxWkoSCo4Pj6eUaMeRGoyODhYr5sXbSFZke7ublRVVcHIyAhSqVTFhYoEXm03aLqEyJuamJjoxBZQm59HUtTK88UURUEgEEAmk6k0t508eRJz584dV8/LlZS3MjMz4ebmprVXM9lE9ff3azT1YAPxP4yenh7w+Xy8+eabWL169ZiPJzKAycnJUzY+oQky0B8eHs4YUwhgOG1GTnmk4YfUcwsLC1FfXw+ZTEanb8lJ5YcffkBXVxdCQ0Mxa9YsSCQSFBcXo7GxETfeeCNWrlw5qZsbRVHIy8uj51OZxOXLl+m6P5NuRi0tLbTXsyEzL2RcSFkYY2BggO5yNzMzQ1NTE0JDQ+Hi4sKYXomhoSGkpaXBx8fHINKvozVvyWQypKamwtnZGXw+HxRF4eTJk1iwYMG4/46dnZ3Izs5WSyunpqZi1qxZ4yqZKBQKpKenw8rKSm2zQlznmAo7vqRjbG1t8eqrr2Lnzp1YunTpmDtEOzs7uLi4oLy8XKdmApPFzMwMvr6+9I2SKalziqLg5eWF0tJSekyopKQEFy5cQG9vL7hcLoyNjWFnZ4drr70WCxcuxHfffYf+/n6sW7dOpbY1e/Zs5Ofn4+TJkwgLC5tU7ZnD4SAoKIi2S2TCKY/g4+OD9vZ2VFZWTsjiTl+4urqio6ODHh3Sx42SKNwp13SJ9SM56bq6utIBmGBjY4Py8nKdum1NFjMzM4SHhyMnJwc8Hk/vkw3KTk0kVQ0MNz4pOzURe8KJ3CMcHBw02iaONb6kCS6XS49IGcLJSpewJ2I9IJfLkZiYiGuvvRbPP//8mI9n6umTpM6dnJymxBRCIpGoiWIQByrldNnXX38NOzs7JCQkwM3NjbYPzM/PR2JiIvLz8xEcHIyYmBi1n0FRFL755huEh4fjgQcemPSamZqiJvX16OjoSRm26xoyguLu7j6pGycpRYwMukThbmR6eax5f9KEp1AoGCWxCgx3HdfX1yMhIcEg1xgR+hhZL25tbUV+fj5iYmKQkZGB66+/fkIZF03NW+fPn0dISMiE7ofd3d3IzMxEVFQUvUlg+omYDcR6IiUlBddddx0yMzO1mpmsra1FY2MjEhMTGZU+JDVGfc48K99Elb9GnlyU53PlcjkuXbqE48ePg8vlYsWKFWrvW2lpKU6ePAmZTIb77rtvVJm+S5cuobOzE7t27dLJ75KXlwcjIyOEhYVN+vV0SW1tLerr6ydsJacvyDWm7fy68riQcnqZjAuNNDqY6O8qlUqRlpYGLy8vrZuGDAG5xiiKMsgmYbQUNTBsWd
rc3AyRSIQlS5ZMeC0jm7fG04WticbGRjqbZ2VlpbOZfn3BnE/jNCMpKQkrV67Ejh078PXXX495gXp6eqKxsRH19fWM+tDb2trC3d2d7gif7IeeoiiV+dyRN1Fizebl5QUbG5tRTy7GxsawtLTE5cuXcfvtt2vcvPD5fFy4cAFVVVWQy0eXd5TJZDrb/CinqNva2hg1mubl5QWhUIjy8nJGdSvb2trCx8cHhYWFal3ByvPcykFX2W/ZxcUFfn5+OhsXIpDGqOzsbNjZ2TFG5EZZMtIQKViSotakvOXv74/Ozk6IRKJJ/YyRylsTSU0r4+7ujr6+PloGkw3E/1A4HA7eeOMNBAUF4dy5c1iwYMEVH8/lcsHn85Gfnw9XV1dGXTgTNYWQy+Uq87nkfwOgT7eTuYlSFAVjY+NRm9w4HA48PDxQU1ODsrIyjUIrCoUC1dXVmD9//rh+9pVQ1qK2s7PTWfpQKpUiIyMDQqEQ1tbWSExMHNdcKbmBp6WlwcnJSSvhGUMxa9YstLe3o6CgAE5OTirz3GRcaDz63LrCzs4Ovr6+KCgoQEJCAmP0zk1MTBAeHo7MzEzY2tqOacowWZTrxcoGDxwOB35+fsjKykJ1dfWkpj8sLCwQGRmJ7OzsSQdiYFjXv6+vjy5RMTk1zQZiPeLu7o5nn30W27dvx6VLl8b8EM+YMQMODg60gQRT0MYUQlm4ngTewcFBGBsb00FXF24xypiZmcHKygpdXV2jivaLxWL4+/ujuLgY3t7eKt2mFEXh7NmzAKDTQAwMNyK1tLSgrKxMJynq33//HYcOHUJbWxutC2xjY4M1a9bgnnvu0TooWVhYgM/no7i4eErr2MqmGMpBFxj+m5EZ0vHMc+sLb29vdHZ2ori4GOHh4Yy5ofN4PPD5fHqToO/Ocw6HQwdiEpiB4dOsqakpKisrwePx6LrsRCDNW6WlpZDJZJNaL5fLRUREBIqKiiCVShnVszEStkasZ4aGhhAaGoqHH34YDz300JiPJ1JyMTExE66P6AMyx2hpaYnZs2erpQpJPXekKIY+3GIIYrEYzz77LFxcXBAYGAhXV1eVgNTT04PvvvsO9913H/Ly8pCbmwtXV1d4eXlhaGgIFRUVkEqluPvuu5GUlKTz9ZEmvJCQkEmlqH///Xe8/vrrCAgIwDXXXANnZ2f09vYiPT0daWlpWL16NR577DGtX480IgEwSGBRbroj18zg4CDMzMzUhDF6enqmZKRpLCQSCdLS0jB79myDjA5pC0VRKCoqgkgkMohpBfC3wQNJUbe3t6O4uBi+vr4oLS1FUlISLC0tJ/z6EokEp0+fhq2trU5G7jgcDqODMMAGYoNw9OhROhho0wVYWVmJ9vZ2xMfHT+numzTFkJtnd3c3enp6AICu5yp/TcXFfvz4cfz0008IDQ1FUFAQnaLr7u7GiRMnwOPxsHPnTnC5XGRmZuLixYuor6+HsbExIiIicM011+jVgKCpqQkVFRVITk6eUFpzaGgIa9asgaurq8ZZ59TUVJw5cwZffvnluCRJJRIJLaYwc+bMca/rSutV3qCRcSELCws1zeXRyi/KgYUpp09geOaVSDwySRxFLpcjIyMDM2bMMIij28jmLaFQiIqKCsyZMwfFxcXo7u5GQkLChFPLYrEYZ8+eBY/HG1V5azxwuVzGlBRGgw3EBoCiKNxwww3w9vbG3r17x7y5yOVypKSkYPbs2Vory0wW5Xqu8hcAFZOD3t5e9Pb2MqbmolAo8O233+L06dMYGhrC7NmzIZVKIRQK4e7ujs2bN8PV1XXK1keUiIyNjSeUoj5z5gyef/55bNmyRWMdUCaTYe/evVizZg02bNgwrtcWCoUoLCxEYmLiuMVkRiqXkf9KJBKVTRoJvOO5EcpkMqSnpzNOmhMY3iS3tLQYRGpyPAwMDCAjI2PS2RdtUQ7GbW1tqKurQ2JiIhQKBTIyMmBlZYXQ0NAJ3SMGBgZw6dIlzJs3b1TlrfFwNQ
Ri5lxJ0xgOh4O9e/ciJiYGDzzwwJg3ZCMjI7qO5+LiovOLaGQ9l8znGhkZ0TdQDw8PWs5POTVETCHq6+sZYQrB5XJxxx13YM6cOTh27Bhqamrg7++PFStWIDo6espTUqSLOjU1dUJd1C0tLTA3Nx+1GcfY2BjOzs5oaWkZ99qcnJzg4uKCoqKiK54+R2ZGSOBVdqJydHSEj4/PpMaFlH+n0NBQZGVlwcHBgTHdysCwOEpXVxdKS0snfVLTJUS3u6ioCNbW1pNKDWuDcvOW8tQBl8tFZGQkUlJSJnyPII1ays1b1tbWjJp/1zVsIDYQwcHBeOihh/D000/jxIkTY9Y9nJycwOPxUFlZeUVPzitBPFFHBl2RSKRSn3N2dta6nmtkZISgoCDk5+fDxcWFEd3dHA4Hs2bNwpYtW5CRkTFlAiSjYW5uDj6fj5KSEtjb249rY0WsFkkNfiQURaG3t3fCN96AgACkpaWhrq4O3t7e9LiQctAl40LKne7+/v5qTlS6hIw0FRQUMGrumcPhIDQ0FGlpaWhqatJpWn+yuLi4oLu7G/n5+YiLi9O7Gh5p3iKa3ARzc3M6gJJxxPGg3DHt4OCAgIAANeWt6QYzru5/CC+++CL4fD5+/vln3HbbbVd8LIfDAZ/Pp1WHxqpJjfREJV9EWcjGxga2trbw8PCYdD13xowZmDFjBsrKyhilq8zlchEcHIzMzEy4uLgYxDJOW9zc3NDa2oqysrJxnaQSExNhamqK7OxszJkzR+37NTU16OnpGXM8ThNyuRwDAwNwcnJCeXk5GhsbMTg4CCMjI1hbW9PjQpoyI4Zg1qxZ6OjoQFlZGaOmCMzNzREaGor8/HzY2trqTehmIvj7+yMrKwtlZWV6mRVX7gEgZaqhoSF4e3uryGAqS1cmJyePa8M+cnTJy8sLPT09yM3NnVDzFhNKaGPB1ogNzKFDh7B7925kZ2drdYopLy9Hb2+vSupwpD0b+VIoFPQNVNk/Vx+nCUPKchLz76qqKgDDN5uEhIRR65oVFRXo6upCXFwcoz6EYrEYqamp467j7du3Dz/99BNuvvlmhIaG0jei2tpa/PDDD/Dz88N77713xRsUGRcaaXRgbGwMHo8HmUyGoaEhREVF6Wy8TBeIxWKkpaXRXfFMoqKigm6qZIoWO/D3exYQEDCpE7uyZCi5boaGhlQkQ4l6mZGREe1frOzUlJ+fD7FYjLi4OK0DaGtrKyorK5GcnEz/21i2iVfCyMiIMRmV0WADsYGRy+VISEjAkiVL8Nxzz435eHLzdnJyAofDQW9vL13PHRl0DSVyQKirq0NdXZ1eTSFyc3Nx8OBB9PT00AIUQqEQ9vb22Lx5s8aTEqlje3l5MaKOrcxEuqilUineeOMN/Pnnn7CysqIFL9rb2xEaGopXX31VpYZMxoWUTy7K5Qjlm6iZmRk9l0x0xf38/PT160+I1tZWFBcXT6ipTJ8oFApkZWXBxsYGQUFBU70cFdrb2+kUtTbZNHLSHanTPVIy1MbGRmNQUxb6UA7GpPHOwcFB6/eoqamJ1tJW5kq2iVeCDcQsGrl48SKWLFmCrKwsFTlL5Xou+UCIRCIYGxtDoVDA09MTtra2tHjFVJ9a9G0KUV1djd27d8PJyQmLFy+mU4B9fX34888/0dPTgxdeeEFjZ3lHRwfy8vKQlJTEqJs36aI2MTEZ186eoiiUlpbi999/R1tbG3g8HhYsWICIiAi1mq6mcSEejzdmOaKvrw8ZGRlaaz4bkuLiYgwMDCA2NnbKr3tlmOarrIymDm9N3e69vb2QSqV04x0ZGxpvNk1ZAlP5QDA4OIiUlBQEBwdrdUKvr69Ha2srYmNj1b43mm3ilTA2NmZUxkITbCCeAhQKBVasWAGxWAxvb28UFhbi7rvvxsyZM+l67sj53MzMTNjb2zPKxg7QrynEBx98gLy8PKxfv17tgySTyfDll18iOTkZ999/v8bnFxUVQSKRMM49h2Q5QkNDtZ
aZpCgKIpFILb088tRCvibaaV9dXY2mpiYkJiYy6uYll8uRlpYGNze3Scko6oO2tjYUFRUhISFB793K44Gc2IFhqU5N5hjK140u/t6jOTUJhUIIBAIkJiaOeUKvqalBV1cXoqKiNH6/trYWVVVVWjdvXQ2BmNnn9WnEX3/9hf/973/Izc2FQCCASCSChYUFFAoFFi9ejMTERPj6+o66Aw0MDERmZibc3d0Z9WHXtSkEYWhoCJmZmYiJidH4ITI2NkZISAhSU1Nx7733akzJBwQE0P7ATKovki7q4uJijSlqZWMM5aCrPC7k5OQEHx8fnd1ACUTzuby8nFHpVuJmlZmZiRkzZjDqxO7s7IzOzk4UFBSMqxaqSzQ5UpERM4Kbm5tezDGUIWUOQNUcwsnJCbNnz0Zubi6SkpKuuFEcS2d6ss1bTIQNxAaipqYGCoUC9957L6KiohAcHIy33noL33//PbZv3z5mCojH48HNzQ1lZWWj7hSnCmIKocuAJxKJIJfLwePxRn0Mj8eDVCqFVCrV2JVJNLJLS0sZYfDe2tqKP/74A7W1tTA2NoadnR2sra3h7++vVpIA/jbGcHNzQ0BAgF7HhQgjjSEmoxusa3g8Hm3AwKSRJmB405eRkYGKigrw+Xy9/izlue6RGzVNI2a9vb3Izs5GQECA3jcwV3Jq8vX1RU9PD/Lz86+4aR8rECu7TxUXF49Z4mFSNmw02NT0FCIWixESEoLHHntMK1UkiURCaxczyTkHAJqbm1FeXj5hKceRyGQyPPLII/D391fpnlTm7NmzaG5uxoEDB64oRjGRmqyu+emnn/Dxxx9DLpfD1dUVIpEIjY2N4PF4uOuuuxAUFKRS053qzuWGhgY6/cckVSKieW5mZsYoQQ1gWBEqPT0dYWFhOvt8kuzISAUz5blukmK+UrNmbW0tamtr6XE4fUNRFCiKomUwCVKpFKmpqXBzcxu1zFZSUgIOhzOmfoK2zVsmJiaMPzWzgXiK+fnnn/Hggw8iLy9PKyuz+vp61NbWIjk5mVEXF7lBWllZTViAZCRfffUVTp8+jfXr16s1XA0MDOCrr77C8uXLsXr16iu+jlgsRkpKCiIiIvQ+akVQVi87ffo0Dh06hKioKCQnJ8PS0hJGRkbo6enBzz//jL6+PnzzzTeM2lyRDYyRkRGjZsUBZo80NTc3o6ysDImJieMWnxjpvTwyO6Jc0x3vhAQZJZLL5YiKijLIJm+05q2+vj6kpaUhIiJC4xhfYWEhzMzMtOqH0aZ5iw3ELGOiUCiwZMkS+Pn5Yc+ePWN+QEinsouLC+N0eAcGBpCWlqazrluhUIhdu3ZBIpFg3rx5mDVrFiiKQk1NDc6fPw9ra2u88MILWrlUkQ2MPkatJBKJmvyjSCSCubk5rK2t8dZbb8HMzAy333672s8eGBjAnj17cMstt+Cpp57S6bomy9DQEFJTUxkZ8EiDFNNGmoDhJsHBwcEruiEpK5gpXzvK3svkv7rKjkilUqSnp8PNzc0gynPkVKypeau5uRlFRUVISkpSa/LMy8uDjY2N1k15YzVvsYGYRSuKiooQGxuLc+fOaZVu6+rqQm5uLpKTkxkn+VZZWQmhUIiEhASd3Dyamppw8OBBVFZWAgD9webz+diwYYPWIyMURSEzMxN2dnYTdqhRnrdUThMODQ3BwsJCbd7S1NQU1dXV2LBhA1avXj3qze/IkSNoamrCJ598wqhTMfD3DC8T5QXJSJOh7P+0hYhPODs7w8/PjxbgGRl0lbXdyWlX397LfX19yMzMNFh2aKRTk/LvVlpaivb2drV6f05ODhwcHLR2RaMoCgUFBRgYGNDYvGVqasr4OvFVG4hJQV9ZVu1qhaIoPP7448jPz8evv/6q1U2loKAAFEUxLm2oDzENiqJQWVmJyspKcDgc+Pv7Y9asWeP+u/f39yM9PR1xcXFXbAIjP1MkEqmddJXnLZWVhUaro+bn52Pr1q3YuHHjqI1Pp0+fRk
lJCbZt28a4miwwnCqUSCQGS2lqCxNHmoiLmVAoRE1NDczNzSEWi2FkZKS2UdN30B2NxsZGVFRUTCh9PhFGC8YKhQKZmZkwNzdX8cXOzMyEm5vbuJznrqS8dTUEYua0HWoBCboHDhyAVCrFli1bYGZmdtUHYw6Hg5deegl8Ph/Hjh3DihUrxnyOv78/UlJS0NnZqVVt2VDowxSCw+HAz89v0opP1tbWmDVrFoqLixEfH09veBQKhZq7kPK4EI/Ho0VLxjv64eTkBCMjIzQ1NY0aiJubmzFr1ixYW1ujvLycUbrKAGjN84aGhknZ0eka5ZEmBwcHrUoUukQul6uNCxHZUBsbGzg4OKCnp4cu1TDlHuXu7k6bQ8TGxhqkE584NRH1LUDVqam2tpY+Acvl8nGvycjICFFRUUhNTUVdXR3jFPXG4qoKxORC7uvrw5kzZ1BbW4sDBw5c9YEYGBZJf+WVV7Bjxw4sWbJkzLqXubk5fHx8UFZWxrhZOqaaQgCAt7c3mpqakJ+fDzMzMxXfZXJacXNzA5/P14lkqJubG6Kjo5Gamorg4GC1kZumpibU1tZi3bp1CA4ORmpqKpydnRmVojYxMUFISAgEAgEcHBwYZXJARpqIr7K+RpqIVrdyI9XAwABMTEzotLKPjw94PB7tYkZRFAoLC1FZWYno6Gi9rGuiBAYGGmzcCvjbqYmcjMn92szMDFFRUcjMzASPx4ODgwPkcvmE/o7KtonKrk9XQ2y4qlLT5I9I0p9Lly7FmTNnEBMTMy2CsUwmQ1xcHJYvX45//etfYz5eoVAgNTUVnp6ejNsBEvUoQ5hCjMbIE0tfXx/6+/vpa2jmzJlwcHDQe22utLQUTz31FGxtbbF48WJ4enpCJpOhoKAAp0+fRkBAAN5++22YmZmhsbERlZWVjExRl5WV0Sc8Jm38dD3SJJPJ1PoABgYGYGpqqtK5rKzVfaXXIg5qTGyuzMjIMJg855XqxXV1dbh8+TKSk5ORkZGBkJCQCd83RjZvMcGqdSyumkBMasLkDwkAGzduhFQqxeHDh6d4dbrj3LlzuOmmm5Cdna1VGrC9vR0FBQWYM2fOlAtWjMQQphAEMi400l3I1NRUrQvV3NwcZWVl6O/vV3G10icFBQXYu3cvamtracEDLpeLefPmYdu2bbTsH0VRyM3NhZmZGeNS1KQO5+rqypiaLIGMNPH5fLi5uWn9POXrhlw7g4ODMDMzU7lmSNCdCL29vcjMzER0dDTjzO1JM158fLxBMh2jmUOQ7EF/fz9EIhGioqIm/F6NbN5iWle9JhgbiF9//XW0trZi8eLFiImJ0SgWvnbtWnh4eODf//73FKxQP1AUhdtvvx0KhQKHDx/WKkgIBAKYmprqxX90MujLFGI0cwxzc3ON7kKakMlkSElJga+vL9zd3XW2titBZnNrampgamqK6OhojUGDGAmEhYUxStkK+DuoaNPwZmjGGmmSSqVqDkPkuhnZSKXrU1RdXR1qamoMJqgxHsrKytDZ2WkwO8fRgjHZ6PX19SE5OXlMTeorQV7L1taWcWUBTTA2EF977bU4ffo05syZg5KSEoSGhmLOnDmIjo7GrFmzkJeXhw0bNuCdd97B448/PtXL1Sl1dXUICQnBTz/9pNEMfiTEr5eJrjmTMYUgTjEjT7rEE1X5xknGhcaDUChEYWHhuI3LDQFTla0AoKqqinb1YZqYfklJCfr6+hAeHk57dpPgO9KVSnnMTN9QFIW8vDxQFMU4ExJiDmFlZWWwLMxoYh8ikQjnzp2Dv7//pDfvIpEIhYWFSEhIYNxnaCSMC8QkBd3S0gIvLy+cO3cOdnZ2+Omnn1BQUICMjAyIRCK6a/qll16a6iXrhVdeeQU//fQTzp8/r1XjwuXLl9HZ2Ym4uDhGfciB4Zvj4ODgFfVllUXrlQMvcYoZGXR11ZSTn58PiqIQERGhk9fTFaTuaW5uzrgUNRk7sbOzM0ijz1goz3b39PSgo6MDFE
XRs93Kp92pvCFLpVKkpaXBy8tLxf6UCZDUvr+/v8EyRJqcmhQKBf744w9wuVwkJCRM+mDB5XIZH4QBBgZi4O9gvGXLFqSkpCAnJ0fl+8XFxfRuyczMbEyR8KuRwcFBhIaGYtu2bXjggQfGfLxcLqdTrdp4fhoSqVSKlJQU8Pl8uLq6qkj5jdTPVfZEtbGx0atTDPC3fndwcLBGub2phMkpaqKrHBkZadDxOeUMibKgiqWlpYpxfWlpKWJjYw0+0jQW3d3dtCQj07JXHR0dEAgEiI+Pn1RaWFs0NW9JpVL89ddf8PPzQ0NDA5KTkyeVsWAD8SRQ7oC2t7fHG2+8gU2bNkEmk8HY2FitQzovLw8RERHTLiD/+OOPeOihh5CXl6dV40JraytKS0t1ZrygC4jAQUNDA1paWmBlZYWBgQFaym+k0cFUdOM2NTXh8uXLjEwDMzlFXV9fT9c9db02ZRUz5ZruSP9l5eCrTG1tLerq6vSytslSXV2NxsZGRqZMq6qq0NTUZLC1jQzGQ0NDOHv2LK677jrk5+dDJpMhNjZ2wlk+NhBPEhJ0Dx48iE8//RSXLl2iu6ZJsP31119x//33Y9asWcjIyJjiFesehUKBa6+9FkFBQXjrrbe00qHOycmBtbX1lKQMZTIZLeWnLHBAVIUGBwdhZWUFPp8/ZapCmiDvm6WlJaM8eIG/12ZhYcHIZjxdjA2RXgDl62akihlJMVtbW2tVliDd5yYmJggLC5vw2vQBWZuxsTHCwsIY8zkA/l4bMfswxNqUnZrEYjEuXryIJUuWQCaT0XP1E72fGRkZMcouczQYG4iVuXTpEsLCwuguzZKSEmzYsAEpKSkAgC1btmDFihVYtGjRtDsVFxQUID4+HhcuXNDqRkxkHBMSEmBtba23dZEOVOX08uDgID1rqXxaIQIHujaF0CWDg4NIS0ub1NiEviB2b4Z0j9IWMi8eEhKiltqXy+U4c+YMioqKwOVyER8fj7i4OI3pZdILoFzTtbGxmdRnmZhWjHekyRBIJBKkpqbC19d3XFKOhkAikSA9Pd2gtWzSvDU4OIjMzExce+21AIbvZ2lpaQgNDZ2Q8QgbiPXA0NAQHn30UXzyySdYtGgRHnnkEfB4PNTU1GDXrl04ffo0fHx8VGaNr3YoisKWLVtQWlqKo0ePavV7kRnZKzVHjQflZhhyAxWLxSpjH2ONCxF0bQqhS2pqaui0HNM2cw0NDaiurmZkqpVY/yUlJdF//+zsbDz99NNoaGiAra0trUw1c+ZMbNy4EX5+fiobNn31ApDO+ISEBFhaWur89SdDZ2cnBAIB4uLiDFKTHQ+klh0TE2OQOjs5Fff09KCgoAALFy6kv9fa2or8/HwkJSWN+3DBBmIdc/bsWSxatAh8Ph9PP/00brrpJjg7O9M38yVLlsDd3R2fffbZFK9U93R0dCAgIAAffPABli9fPubjSXNUYGDguBRzNI0LkbqccjMM+e9EAgJRRfP29maUbjEwvCvPyMiAo6PjpHWtdQ2TU9TAcJ+GRCKBu7s78vLy8NRTT8HV1RVLly6Fh4cHOBwOampqcPToURgbG+PHH380mIwnGWlimiIYMLwxbW1tZeTmz1Czz6R5s6enB+3t7RgYGMC8efNUNurl5eVobW1FUlLSuAIrG4h1jEgkwttvv421a9fCz89PzXlpwYIFmDt3Lnbt2sW4k5YueO+99/DOO+8gKytLK8eUxsZGVFVVITk5WeMHnIwLjTzpyuVytWYYbety2kLUwJg4v0sEKwzVOToemJKipiiK9tJV3rTJ5XJYWFjg0KFDKC8vx9atW9UkIHt7e/H222/jgQceMNj8PxF3cHFxMYgP73igKArZ2dmwsLBg3JgaUaiSSqU6y66N9GHu7e1Ff38/OBwOfc8hWvXKY00URSErKwvGxsbjmsM2NjZm3AZHE1dFIFbWmB75ptbW1mLLli347bffcOrUKZWUxnRCJpMhJiYGK1euxNNPPz
3m44n/roODA3x8fOhxIeXAS8aFlNPL+h4XIuTn54PD4TCukQYAKioq0NXVxciZbNKpPN6TwURRHjVTrukCUBPGILOozz77LG688UbMnTtX42v+9NNPqK2txdmzZ/W+fkJfXx8yMjIYKTNJ3reAgADGjR7KZDJa1nS8mxhlVzPla4dMTJBrZ6TWO0lTjxT7IKOGXl5eWkusXi2BmPlndoD+Y4x8Q5977jl88MEH4PP5+P3336dtEAaGL6j9+/dj+fLluPPOO0f9wJJxob6+PtqYvqamBlwul7743d3dp3RcCAACAgKQmpqKjo4OxjUg+fj4IDU1FfX19Ywz0/Dw8EBbWxvKy8t1nqImBvbKQZecVsi14+HhAR6PBysrK7VNio2NDSwsLCAWi6/YWOPq6oqsrCyDGrXY2NjA39+fdmliUp2diLYUFBTA1taWUe5WxsbGiIiIQEZGBmxtbUedZ1fOkowWdD09PbUyWFF2auJwOPRjTU1NERUVhYyMDPB4PMbN1k+Gq+JEPJLDhw9j165dAIAHH3wQ9913H9zc3NDR0QEHBwd6TAbAtGvcWrt2LbhcLj799FN0d3ejtbUVlpaWKi4xxA+Vx+Ohp6cHAAxmbjAe6urqUF9fj8TERMbtWkkjTVJSEuNE43WRoiZBV/mm2dfXByMjI5Xmu/E6U3V0dCAhIQG33nrrqCfiY8eOoaysDBcvXpzQ2icKk8eGgOE6aEdHh8E0n8dDU1MTysvLkZCQAHNzc41ZEoqi1E66mjZs2nAlp6bGxkaUlpYiKSlpzAY8ExOTq+L+f1WciEdy4cIFJCYmYuvWrYiPj0dDQwOef/55ZGZmQiAQIDQ0FCtWrMCWLVtwFe4zNNLR0YHc3Fz4+Pjgww8/REBAAJqbm7Fs2TI888wz4PF4cHZ2VhkXAobTOZcuXUJHRwfjdpAeHh5oampCTU0N42p3Dg4OcHFxQUlJCaKiohh107awsIC/vz+Ki4u1SlGTLInyTbO/v5+e77axsYG3tzd4PB4sLCwm9bvOmDED11xzDS5duqRRFGJwcBDZ2dm48847J/wzJgqHw0FISAhSU1PR3NzMuDSwn58furu7UV5ezph5dtJLwuFwYGpqitTUVPrflTNsZMOmq6DH4XBolzIAKsHY3d0dPT09EAgEjGxymwhX1YmY1IjJOI2TkxOKi4uxfft2DA4OIiAgANHR0aivr8ebb76JlJQUxMbGQqFQYGhoiHEnG204c+YM7r33XtTV1WH27NmIjo5GV1cXJBIJDh48qNWcHzl5JiUlMW53OBlTCH1Dus8DAgIYN4c6Whf1SA9mIqqinCUhwXeyQXc0BAIBbr/9dvj6+mLlypX0qb2pqQk//PADRCIRjhw5MmWBUCgUoqCgAImJiYwbaSKypobyCFZmZAMn2biRoGttbY2Ojg7Y2toiNDTUIPeS0ZyayISDlZUVQkNDR72Or5YT8VUViDVxxx13oKSkBK+88gpuvPFG+nSwbds2iMVifPjhhzh37hy2b9+Offv2ISkpaYpXPD6IX2hkZCTdZDIwMICQkBBs374d995775ivoVAokJ6eDjc3N8yaNUu/C54A2phCTBWtra0oKSmZtOatPiDiLTNnzoRcLqeDromJiZrZgXKWxBCcOXMGW7duxeDgINzd3SGTySAUCuHh4YEDBw5MeYdwaWkpent7GTnSRD7zo9k56gKKoiASidRqunK5XGN6mbxHg4ODSE9PR1BQ0IQENibCaE5NYrEYKSkp8PPzG7WXgw3EBqCiogLXXHMNPvroI9x8880q33vhhRdgYWGB1tZWHDx4EIGBgfjhhx8YlwKdKP/973/x6KOPQiAQaDVwT2qeycnJWo0/GZKRphBMgtjXGRsbT0rGcbLIZDK1cSEiH0pRFLy8vGBra0uLqjBhQyMUCvHBBx/QpgxxcXFYvHgxI+Y65XI5MjIy4OTkxLiZcWB4o9DT04O4uLhJBxJlCVHlL7lcTk9NKEuIjv
Xz2traaJEUQ2Wx5HK5xnpxZ2cnbaKhqRve1NSUEZ+FsbiqA3FaWhqWLl2KjIwM+sMkk8lQUFCAW265BQ0NDXB3d8fevXuxatWqKV6tblEoFFi4cCHCw8Px5ptvavWc/Px8cLncKQ0oo9Hc3Izy8nJGGVYQiIxjeHi4QTq8pVKpWiPM4OAgzMzMVE65PB4PpqamyM7OhpWVFWPqisrU1tbSDXlMCMDKMHmkSS6X0+OHAQEBWj9PU9AlEqITCbqjUVFRQSvkGaJGe6XmrZqaGlRXV2vUJWADsYGIjY3F3LlzsWHDBpiamiIrKwu7du1CXV0ddu/ejccee4y+2Aw5LmEIcnNzMWfOHFy8eBGBgYFjPp6kcqKjoxlnD0dqnlZWVlr9LoaGzO+OJpAyUYhmt/JpVyQSqciHkv+OJn7CFKEPTRDBCktLS0YqgtXV1aG2tpZxI03A31aTYWFhGlXIRirhka+Rut36kBBVKBQqQiSGMofQFIwpikJ+fj7EYrFaBoENxHqGNG6Vlpbi888/x5EjR2iJtFWrVmHfvn10M8h0GmFShqIobN68GZWVlfj555+1uuCqq6tpST2mXaDEFCIuLo42+GAKRNmHx+NN2AlGIpGoNcKIxWJYWFioNVKNtx5taKGP8UAakEJDQw0ma6ktFEVBIBDAyMiIkSNNRMebfF5HppcNEXRHY2hoCGlpaQY1rlB2alL+HYnwiIODg0pmiGnKfaNx1QZi4O8T7qlTp7BkyRIsXrwYL774Ij2/OF0DsDLt7e3g8/n46KOPcNNNN435eIVCgZSUFEZqPQPA5cuX6VlKpt0USXOUNu5RykYZ5L8k6I5spNLFSYycPJmaom5qakJFRQWSkpIY1/RGnJD8/f0ZM9KkbAvZ2NgIiURCK+GN3LRN5fhOZ2cncnNzDbp5Js1bxsbGKveIwcFBpKSkIDg4mP47soHYgAwMDODbb7/Fgw8+CAD07DDTbuT64t1338X+/fuRkZGhVSOWUChEUVERIzuBmWwKAQwbp5OMApfLpQ3sR9Z0SYOSLowytIVYOUZGRsLBwUFvP2cikKY3DodjMJ/b8dDe3o78/PwpGWkimzblL4lEQp90ra2tUV9fD1dXV/j7+xt0bdpQXV2NxsZGjXPj+oCcijkcjlq9WCgUQiAQIDExke6huBq46gPxyFPvaHXg6Xw6lslkiIqKwpo1a/Dkk09q9Zzc3FyYm5sz8vTEVFMIMvKRlZUFS0tLGBkZqd00lYPuVKSISc2TiSlqJp48ldFlp/JoKAddsnkbGhoa8/rp6+tDZmYmYzdZAoEAHA4HERERBq0Xj5wvBoazak1NTUhOTmacNsFoXPWBeCJMt6YtADh9+jRWrFiB3NxcrcQnmFyPBabeFEK5+1T5tCuVSmFhYQGRSITZs2fD0dFR5+5Uk4HpKeq2tjYUFRUhKSmJcWN0uh5pIj0Byl/KmRLl9LI2109DQwMqKysZmd6XSqVIS0uDp6enwbQKrtS8lZOTAwBISkq6Ku710z4Qd3R0oKqqCvn5+ejo6MCKFSvg6+sLIyOjaXVKpigKq1atgrm5OQ4ePKjVxVdRUYHu7m7ExsYy7mIlHd6G6AQmJ92R6eWRjTBEg5k0Cfb39zNSw5vJKWoAKCoqglgsZqSAC+kDiIqKGtd7J5FI1LqXxWIxLC0t1Wq6E03fEltCkgFj2ntHVPIMOQ42mtiHVCpFdXU1goODrwoJzGkZiMmJt7q6GgcOHMCff/6Jjo4OJCYmoqamBtdffz3eeOONaRWIgeH6ZVhYGH799VckJCSM+XiZTIaUlBT4+/szTsIR0I8pBJHxGxl0lcUNlH2YR/u5MpkMqampmD17tsE6RsdDXV0d6urqGDm/S947b29vxrlbAcMd6NXV1UhKStIYNJVHzkZ2v4886eq6ZiqTyZCWlgZ3d3fMnj1bp6+tC+rr61FVVYXExESDlZWIS9PIejGXy2XcSNpoTMtATFi3bh1Onz4Nd3d3eH
p64tixYzh79iyWLVuGixcvIjIyctoF4507d+L333/HmTNntApeLS0tKCsrw5w5cxh3wyZ6sk5OThNSRFPWzlV2GFIoFGrdpxMRN2BqLRv4e9zKxsaGkXPZXV1dyMnJQUJCAqytrad6OSqQmieXy0VQUBB93ZDrSCQSqY2c6ar7XRvIyTMmJoaRegCFhYUYGhpCdHS0QfWoR6ao2UDMANLT07FkyRKcP38eQUFBmDlzJt5//32sWbMGd9xxB6ytrXHw4EEA06tm3N/fj+DgYOzYsQN33333mI8nNUUejzcuBR9DQW46Y1meKZuQK48MEcF65ZumLn2YCwoKoFAoEBERoZPX0yVMT1GXl5ejs7MT8fHxjNgMKyuadXd3QygUAgAtrjKZOW9dw2QhEplMhoyMDDg7OxtMPlRTMDYyMmLc4WI0ro5VToC6ujr4+vrC2dkZJiYmePLJJ/HCCy9gzZo1UCgUKhfIdAnCAGBtbY0333wT27Ztw4oVK8ZsxOJwOODz+cjIyIC7uzvjugxtbW0xc+ZMlJaW0nUxhUJBm5ArB10AKtZsNjY2Og26muDz+UhJSUFbWxucnZ319nMmgqWlJfz8/Gi7RKbVynx9fdHR0YHq6mqDa8AT7W7la2hwcFAt6FZXVyM6OppxnwtPT090dnaiqKjIYJ3K2mJsbIzw8HBkZGTA1tbWICIuxDZRuZv6amLanoh/++03bN68Gbm5ubCzs4NIJEJ0dDQ9k3fo0CEsX76cfvz58+chk8mwaNGiKVy1blAoFFiwYAGio6Px2muvafWc0tJSDA4OMq4JRKFQoLu7GwKBALa2tpDJZOjv7weHw1EZ9ZiMCflkIWIVTNTJZnqKmug9ayOSMlFkMplaI5Wydrfy18iTbllZGbq7u/U60jRRSKcyU2vtzc3NKC0t1auL1EiUm7dMTEyumhPxtA3EABAeHo4lS5Zg9+7dMDMzw/Hjx7F161Zs3rwZ27ZtAzBsOebi4oJTp05h586dSE1NZdwHbiLk5ORg7ty5tJ/uWEilUly6dAnBwcFTdrJTKBRq9ThiYG9mZgaRSITAwEDY2dnB0tKSMRuG0byBmQLTU9TV1dVoamrSSVOesh8z+RoYGICZmZlaTVebuj4ZaXJ0dGSkmEZ3dzeys7MZO4ZYUlKC3t5eg21klMU+TE1NGbcxHo1pGYiJDnVOTg7uuOMO3HrrrXj66afh4OCAgoICBAUFQSgU4ueff8bx48dx+PBhODs7o6uri3EuLBOFoig89NBDqK2txZEjR7QKWg0NDbResb5TO3K5HP39/SrpQRJ0RzoMkd00k00hiPFCVFQUI68h0kXNxBQ1RVHIzMwEj8cb1992tKBramqqdtKdTDPdREeaDAVRtmJih7xCoaD/tvqea6coCn19fRAIBMjKykJ9fT0+/vhjvf5MXTEtAzHwdwPWTz/9BJFIhOuuuw7Ozs7o7+/HhQsX8O233yI3NxcODg749NNP4evrS9cfp8OJGBiWe+Pz+Th06BBuvPHGMR9PURTS09Ph7OwMHx8fna1D+YZJ/jswMABjY2M1NSELC4tRNw1MFyGpra1FQ0ODTsetdAXTU9Tk1D7a3Di5hpQD78DAAExMTNQ2bvrwYx5rpGkqIRkZExMTRhpXENOPwMBAnY1Jktn//Px8ZGdnIzc3Fzk5OSgvL4ebmxtiYmIQGxuLnTt3XhX382kfiJX/f1paGn755RecPHkSFhYW2Lx5M+66664pXKX+2bdvH95//31kZGRodSogqa7k5OQJ1XVIPW60G6Zy56m5ufm4bxpMNoUgu/8ZM2Yw0mye2OoxNUXd0NCAqqoqxMfHq+kvK2/cRp50DSWpyGSt7KlwQhoPQqEQBQUFiI+PH/e4GkVRkEgkKCoqQk5ODh14S0pKYGdnh5iYGDrwxsXFwd3dnXF/n7GYtoFYme7ubhw4cADnzp1Da2srVq1ahe3bt9PjMB0dHYzzcdUVUqkUkZGRuPPOO7F161
atnlNYWAi5XD7mSA7pPFU+7Y5MDZKTiq5umEw3hSDNR/Hx8bCxsZnq5ahRW1uL+vp6xqSolfsCenp60NraCrlcrrJxI9fQRDZuuoRoZTM12HV0dCAvL29Cwc4QVFRUQCgUIj4+ftQUOkVRkMvlKC0tVQm6hYWFMDc3R1RUFGJjYxEbG4v4+Hh4e3tfFSfesfhHBGIAuOmmm2BhYYHdu3fTqbkvv/wSBw4cgI2NDby8vLB+/Xpcd911kMlkjKu1TIY//vgDq1evRm5uLlxdXcd8/NDQEFJSUhAeHk5vUJRnLEnQVe48HZka1CdMFtIAhm84ZD6WaTvzqUxRKxQKlb4A5WY85X6AiooKnaYxdQkJdgkJCYwbaQKGM0ZtbW1ISEhgxEZLGYVCgcOHD+P8+fM4fPgwuFwu5HI5KisrVdLLJPMQGRmJ6OhoxMXFIS4uDv7+/oz7nXTFtA/EpHGrs7OTTsfV1tZi3bp1EAgEuPvuu2FnZ4fLly/jzz//RFNTE6ysrKaVyAdFUVi5ciVsbGzw4Ycfjvl7SSQS+gNtb2+Pvr4+iEQiesZSuaY7VYFwqk0hroRcLqcF8Jk4VkJq7frUBCaz3j09PSoZE+WgS66jkX0Bra2t9Owz04whgOGRpq6uLsYIkSijUCiQnZ0NS0tLhISETPVyVCBKeatWrcL8+fPR09OD3NxciMVihIeHqwTdoKAgNb/h6cy0D8Qj6e7uxr333ovOzk78+9//RlRUFH36Xbp0KZYtW4bNmzdPq0AMAJWVlQgPD8dvv/2G2NhY+t+JQ4zyaVcsFsPc3BxSqRT29vbw8vJihJqQMoY0hZgInZ2dEAgESEpKMtgM5XjQZYpaWWBF+aTL5XLVRoau1IynTGFhISQSCePm2oG/A8qMGTMYOdIkFouRlpYGPp8/ZVkFiqLQ3Nyskl7Ozc1Fd3c3wsPDkZ+fjyeeeAKrVq1CWFiYwWr9TOUfE4iVR5qWLFmCDz74AKtXr1bpkt66dStMTEzwzDPPwNHRcVp1UAPA448/jtTUVMTHx6OgoABz585FfHy8ilg9uXGamJigo6MD+fn5jE0BE1OIpKQkRv6diouLIRaLGRlMyMiQra0t+Hy+1s9TlhJVLlNwOBy1EsVkZr2lUiltqsHEXoD+/n5kZGQwtvGNNEcZIoVOURQ6OjrUgm5zczP4fD5iY2MRExODuLg4REZGwtLSEm+//TY++OAD5OTkMPL9MzT/iEBMHEsiIiJw+vRpPPHEEygoKFDRLq6srMSSJUsgEomwfv16vPnmm1O4Yt2QlpaGkydPIjs7G9nZ2WhuboalpSUiIiIwZ84cLF26FFFRUVccx8jLy4OxsTHj0lzA3ycTXY9b6QqpVEoLqjCx3jlWipqiKLWTLgm6I0+6+hBYYXo9lnR5JyYmMipbRCBa3nFxcTp1L+vt7aXrudnZ2RAIBKipqYGvry+io6Pp7uWYmBjY2NhovC4UCgVuvfVWzJgxA5999plO1nY1848IxOnp6di4cSOOHDkCHx8f2NjYYO/evbjjjjtgbm6OTz/9FF9++SXq6uqwfPlyrF27Ftdcc81UL3vSvPvuu8jIyEB0dDRiYmIQGRmJY8eO4emnn0Zubq5Ws7gikQgpKSmMdHoBtDeFmCpaW1tRUlKC5ORkRt6sSYo6MTERYrFYrSGPoii1k64hpURLS0vR29uL2NhYxmU9yEgTAMbpPQN/j9PZ2tpOqDGPuJcJBALk5OTQwffy5cvw8PBQCbqxsbFwcHAY13vQ1dUFqVTKOI32qeAfEYgBICQkBOvXr8ezzz4LgUCAffv2obi4GEKhEJaWlggICMBdd92FlStXTvVS9YpCocD8+fMRHx+P3bt3a/WcyspKtLe3M7ILGBiW0ROJRIxNAZOsQmho6FQvB4C6PWRjYyPt6TrypDtV+t0EuVyO9PR0uLm5MdJ/l+kjTURMIyQk5IoBj6IoDA0NobCwUKWDuaSkBI6OjvSsLgm6bm5ujP
usXc38YwLxyZMnceedd+LRRx9FSEgI/vjjD1y8eBG2tra4/fbb6e5pAMjOzoa3tzfs7e1hZGQ07WrFmZmZuOaaa5CamqpVs4lcLkdKSgpmz57NyJsNSQHz+XytxrMMjVgsRmpqKsLCwuDo6GjQn60cdJVPu8QekhgdVFVVITo6mpH1OpL1YKqiGtNT6E1NTXj88cexe/du8Pl8UBQFmUyGkpISlbpuUVERrKys6Awa6WD29PScVvc/JvKPCcQAsH//fvz444/IycnBvHnzEBkZiXvvvZdO23z88cfYt28fKIqCubk55s6di5deegmOjo50s9d0gKIobNiwAc3Nzfj++++12tm2tbWhuLgYc+bMYZzEHzDs9FJeXo45c+Ywcga8oaGBlkjU1/qI7N/Imq5cLtd40lW+udbU1NB6xUy8zisrK9Ha2srI+ViAed7Kysjlctx5550oLS3FggULkJ+fj/z8fBgZGSEqKopWpYqPj4evry/j1v9P4B8ViAGgt7cXEokEYrGYPt0NDAzgtttuw+nTpyGTyfD777/D0tISb731FuRyOY4fPz7txplaW1vB5/PxxRdf4Lrrrhvz8RRFITc3F5aWlozUKiZ6u0w1hSBCGjweb1xdyld6PbFYrBJ0e3t76aCrHHitra3HvLlOtIvaUJB6p52dHWPXl5GRAQcHB63czvS5jpqaGmRnZ9N1XYFAAIqiYGRkhNmzZ+Oxxx6jZ3WZuKn5J/KPC8Sa+OKLL/Dyyy/jl19+wbfffoszZ84gLS0N1dXViI2Nxe+//464uLipXqbOeeedd3Dw4EGkpaVp1UhEumyZKt/IdFMIsr7xeu9qCrp9fX2QyWSwtrZWOelqE3THWh9TG/OYrpVN1meo2XaFQoGmpiY66JLA29/fj9DQUPqkGxsbi7CwMFRWViI+Ph7ff/+9ViYwLIbjHx2ISe1327ZtKCoqwsmTJ9HR0YHIyEi8/fbbuP322zF//ny88MILuPbaa6d6uTpHIpEgIiIC99xzDx577DGtnlNeXo6enh7ExsYyMkPAZFMIAKiqqqJTrJoCJgm6I+39lIOusrKZrtOITE9R19XVoba2Vq8p/snQ0NCAyspKJCUl6bRLnqIotLW1ITc3V6WZSigUIigoSKWZKiIiYlThlC+//BJPPvkkysrKGLmZ+afCvCvZgJCbWHd3N20f5ujoiOeffx7btm1DZ2cnysrKGNmgpAtMTU2xd+9erFu3DuvWrdNqjMDHxweXLl1Ca2srIxujZs+ejZaWFjQ0NDBSCGLWrFlobW1FbW0tZs2apeYyRIKulZUVeDwenJyc4OvrC2tra4MERm9vb7S1taGysnJKU6yj4enpCaFQiLKyMkbOtru7u6OjowPFxcUTHmmiKArd3d1qs7p1dXXw9/dHTEwMFi1ahGeeeQbR0dGwtrbW+ufcfffdmDVrFiM9s//JsCdiLhfHjh3DPffcg6KiIsycORMKhQKLFy+GQCDAs88+i+3bt6s8bzrViymKwi233IIZM2bgvffe0+r3ampqwuXLl5GcnMzIUwlTTSFIelkoFKKpqQkmJiaQSqUq6WVS353K0yjTU9SkC32skZypQiKRIC0tTStVMCKaQoIuSS9XVlbC29tbbVbXzs5u2tx7WP7mHx2IlbnxxhthY2ODZ555BjExMcjOzkZKSgo2bdoEU1NTpKamoqysDPfee++0CsTAsFtQREQETp48iZiYmDEfTxp77O3tGam1C0y9KYSmk65EIqFPumKxGFKpFHFxcYzczDA9Rd3c3IyysjLGCqV0dnZiz549WLVqFaKjowH8XXYoKChQSS+XlZXBxcWFDrrky8XFZVrdZ1hG5x8fiMlYUn19PV555RUEBgZi48aNdDNSW1sb3nvvPRw6dAgWFhYoKSmBmZnZtJstfuaZZ3D+/HmcOnVKq9+rt7cXmZmZjFW0MqQpBAm6ynXdoaEhOugqn3RJ0JXJZLSWMhNLH2SzZWdnx8gUNU
VRKCgogEKhYKSqFUVReOSRR3D+/Hls2bIFBQUFyMnJQXFxMXg8nkpNNy4uDh4eHoz7HVgMxz8+EAN/p5o7OzsBAA4ODujq6sLPP/+Ml19+Gd3d3Zg5cyauvfZahIWFYePGjVO8Yt3T29uLwMBA7Nq1C7fffrtWzykpKaFNDZiIPkwhiFuV8tfQ0BAsLS1VupeVg+5okBQ6U+3++vv7kZ6eztgUNVG18vPzg7u7+5SuRSaToby8XGVsqKioCKamprC2tsaaNWvoWd3Zs2dPq008y+RhA/EIZDIZzp49izfeeANnzpzB5s2b8corrwAYTtfddNNN2Lt3L9auXTutRD6A4TGuHTt2IDc3F9bW1mM+XiKRICUlBSEhIXBycjLACsfHZE0hJBKJWveyWCymg67yrO5E08vKpzomwvQUdXt7O/Lz8w1qN6lQKFBZWalS083Ly4NcLkdkZKSKQAZFUUhISMCRI0dw/fXXG2R9LFcfbCAeQXp6OpKSkrB48WLs378fwcHBKt9/4IEH0NLSguPHj0/RCvWHQqHAnDlzMHfuXLz88staPae+vh61tbVITk5m5C5fW1MIqVSqdtIVi8UqFpEk+OpSWYxsZoKCguDi4qKz19UVREjD3t6ekSlqYDgz09/fr5eROoVCgYaGBpWTbk5ODkQiEcLCwuj0ckxMDEJCQmBiYqK2hk8++QTPP/888vPzGblhZZl62ECsgfPnz2P+/PkAVDuka2trcfPNN2P16tXYuXPnVC5Rb2RkZGDBggVIT0+Hr6/vmI+nKArp6elwcXFhpCg/oG4KQYKu8mlXJBKN6susb4g8Z3JyMiPlQ5meoiY2px4eHpg1a9aEX4eiKLS2tqoF3c7OTgQHB6sIZERERGhtZk9RFA4ePIj169czUouaZephA7ESo3VDy+Vy/PHHH3jmmWfQ2tqKn3/+GcnJyfT3p1PjFkVReOCBByAUCvHdd99pdaPp6upCbm4ukpOTGVfrlEql6OrqQmFhIaytrSGRSCASiWBubq5y0jVU0NUEkQ81NzdXy8AwherqajQ1NTE2Rd3d3Y3s7GytVd8oikJnZyctkEECb1NTE/z9/VXM7KOioqbchYplesMG4jEoKSnBCy+8gJ9++gmrV6/GoUOHYGNjA6FQiP/85z/Ytm3bVC9R57S0tCAwMBD/+c9/sGjRIq2eU1BQAIqiEB4erufVjY5MJlM76Q4ODsLc3BympqYYGBhASEgI7O3tGTfyIhKJkJqaylj5RpKidnBwYOzI2uXLl3Hy5Ek88MADKhtCiqLQ19cHgUCgEnSrq6sxe/ZsuoM5Pj4e0dHRsLW1ZYMui0FhA/EVePHFF7F7925ER0fjrbfewsKFCwEMO8Fs3rwZVVVVEAgE0zLd9Pbbb+Pw4cNITU3V6qRIxoUMFUhkMplaI9Xg4CDMzMzUTrqmpqagKArZ2dmwsbFhpGkAMFz6aGhoYOypk6Sox6uVbSikUiliY2Mxb9483HnnnSrNVOXl5Zg5c6aaQIajoyMbdFmmHDYQX4EPPvgAIpEI27ZtU0k9v/rqq/j111+xd+9eJCYmTuEK9YdEIkFYWBgefPBBPPLII1o9p6amBs3NzaPqKE8UuVyuFnQHBgbooKvcvXwlJS2mm0JQFIWMjAzMmDEDfn5+U70cjVRXV9N/46neLFAUBYlEgsLCQjrolpWVITs7G1ZWVkhMTKRrunFxcZg5cyYbdFkYCRuINTBarVgmk+HFF1/E22+/jU8//RR33XXXFKzOcBw/fhzr169Hbm6uVlKCCoUCqamp8PT0hJeX14R+5mhB19TUVO2kOxH5SqabQvT19SEjI4OxDldTlaKmKApyuRylpaUqbkOFhYWwtLSkfXXj4uKQl5eH//73vxAIBFqN4bGwTDVsIB4Hx48fx8svv4xHHnkE99xzj8bHTCf5S4qisGzZMri4uODdd9/V6vciIhVz5swZsw5Lgq5y4B0YGICJiYla97KumsDkcjlSU1
Ph7e3NSFMIgPmbBUOkqOVyOS5fvqw2q8vhcBAVFYXo6Ghalcrf318lA6NQKLBo0SIEBgbio48+0sv6WFh0CRuItSQ1NRWrVq3CihUr8P7779Oi/enp6RAIBPDx8cEdd9wxrQIxAJSVlSEqKgp//vknIiMjtXqOQCCAqampSgewXC5Hf3+/2knX2NhY40lXn+8hU00hCHK5nB7H8fb2nurlaESXKWqFQoHa2loVpyGBQACxWIzw8HAVgYzAwECtehZqa2vx4osv4pNPPmGkljcLizJsINaCgYEBLFiwACUlJdi7dy+OHz+O9vZ2tLS0QCgUIiEhASKRCH/88YfB1H0MydNPP42UlBScPHlSq9pvf38/0tLS4O3tTc/s9vf3qwVdGxsbmJubT8nGJS8vD1wud8pMIcais7MTAoHAoIpR44Gols2YMWNcKWqKotDc3KxmZt/T04OQkBCVWd3w8HCYmppOq40tC4sm2EA8BuSE+8wzz+B///sfLCwsEBERgfj4eNja2mL58uWQy+WM7CLVFT09PQgMDMTrr7+ONWvWqHxPoVConXT7+/vB4XDA5XLh4eFBB96pCrqaMHSX90QoLi6mtbyZ8r4pQzZcfn5+GoU0KIpCe3u7SsDNycmhx+OUg25UVNSoZvYsLNMdNhCPARHroCgK1dXVmDlzJszNzVVS0JWVlfD09ERHRwfc3NxUnjddOHz4MF588UV8+OGHdNpw0aJF6Ovrg5GRkdpJl1hH+vr6YubMmVO9fI2QcSFdmkLoEqlUipSUFPj7+zP2Pfzwww/x7rvvIiMjAzKZTG1Wt7a2Fr6+vmqzujY2NmzQZWH5f9jiyRiQGzSHw6GNA4jZQ1FREQ4fPgyBQID29nYAwC233IKXX36ZDt5X681GKpWiuLgYWVlZyM7ORmZmJhobG7FmzRoEBARgwYIF8Pb2Bo/HG/UkExAQgNLSUjg5OTFSutHT0xPNzc2oqamZkCmEvjExMUFQUBCKi4vh6OjIGBESYmafl5cHsVgMAEhOTkZVVRU8PDzooPvQQw8hNjYWDg4OV+3ngIXFELAn4gnS1NSETZs2ob6+HiUlJfjtt98gl8uxdetW3Hrrrdi9e/dV7c70n//8Bw8//DB9UyWC+g888AAyMzO10vSlKAo5OTmwtrZmrIiGtqYQU8lU1rMpisLQ0BDtp0tOuiUlJXB0dERMTAw8PT3x+eef45dffsENN9zABl0WlnHCBuIJsnPnTnzxxRf4+OOP8fbbbyMsLAwHDhzA2bNnceedd6KoqIiRAvnaIpVKYWRkpJKypSgK99xzD3p7e/H1119rdcMloy4JCQmMnelkei1WLBYjNTUVYWFhcHR01NvPoSgKMpkMJSUlKs1URUVFsLa2plWpiECGp6cnfX28+uqr+Oabb5CTk8PITnQWFibDBuIJsnLlSvj5+eGtt95CXl4e5s6di3PnzsHNzQ3XX389vvjiC0RHR0/1MnVOY2MjgoOD8e2332LBggVaPaesrAz9/f2Ijo5mZKCTSqW4dOkSY60IAaChoQHV1dVISkrS2TiOXC6nzexJI1VBQQGMjY3VZnV9fX2vWEeXSqVISkrCzp07sWLFCp2sj4XlnwJbI54AYrEYHR0ddCCKiIjAPffcg2effRZ33303OBwOY71bJ4u7uzueffZZeqRJm9qvj48PUlJS0NbWxshAZ2JigoCAAJSVlWHGjBmMnDt1d3dHS0sLKisrJ5TmVygUqK6uVpvVlUqliIiIoGu6ZFZ3vCUVExMTXLhwgZGjViwsTIc9EY8T0g390ksv4ZdffkF6ejrMzMzQ1taGwMBAdHd348CBA9iyZQtkMhkjb+qTZWhoCGFhYdi0aRMefvhhrZ7T2NiIqqoqJCcnM7JufjWYQhCt7LEUrRQKBRobG+nUMqnr9vf3IzQ0VMXiLzQ0VKOZPQsLi+FgA/E4Ue6E5vP5WLduHW677TaEh4fjzJkz6O/vx/Lly6/4vOnA//73P9
x9993Iy8vTqm5JURStUcxUQwOmm0IAwMWLF7Fr1y4cPXqUHqNra2tTC7pCoRBBQUEqs7qRkZGMmuVmYWEZhg3EE4CcdM+dO0fbI27btg1GRkaoqqrC999/jxMnTsDf3x/Lly/HLbfcAg6HM61miymKwtKlS+Hh4YF9+/ZpdXO/GjqUmazzTFEUWltbsXjxYoSFhYHD4SA3NxcNDQ3w8/NTCbrR0dGwtrZm3O/AwsKiDhuIJ4lQKISTkxMA4MyZM7j//vsxMDCA1atXo6ioCL29vVi0aBH27NlzVY8zaaK0tBTR0dE4ffo0wsPDtXpOcXExJBKJ1rrVhoYpphAURaG/vx8CgUDltFtVVYWgoCCUl5fj0UcfxU033YSYmBjY2dmxQZeF5SqFDcQ6Ijs7G0uXLsVNN92EAwcOwNraGnK5HBcvXsSSJUuQl5cHPp8/rU7FAPDEE08gKysLJ06c0Or3kkgkuHTpkt5HcSaDUChEYWGhwUwhKIqCWCxGfn4+3cGcm5uLsrIyuLi40LPcxMze2dkZTz31FDIyMnDu3LlpdT2xsPwTYQOxjvjggw/w73//G2lpaXByclIJuPfccw8cHBywd+/eKV6l7unu7gafz8eePXtw2223afWcuro61NfXM1ZaEhgW0TAyMkJoaKhOX5eiKEilUhQVFdEdzLm5uSguLoatra3arK6Hh4fGk+7AwABCQ0Pxr3/9C5s2bdLpGllYWAzL9GvpnSIqKiowZ84cOk1NAgxFUbjmmmvg5eVFP3ZoaAhmZmbT4nRsZ2eHV199Fc899xxuuOEGWFlZjfkcDw8PNDY2oq6uTiuFrqmAz+cjJSUFnZ2dkzKFkMlkKC8vR1ZWlsqsrpmZGaKiohAbG4tnn30WcXFxmD17ttbXg5WVFb7//nvGalCzsLBoD3siniSkG/qTTz7B7t27UVRURAcjEmh7enpgY2OD+++/H3feeSc2btyIY8eOMdaCb7zI5XIkJibiuuuuw86dO7V6DrH5S05Ohrm5uZ5XODHGawqhUChQWVmpIpCRl5cHiqIQGRmpIpAREBAwrfoFWFhYJg4biHVIcHAwli1bhoceegg+Pj4qI0vl5eUIDAyEmZkZVq1ahd27dzPW9H0iXLp0Cddffz2ysrK0/r3y8/PB5XJ1nv7VFQqFAmfPnkVLSwvuuOMOte/V19erCGTk5ORAJBKpmNnHxcUhKCiIndVlYWEZFTYQ6wAyziQQCPDll19i6dKlWLx4MTgcDnp6evDqq69i//79tBtRQ0MDXFxcpkVqmkBRFO6++24MDAzgq6++0iroEE/gqKgo2NvbG2CV4+f06dNYtWoVfvnlF/T29tJzurm5uejs7ERwcLCKQEZ4eDjMzMzYoMvCwqI1bCDWMf39/bS5wVdffYV//etfAIAXX3wRGzduxC233AIAOHr06LQT+WhoaEBwcDC+//57zJ8/X6vnVFdXo7W1FQkJCYx4LyiKQmdnp8rI0OXLl1FfXw93d3e6kYrM6lpaWjJi3SwsLFcvbCDWAxKJBA8//DAOHz6M7du347HHHqObahoaGrBjxw689957jFVvmgyvvfYavvvuO1y8eFEreU+FQoGUlJQpmdulKAp9fX10PZd8EX9i0sEcGBiIBx98EB988AFWr15t0DWysLBMf9hArAdqa2uxatUqPPzww7j//vsBYFqloa+EWCxGaGgotmzZgo0bN2r1HKFQiKKiIiQnJ8PU1FQv66IoCiKRCHl5eSrNVBUVFZg5cyY9qxsfH4+YmBg4OjqqnHRJdqOkpGRabqBYWFimDnZ8SQ94e3ujpaUFYrEYAFTMH3744Qe0t7dj1qxZiIiIwMyZM6eV4pa5uTn27NmDBx54AKtWrdJq9MfJyQm2traorKxEUFDQpNdAURQkEgkKCwtpX12BQIDi4mI4ODjQQXfdunWIjY3FzJkzx0wvr1+/Hr/++itKS0sRHx8/6TWysLCwENgTsY4hQferr77Cnj17kJ
eXBwD466+/8PDDD6O1tRW+vr5oa2uDn58fTp8+DS6XO61OzAqFAjfccAN8fHzw73//W6sa6kQNF4iZfWlpqYpARmFhISwtLelZ3djYWMTHx8PLy2vavM8sLCzTAzYQ65FLly5hzpw5+PLLL7Fp0yasW7cOTzzxBFxcXNDf34/169cjKioK77///rQKxABQVFSE2NhYnDt3TuvxpIqKCnR1dSE2NnbU90Iul+Py5csq6eX8/HxwOBxERUWpjA35+/tPq/eUhYVlesIGYgNw/fXXw9PTE/v374elpSUdHL755hu8/vrruHTp0rSrO1IUha1bt0IgEOD48eNaBUSZTIZbb70VS5cuxcMPPwyFQoHa2lo6vZybmwuBQIChoSFERESoCGQEBQVNS+9nFhaW6Q9759IzpaWlaGxsxGOPPUaPNRFSUlLg4OBgEGMBQ8PhcPDSSy+Bz+fj6NGjuPXWW6/4eGLxFxoaildffRXHjx9Hfn4+enp6EBISgtjYWKxbtw7//ve/ERYWBlNTU3ZsiIWFZVrABmI94+Pjg+7ubgwODtL/JhQKcfToUZw4cQJPP/30tAzEAGBvb4/du3djx44dWLJkCe1BTFEU2tvbVWq6ubm5aG1tRUBAAG1ef+zYMURGRsLCwoINuiwsLNMWNjWtR0g39DvvvIO9e/fioYcegpmZGcrKylBYWAhfX1/s2bMHrq6uKs+bTkIfcrkcsbGxCAkJAZ/Pp+Uga2tr4efnR8/qxsXFITo6GjY2NigoKEBCQgIEAgH4fP5U/wosLCwseoUNxAZiz549+OabbyCTyeDp6YlFixbhkUcegbm5OYRCIf71r38hIiICjz322FQvVeccOnQIjzzyCJYuXYr4+Hi6i9ne3n7UDcejjz6KqqoqHD9+3MCrZWFhYTEsbCDWM8rd0P39/bCwsEBPTw89X/vuu+/inXfeQW1tLd5//31s2rRp2o0zkREjExMTrZ/T1dWFxsZGxhpCsLCwsOgKNhBPEadOncJTTz2FiooKrFixAv39/XBzc4OFhQX27ds3rdLTLCwsLCyjwwZiA1NdXY1t27bhxIkTWLFiBV544QUEBgaCy+UiNzcXCxcuRGpqKkJCQqZ6qSwsLCwsBoDtmjYw33//PbKzs/Hrr7/i2muvVUk/x8TE4MiRI7CwsJjCFbKwsLCwGBL2RGwglFPNAwMDsLKy0vi46aQ7zcLConumU/8IyzBsIGZhYWFhOMrGMSzTD3ZbxcLCwmIgFArFqN+jKApyuRxyuVzt30kQlslkOHfuHD755BM0NDToda0shoPdYrGwsLAYAIqiwOVyQVEUSCJSOcXM4XA0lqU4HA4OHz6M3377DcnJyTh48CASExMxd+5cg62dRb+wgZiFhYVFz5Aeka1bt6K0tBQnTpyggzCp+ba0tOC///0vzp07By8vL9x9992Ijo4GADg4OODHH39EY2Mjfv75Z/D5fLBVxekDm5pmYWFh0TEKhUIlxUwaNaOiolBUVIQ///wT+/fvx/79+8HlctHe3o6tW7fiiy++gIeHB2pra3Hdddfh7NmzAIA5c+YAABYvXkzLvrI6A9MH9kTMwsLCMkEUCgU4HI5aUFROOdfU1EChUMDJyQkvvfQSmpubsXLlSgQGBmLt2rUAgD///BPHjh1DamoqIiIiMDAwgA0bNuDZZ5/FH3/8AUdHR/B4PFhYWLCNW9MQ9kTMwsLCMkG4XK7Gk2lRURGWL18Oa2trzJ8/H2+88QZsbGzw7rvvgsPh4OOPP0ZWVhaefvppAMCRI0dw2223ISwsDABgZWWFu+66Cy0tLcjKygIABAYGoqysDDKZzHC/IItBYAMxCwsLywRoaWnB0aNH8d1330EikdD/3tPTgyeffBIWFhY4deoUMjIysHr1avT19WHZsmXw8fGBQCDA0NAQ3UUtk8kgk8nQ09NDv46HhwdmzJgBgUAAAJg3bx5yc3PR19dn0N+TRf+wgZiFhYXl/xmtAaqnp0
cl2P7rX/9CWFgYduzYgTfffBNLly5FcXExAOCHH35AcXExtmzZgqSkJLi4uOC6666DjY0NACAxMRE5OTkQiUR0CjsxMRHFxcVoa2ujfwaHw4FMJqMNYhYvXoyamhoIhUK9/O4sUwcbiFlYWP4xkECrUCg0zvRyOBy1Od7Kykp4e3vj1KlTAID9+/fj1KlT+OWXX1BUVIScnBxYWlri5ZdfBgCUlZXB1dWV7njmcDigKIoO5Ndccw1KSkrQ29tL/4wbbrgBHA4Hr7/+OgYHBwEAP/74I5qbm7F06VIAQEJCAvr6+tDc3KzLt4SFAbAVfxYWlmkJCbrKNVzyv8lJVCKRQCqV0pKzt956K+zt7bF3717Y2toCAHJzcxESEgJnZ2cAQEZGBpYtW4Y5c+bgt99+Q3V1NQoLCyGTyVBSUoLAwEB8++23KidoDocDU1NTAMD8+fPR3t6OsrIyeHl5AQDCw8Pxyiuv4KGHHsLy5cvR0tKCgYEBHDhwAE5OTgAAe3t7tLW1wdHRUW/vGcvUwJ6IWVhYphXkpKupm7miogI5OTk4fvw4+Hw+ZsyYgYcffhgZGRkAgJtvvhmlpaX4z3/+Qz+nr68Pra2tiI6ORmtrK8RiMV577TXY2tpi06ZNOHbsGO6880589dVXCAwMxIIFC9Da2ooLFy6o/OzS0lIAgJ+fH8LDw/Hmm2/ioYcewoIFC9DW1oabb74Zv/32G5YuXYpnnnkGFy5coLuqgeGNBRuEpyfsiZiFhYWxSKVSmJiYqPybch2XqFUpQ/5/fn4++vv7ERkZCUtLSwDAW2+9hcOHD+PBBx/Em2++CQsLC+zcuROPP/44UlNTsWbNGlRUVOCdd97Bxo0bYWJigq6uLpiZmcHIyAjW1tawtLSkndKcnZ3V1ufr64v169dj8+bNaGpqwvz585GXl4c//vgDu3fvhoeHBz7//HMcPHgQnZ2dWLVqFV0/joiIQEREhMb3gp0bnsZQLCwsLAyjuLiY8vb2pr777juKoihKoVBo9Ty5XE7t3buXcnJyombMmEHFxcVRq1evpiorKymKoqijR49SHA6Heumll+jnpKamUsbGxlRpaSlFURTV09NDeXh4UK+//jpFURR11113URs2bKCGhoYoiqKo/fv3U87OzlRBQQH9GhKJhNq3bx91/vx5iqIoqru7m9q5cycVFxdHWVlZUX5+ftTu3bupjo4OrX4HuVyu1e/LMj1g3ZdYWFgYh0gkQmdnJ9zd3dVs/6qrq5GXlwdra2tkZ2ejqKgIX375JQDgr7/+woMPPogPP/wQN9xwA2pqarBt2zZYWlri66+/RkVFBfh8Ps6dO4d58+YBGB4d4vF4+Oyzz7B27VpwOBwcOHAA+/fvx9mzZ7Fq1Srcdttt2L59O4DhuvKtt96KgoIC3H777TAzM0NmZiaEQiF27dqFG2+8EcBwiry1tRUuLi4abQup/zd5GE1jmuWfA1sjZmFhmTKIFOTI84CFhQXc3d3R0dGhEsR27tyJ8PBwvPrqq/jkk0+wb98+lJeX09///PPP8fjjj+OGG25AbW0tBAIB+vr68O2336KxsRH+/v6wt7fH5cuXAfztbOTn54fs7Gy6vrxp0yZ4enpiy5Yt6O7uhr29PYDhIGxqaoqvv/4ae/bsQVFRETIyMjBv3jwcPnwYN9xwA70WLpcLNzc3cLlcjb8nh8OBsbExG4RZWD9iFhYWZtDX10fXSimKwuHDh7F582aIxWIAQFZWFhYtWoSPP/4Yt99+O0pLS/HAAw+gvr4ely5dgqenJ0JCQtDZ2QmFQoGhoSG4u7sjISEBiYmJWLt2LWxtbTF//nwEBwfjo48+os0YNm3ahKqqKhw7dgwWFhYAgD/++AMrV67E4OAgjh49iuXLl0/Ze8MyvWFPxCwsLDqDoigIBAJkZmYCgNpM7kjS0tKwYsUKODs7Izk5GS+++CLa29vB4XDg5uYGCwsLutv4p59+QkxMDBYvXgxgWPLxoY
cegpmZGa0+xefzaaeimpoaFBYW4rPPPsPGjRvpcaTk5GQIBALIZDK6ASo+Ph5//fUXOjs76bXNnTsX//3vf3HmzBk63awJ4iHMnmlYJgobiFlYWDRCUdQVjezJY+RyOf04iqJw33330eM/RkZGowaozs5O7N69G3Z2dvjxxx9x4MABfPPNN3j99dcxMDAAf39/2Nra0mNAXV1daq8XGBgIBwcH5ObmAhgOqIODg7C2toadnR04HA4kEglOnTqFI0eOABh2QMrIyEBHRwf9OkuWLMFzzz1Hn8gBwNLSEjfddBOuueaaK5osGBkZwcjIiO1qZpkwbCBmYWHRCIfD0dhkNPIxRkZGtOE9l8vFzJkzcfbsWSxcuBBcLhc//vijynNIIP34449hZmaGzz//HPPnz8fChQtxxx134IcffsBff/0FT09P+Pj44NKlSwCAmJgYVFVVqUg8enp6oqurC2VlZQCAe+65BxEREbjhhhtw6NAhfPbZZ9i0aRPefvtt+nQ+f/58PPXUUzAzM6Nfx8PDA7t27QKPx1Nb61ibERaWycLOEbOwsKjR2NiIP//8E6dPn8ayZcuwcuVKjU1Fubm5uHjxIurr6/H4448jLS0NeXl56O3tRWJiIp577jkkJCSM+jOysrJw//3348yZM+js7ISVlRUWLFiAmTNngsvlIjw8nPbkXbBgAd544w38+OOPCA0NBQD6ZxM9Zjc3Nxw6dAhfffUVvvjiC3R0dCA5ORmPP/44Fi5cSD/mrbfeUluPXC5Xc1PSJArCwqJr2EDMwsKiwuXLl/HKK6+gqqoKKSkpMDIywrJly2BhYUE3NzU2NmLr1q04e/YsgoKC4OzsjJycHNx2220wNjbGa6+9hltvvRXXXnutWmqaw+FAKpXCw8MDjY2N8PLywhtvvIHY2Fh4enrSUpAURSEkJARff/01gGFFqieffBLbt2+HUCiEmZkZampqcO+99yIjIwO1tbXw9vaGk5MTtm3bhieeeGLU31FT0GW7l1mmCjYQs7CwqGBsbIy5c+filVdewXvvvYczZ86gv79fJRAfOXIEmZmZqKysBI/Hg0gkos0KQkNDIZPJUFhYqDLOo4yJiQn8/f2hUCiwbds2upEKGJ4Tbm9vR1xcHPz8/NDT04OcnBxER0fj4YcfxowZM/Dll19CKpXihRdeQE5ODrKzs2kTBeW5Y2LuwOVyVdLsbNBlYRJsjZiFhUWFWbNmYePGjZg1axauv/56lJeXo729HQDomVixWAyKoiAWi5Gbm4vu7m7MmDEDAODj4wMej4fa2lpasEITK1asQEREBJYuXYqff/4ZTU1NOH78OHbu3ImUlBRQFAU3NzfMnTtXpZt5zZo1+PXXX3Hy5EnMmTMHJ06cgImJCfz8/Og1ErhcLoyNjcesdbOwTCXsiZiFhUUNcvKNi4vDwMAA6urqEBQUBGA4uN1111345ptvEBwcjJiYGIjFYlhYWGD//v3g8/mYNWsWWltb0dbWBjc3N7XXVygUMDIywuHDh/Hxxx9j165dKC8vh62tLd2pzOFwEBgYiNOnT6usqaioCN988w2srKxw6tQpDAwMYM+ePfT8LwvL1QYr6MHCwqIREvhmzJiBHTt2YNu2bXR3NIfDgVgshlAoRFFREfr6+vDyyy8jPDwc33zzDd5//30cPHgQr776KpYtW4ampibMnDlT4+srFApcvnwZLi4uKilqZeRyOZ1Obmpqwn333QdgeFxp5cqViIqK0u+bwcKiR9hAzMLCohGZTAZjY2MsWrQIrq6u+PTTT9VOnSSYAsDatWvR1NSECxcu4PLly3j99dfx22+/oaWlBYsWLcKff/455s8k40LajE6xsEwX2NQ0CwuLRkiAnTdvHn788Uf09fXRgbi3txcnT56EUCiEubk5zp49i9raWuzYsQPAcIfzm2++ifXr18PX1xdeXl5a/0y2kYrlnwZ7ImZhYbkif/31F2699VZ8+OGHEIlEcHR0xIoVK/
Df//4XBw4cgEgkQmRkJNauXYtrr72WDaQsLOOEDcQsLCwaqa2txYsvvoisrCwUFxeDy+XCw8MDTz75JB599FFIpVKYmJhc8TWUU9csLCyaYVPTLCwsGunv78fQ0BDuuece3HTTTQgODlb5PgnCo83qAmCDMAuLFrAnYhYWFq0hes1s+pmFRXewgZiFhWVUFAoFbebAnm5ZWPQDG4hZWFhYWFimEHZQj4WFhYWFZQphAzELCwsLC8sUwgZiFhYWFhaWKYQNxCwsLCwsLFMIG4hZWFhYWFimEDYQs7CwsLCwTCFsIGZhYWFhYZlC/g+rzS+2WwwvqgAAAABJRU5ErkJggg==",
"text/plain": [
"<Figure size 800x600 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Code source: Gaël Varoquaux\n",
"# Modified for documentation by Jaques Grobler\n",
"# License: BSD 3 clause\n",
"\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# unused but required import for doing 3d projections with matplotlib < 3.2\n",
"import mpl_toolkits.mplot3d # noqa: F401\n",
"\n",
"from sklearn import datasets\n",
"from sklearn.decomposition import PCA\n",
"\n",
"# import some data to play with\n",
"iris = datasets.load_iris()\n",
"X = iris.data[:, :2] # we only take the first two features.\n",
"y = iris.target\n",
"\n",
"x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5\n",
"y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5\n",
"\n",
"plt.figure(2, figsize=(8, 6))\n",
"plt.clf()\n",
"\n",
"# Plot the training points\n",
"plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Set1, edgecolor=\"k\")\n",
"plt.xlabel(\"Sepal length\")\n",
"plt.ylabel(\"Sepal width\")\n",
"\n",
"plt.xlim(x_min, x_max)\n",
"plt.ylim(y_min, y_max)\n",
"plt.xticks(())\n",
"plt.yticks(())\n",
"\n",
"# To getter a better understanding of interaction of the dimensions\n",
"# plot the first three PCA dimensions\n",
"fig = plt.figure(1, figsize=(8, 6))\n",
"ax = fig.add_subplot(111, projection=\"3d\", elev=-150, azim=110)\n",
"\n",
"X_reduced = PCA(n_components=3).fit_transform(iris.data)\n",
"ax.scatter(\n",
" X_reduced[:, 0],\n",
" X_reduced[:, 1],\n",
" X_reduced[:, 2],\n",
" c=y,\n",
" cmap=plt.cm.Set1,\n",
" edgecolor=\"k\",\n",
" s=40,\n",
")\n",
"\n",
"ax.set_title(\"First three PCA directions\")\n",
"ax.set_xlabel(\"1st eigenvector\")\n",
"ax.xaxis.set_ticklabels([])\n",
"ax.set_ylabel(\"2nd eigenvector\")\n",
"ax.yaxis.set_ticklabels([])\n",
"ax.set_zlabel(\"3rd eigenvector\")\n",
"ax.zaxis.set_ticklabels([])\n",
"\n",
"plt.savefig(\"iris_dataset.png\")\n",
"plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
| https://github.com/only4sim/ZK-DTP |
plot_iris_dtc.ipynb | {"cells":[{"cell_type":"code","execution_count":44,"metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false},"trusted":true},"outputs":[],"source":["%matplotlib inline"]},{"cell_type":"markdown","metadata":{},"source":["\n","# Plot the decision surface of decision trees trained on the iris dataset\n","\n","Plot the decision surface of a decision tree trained on pairs\n","of features of the iris dataset.\n","\n","See `decision tree <tree>` for more information on the estimator.\n","\n","For each pair of iris features, the decision tree learns decision\n","boundaries made of combinations of simple thresholding rules inferred from\n","the training samples.\n","\n","We also show the tree structure of a model built on all of the features.\n"]},{"cell_type":"markdown","metadata":{},"source":["First load the copy of the Iris dataset shipped with scikit-learn:\n","\n"]},{"cell_type":"code","execution_count":45,"metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false},"trusted":true},"outputs":[],"source":["from sklearn.datasets import load_iris\n","\n","iris = load_iris()"]},{"cell_type":"markdown","metadata":{},"source":["Display the decision functions of trees trained on all pairs of features.\n","\n"]},{"cell_type":"code","execution_count":46,"metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false},"trusted":true},"outputs":[{"name":"stderr","output_type":"stream","text":["/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored\n"," plt.scatter(\n","/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored\n"," plt.scatter(\n","/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. 
Parameters 'cmap' will be ignored\n"," plt.scatter(\n","/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored\n"," plt.scatter(\n","/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored\n"," plt.scatter(\n","/var/folders/4v/hb_2qz1d4knbrvsjqc0skwrm0000gn/T/ipykernel_1670/3884067348.py:39: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored\n"," plt.scatter(\n"]},{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAkMAAAHHCAYAAAC88FzIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd3gU1deA3930XoDQIRBqCL0T6R2UIhYQaRYQ6VgQVBDp8omASLMAgopU4af0roihiZTQISHUUJKQXnbv98dkN9nsJtkNSTblvs+zT9g7M3fODndmzj33FJUQQiCRSCQSiURSTFFbWwCJRCKRSCQSayKVIYlEIpFIJMUaqQxJJBKJRCIp1khlSCKRSCQSSbFGKkMSiUQikUiKNVIZkkgkEolEUqyRypBEIpFIJJJijVSGJBKJRCKRFGukMiSRSCQSiaRYI5WhbAgJCUGlUrF69WqLjmvXrh3t2rXLE5kKCg8ePOCll16iRIkSqFQqFi5caG2RcsyJEydo1aoVLi4uqFQqzpw5Y9Hxq1evRqVSERISkify5bR/lUrFZ599licySXLGZ599hkqlssq5Dx06hEql4tChQ1Y5f37g6+vL0KFDrS2GnqtXr9KlSxc8PDxQqVT89ttvme5blJ6phQ1bawuQHatXr2bYsGH67w4ODnh7e1O3bl169uzJsGHDcHNzs6KExZcJEyawe/dupk2bRpkyZWjSpIm1RcoRycnJvPzyyzg6OvLVV1/h7OxM5cqVrS1WoWbHjh0cP368UCpicXFxfPHFF8ViQiPJe4YMGcLNmzeZNWsWnp6eWT4n8+uZOnv2bPz9/enTp0+e9F8oEQWcVatWCUB8/vnnYu3ateKHH34Qs2fPFl26dBEqlUpUrlxZ/Pfff3l2fq1WK+Lj40VKSopFxyUmJorExMQ8kqpgULp0aTFw4EBri/HMXLx4UQDi22+/zXEfunF68+bN3BMsHSkpKSI+Pl5otVqLjouPjxfJycl5IlNWjBo1ShSCx4tJHj58KAAxbdq0POk/OTlZxMfH50nf2XHw4EEBiIMHD1rl/PlBQkKCSEpKsrYYQggh4uLiBCA+/vhjs/bPr2eqi4uLGDJkSJ6fpzBR4C1DOrp3726gJU+ePJkDBw7w/PPP06tXLy5evIiTk1Oun1elUuHo6Gjxcfb29rkuS0EgJSUFrVaLvb094eHheHp6WlukZyY8PBygQP8WGxsbbGxsLD4uJ2M3v0k/pgojsbGxuLi4mL2/ra0ttraF5tFb6HBwcMh2H0v/z3LKw4cPAfOfLYX5marVaklKSioUzxyTWFsbyw7djPvEiRMmt8+ePVsAYuX
KlQbtFy9eFP369RNeXl7CwcFBNG7cWGzbts3o+IiICDF+/HhRuXJlYW9vL8qXLy8GDRokHj58KIQQ4ubNmwIQq1at0h9z7949MXToUFG+fHlhb28vypQpI3r16mVgFWjbtq1o27atwbkePHgg3njjDeHj4yMcHBxEvXr1xOrVqw320Z1v/vz5YsWKFaJq1arC3t5eNGnSRBw/fjzb65WUlCQ+++wzUa1aNeHg4CC8vb1FYGCg2LNnT5ayCSHEkCFDROXKlU3K8tVXX4mqVasKtVotvvrqKwEYfYQQ4vHjx+K9994TAQEBwsXFRbi5uYlu3bqJM2fOGJ0vPj5eTJs2TVSvXl04ODiIMmXKiL59+4pr167p99FoNOKrr74S/v7+wsHBQfj4+Ijhw4eLJ0+eZHsthBBi//794rnnnhPOzs7Cw8ND9OrVSwQHBxv85oy/w9S1Sc/58+dF+/bthaOjoyhfvryYMWOG+P77701ahnbs2KE/v6urq+jRo4c4f/68UZ8XL14UL7/8sihZsqRwdHQUNWrUEFOmTNFvN2V5OnHihOjSpYsoUaKEcHR0FL6+vmLYsGEG/WLCwnH69GnRrVs34ebmJlxcXESHDh3EsWPHDPbRne+vv/4SEyZMECVLlhTOzs6iT58+Ijw8PMvrY+qa6sZHZmPq33//1V8Hc+/bcePGiQoVKgh7e3vh5+cn5s6dKzQajcF+v/zyi2jUqJFwdXUVbm5uIiAgQCxcuDBT2XXyZfzoruGQIUOEi4uLuHbtmujevbtwdXUVvXv3FkIIceTIEfHSSy+JihUrCnt7e1GhQgUxfvx4ERcXZ3COadOmGVnNADFq1CixdetWUadOHWFvby/8/f3Fzp07jWS8ffu2GDZsmPDx8dHv9/333xvtFxYWJnr37i2cnZ1FqVKlxPjx48WuXbvMtgzl9TgRIu16Xr9+XXTp0kU4OzuLsmXLiunTpxtZQefPny9atmwpvL29haOjo2jUqJHYuHGjUZ+VK1c2sHroZDx06JAYOXKkKFWqlPD09BRCCPH06VMxbtw4/fO/VKlSolOnTuLUqVPPfH10/8/pP+mfr6auo6l7Rgjzx7s518jUeXTXK+M7IONvydjPqFGjxLp164S/v7+wtbUVW7duFUKYP0YXL14s/P39hZOTk/D09BSNGzcWP/30U2aXPE8p9NOTQYMGMWXKFPbs2cPbb78NwIULFwgMDKR8+fJ89NFHuLi4sGHDBvr06cPmzZvp27cvADExMbRu3ZqLFy/yxhtv0KhRIx49esT27du5ffs2JUuWNHnOfv36ceHCBcaMGYOvry/h4eHs3buXW7du4evra/KY+Ph42rVrx7Vr1xg9ejRVqlRh48aNDB06lMjISMaNG2ew/88//0x0dDQjRoxApVLxxRdf8OKLL3Ljxg3s7OwyvR6fffYZc+bM4a233qJZs2Y8ffqUkydPcvr0aTp37pyDKwyrVq0iISGB4cOH4+DgQKNGjVi7di2DBg2ic+fODB48WL/vjRs3+O2333j55ZepUqUKDx48YMWKFbRt25bg4GDKlSsHgEaj4fnnn2f//v3079+fcePGER0dzd69ezl//jx+fn4AjBgxQu83NnbsWG7evMmSJUv4999/OXr0aJbXYt++fXTv3p2qVavy2WefER8fz9dff01gYCCnT5/G19eXESNGUL58eWbPns3YsWNp2rQppUuXzrTP+/fv0759e1JSUvRja+XKlSatkmvXrmXIkCF07dqVefPmERcXx7Jly3juuef4999/9WPl7NmztG7dGjs7O4YPH46vry/Xr1/nf//7H7NmzTIpR3h4OF26dKFUqVJ89NFHeHp6EhISwpYtW7L8v7xw4QKtW7fG3d2dDz/8EDs7O1asWEG7du04fPgwzZs3N9h/zJgxeHl5MW3aNEJCQli4cCGjR4/m119/zfQcI0aM4O7du+zdu5e1a9ea3CfjmPL29jb7vo2Li6Nt27bcuXOHESN
GUKlSJf7++28mT57MvXv39E6ne/fuZcCAAXTs2JF58+YBcPHiRY4ePWp0v+koVaoUy5YtY+TIkfTt25cXX3wRgHr16un3SUlJoWvXrjz33HP83//9H87OzgBs3LiRuLg4Ro4cSYkSJTh+/Dhff/01t2/fZuPGjVn+vwD89ddfbNmyhXfffRc3NzcWL15Mv379uHXrFiVKlAAUB9sWLVqgUqkYPXo0pUqVYufOnbz55ps8ffqU8ePHA8rzpmPHjty6dYuxY8dSrlw51q5dy4EDB7KVA/JnnOjQaDR069aNFi1a8MUXX7Br1y6mTZtGSkoKn3/+uX6/RYsW0atXLwYOHEhSUhLr16/n5Zdf5vfff6dnz57Znufdd9+lVKlSTJ06ldjYWADeeecdNm3axOjRo/H39+fx48f89ddfXLx4kUaNGj3T9XnxxRfx9PRkwoQJDBgwgB49euDq6mqyvzZt2mT6TDV3vJt7jdauXat/PwwfPhxA/7y1lAMHDrBhwwZGjx5NyZIl8fX1NXuMfvvtt4wdO5aXXnqJcePGkZCQwNmzZwkKCuK1117LkTzPhFVUMAvIzjIkhBAeHh6iYcOG+u8dO3YUdevWFQkJCfo2rVYrWrVqJapXr65vmzp1qgDEli1bjPrUzUoyWoYiIiL0M9usyGh9WbhwoQDEunXr9G1JSUmiZcuWwtXVVTx9+tTgfCVKlDCwfmzbtk0A4n//+1+W561fv77o2bOnRbLpyMwy5O7ubnKWR+rMID0JCQlGs5WbN28KBwcH8fnnn+vbfvjhBwGIBQsWGPWru/Z//vmnAIxmCrrZbXYziAYNGggfHx/x+PFjfdt///0n1Gq1GDx4sL5N50dhapaZkfHjxwtABAUF6dvCw8OFh4eHgeUmOjpaeHp6irffftvg+Pv37wsPDw+D9jZt2gg3NzcRGhpq8joIYWwZ2rp1a7b3hRDGlqE+ffoIe3t7cf36dX3b3bt3hZubm2jTpo3R+Tp16mQgx4QJE4SNjY2IjIzM8ryZ+QxlNabMvW9nzJghXFxcxJUrVwyO/+ijj4SNjY24deuWEEKIcePGCXd3d4v9/bLyGdJZvT766COjbRktQEIIMWfOHKFSqQz+bzObZdvb2xtYRf/77z8BiK+//lrf9uabb4qyZcuKR48eGRzfv39/4eHhoZdB97zZsGGDfp/Y2FhRrVo1syxD+TVOdNdzzJgx+jatVit69uwp7O3t9RZ6IYyvb1JSkggICBAdOnQwaM/MMvTcc88ZjQUPDw+jZ5g5mHt90ltCzcHUM9Xc8S6E+dcoM58hSy1DarVaXLhwwaDd3DHau3dvUadOHaNzWYsiEVrv6upKdHQ0AE+ePOHAgQO88sorREdH8+jRIx49esTjx4/p2rUrV69e5c6dOwBs3ryZ+vXr62ec6cks9NXJyQl7e3sOHTpERESE2TLu2LGDMmXKMGDAAH2bnZ0dY8eOJSYmhsOHDxvs/+qrr+Ll5aX/3rp1a0CxvGSFp6cnFy5c4OrVq2bLlh39+vWjVKlSZu3r4OCAWq0MK41Gw+PHj3F1daVmzZqcPn1av9/mzZspWbIkY8aMMepDd+03btyIh4cHnTt31v8/Pnr0iMaNG+Pq6srBgwczlePevXucOXOGoUOH4u3trW+vV68enTt3ZseOHWb9nozs2LGDFi1a0KxZM31bqVKlGDhwoMF+e/fuJTIykgEDBhjIbmNjQ/PmzfWyP3z4kCNHjvDGG29QqVIlk9fBFDq/gt9//53k5GSzZNdoNOzZs4c+ffpQtWpVfXvZsmV57bXX+Ouvv3j69KnBMcOHDzeQo3Xr1mg0GkJDQ806Z2ZkHFOW3LcbN26kdevWeHl5GVzbTp06odFoOHLkCKBco9jYWPbu3ftMsppi5MiRRm3prYOxsbE8evSIVq1aIYTg33//zbbPTp06GczQ69Wrh7u7u/6eF0KwefNmXnjhBYQQBr+9a9euREVF6e+xHTt2ULZsWV5
66SV9f87OznpLQFZYY5yMHj1a/2+dRSEpKYl9+/bp29Nf34iICKKiomjdurXBcyUr3n77bSO/O09PT4KCgrh7965ZfUDOrs+zYO54h2e/RpbStm1b/P399d8tGaOenp7cvn2bEydO5IlsllLol8lAWe7y8fEB4Nq1awgh+PTTT/n0009N7h8eHk758uW5fv06/fr1s+hcDg4OzJs3j/fee4/SpUvTokULnn/+eQYPHkyZMmUyPS40NJTq1avrFQUdtWvX1m9PT8YXo04xyk4B+/zzz+nduzc1atQgICCAbt26MWjQIAMzv6VUqVLF7H21Wi2LFi1i6dKl3Lx5E41Go9+mM/UDXL9+nZo1a2bpSHr16lWioqL0/7cZ0Tk+m0J3PWvWrGm0rXbt2uzevTtHTpShoaFGSwSmzqNTRjt06GCyH3d3dyBNuQ0ICLBIjrZt29KvXz+mT5/OV199Rbt27ejTpw+vvfZapg6kDx8+JC4uLtNrotVqCQsLo06dOvr2nI7D7Mg4piy5b69evcrZs2czVdB14+Ldd99lw4YNdO/enfLly9OlSxdeeeUVunXr9kyy29raUqFCBaP2W7duMXXqVLZv3250faKiorLtN+O1BuV66/p6+PAhkZGRrFy5kpUrV5rsQ/fbQ0NDqVatmpFCber/PiP5PU7UarWBUgFQo0YNAIO8Wr///jszZ87kzJkzJCYm6tvNzdlk6jn2xRdfMGTIECpWrEjjxo3p0aMHgwcPNpInPTm5Ps+CueMdnv0aWUrGa2rJGJ00aRL79u2jWbNmVKtWjS5duvDaa68RGBiYJ7JmR6FXhm7fvk1UVBTVqlUDlJcxwPvvv0/Xrl1NHqPbN6eMHz+eF154gd9++43du3fz6aefMmfOHA4cOEDDhg2fqW8dmUUOCSGyPK5NmzZcv36dbdu2sWfPHr777ju++uorli9fzltvvQUoN4apftIrLumxJEpv9uzZfPrpp7zxxhvMmDEDb29v1Go148eP1//fmItWq8XHx4effvrJ5HZzrVXWQPdb165da1JJftZoIpVKxaZNm/jnn3/43//+x+7du3njjTf48ssv+eeffzL1TbCUnI7D7Mg4piy5b7VaLZ07d+bDDz80uZ/uRerj48OZM2fYvXs3O3fuZOfOnaxatYrBgwezZs2aHMue3vqpQ6PR0LlzZ548ecKkSZOoVasWLi4u3Llzh6FDh5o19rO71ro+Xn/9dYYMGWJy32eZ9DwLeTVOdPz555/06tWLNm3asHTpUsqWLYudnR2rVq3i559/NqsPU8+xV155hdatW7N161b27NnD/PnzmTdvHlu2bKF79+65IvuzYu54z41rlJnSZO67wZIxWrt2bS5fvszvv//Orl272Lx5M0uXLmXq1KlMnz7dLHlzk0KvDOkcNHUPUJ1Gb2dnR6dOnbI81s/Pj/Pnz+fovH5+frz33nu89957XL16lQYNGvDll1+ybt06k/tXrlyZs2fPotVqDR6kly5d0m/PLby9vRk2bBjDhg0jJiaGNm3a8Nlnn+mVIS8vL5PLbc+69AGwadMm2rdvz/fff2/QHhkZaeCQ7ufnR1BQEMnJyZk6Qfv5+bFv3z4CAwMtTpugu56XL1822nbp0iVKliyZo9DaypUrm1yCzHge3XKHj49PluNQN15zOg5btGhBixYtmDVrFj///DMDBw5k/fr1+v/r9JQqVQpnZ+dMr4laraZixYo5kiMjls5ELb1vY2Jist0PlBQXL7zwAi+88AJarZZ3332XFStW8Omnn2Y6KcrJLPrcuXNcuXKFNWvWGDi/5uYSXalSpXBzc0Oj0WT72ytXrsz58+cRQhj8HlP/96bOk1/jBJQX6I0bN/QvdYArV64A6IMMNm/ejKOjI7t37zawfK5ateqZz1+2bFneffdd3n33XcLDw2nUqBGzZs3KVBnK7+tj7ni35BplNsa9vLyIjIw0ajf33WDJGAVwcXHh1Vdf5dVXXyUpKYk
XX3yRWbNmMXny5HwP0S/UPkMHDhxgxowZVKlSRe+z4ePjQ7t27VixYgX37t0zOkaX9wEUv4X//vuPrVu3Gu2X2YwmLi6OhIQEgzY/Pz/c3NwMzJIZ6dGjB/fv3zeIrkhJSeHrr7/G1dWVtm3bZv1jzeTx48cG311dXalWrZqBbH5+fly6dMngWvz3338cPXr0mc9vY2NjdO02btyo9/fQ0a9fPx49esSSJUuM+tAd/8orr6DRaJgxY4bRPikpKSZvWh1ly5alQYMGrFmzxmC/8+fPs2fPHnr06GHBr0qjR48e/PPPPxw/flzf9vDhQyPrVdeuXXF3d2f27NkmfXp0175UqVK0adOGH374gVu3bhnsk9WsOiIiwmh7gwYNADIdhzY2NnTp0oVt27YZLD88ePCAn3/+meeee06/fPes6BTNrP6P0mPJffvKK69w7Ngxdu/ebbRfZGQkKSkpgPG9oFar9bPSrO5VXXSYubJDmmUk/f+JEIJFixaZ3Yc55+jXrx+bN282qTynv0Y9evTg7t27bNq0Sd8WFxeX6dJFxvPk1zjRkf45IIRgyZIl2NnZ0bFjR71MKpXKwEIREhKSZWmL7NBoNEbLlz4+PpQrVy7L8ZHf18fc8W7JNXJxcTE5vv38/IiKiuLs2bP6tnv37pl8R5rCkjGa8f60t7fH398fIYTZfpC5SaGxDO3cuZNLly6RkpLCgwcPOHDgAHv37qVy5cps377dQIv85ptveO6556hbty5vv/02VatW5cGDBxw7dozbt2/z33//AfDBBx+wadMmXn75Zd544w0aN27MkydP2L59O8uXL6d+/fpGcly5coWOHTvyyiuv4O/vj62tLVu3buXBgwf0798/U/mHDx/OihUrGDp0KKdOncLX15dNmzZx9OhRFi5cmGslRfz9/WnXrh2NGzfG29ubkydP6kNHdbzxxhssWLCArl278uabbxIeHs7y5cupU6fOMzv+Pf/883z++ecMGzaMVq1ace7cOX766SejNfjBgwfz448/MnHiRI4fP07r1q2JjY1l3759vPvuu/Tu3Zu2bdsyYsQI5syZw5kzZ+jSpQt2dnZcvXqVjRs3smjRIgMH0YzMnz+f7t2707JlS9588019aL2Hh0eOy0R8+OGHrF27lm7dujFu3Dh9aL3O8qfD3d2dZcuWMWjQIBo1akT//v0pVaoUt27d4o8//iAwMFD/Ali8eDHPPfccjRo1Yvjw4VSpUoWQkBD++OOPTGukrVmzhqVLl9K3b1/8/PyIjo7m22+/xd3dPUtFb+bMmezdu5fnnnuOd999F1tbW1asWEFiYiJffPFFjq6JKRo3bgzA2LFj6dq1KzY2NlneH2DZfbt9+3aef/55hg4dSuPGjYmNjeXcuXNs2rSJkJAQSpYsyVtvvcWTJ0/o0KEDFSpUIDQ0lK+//poGDRroffVM4eTkhL+/P7/++is1atTA29ubgICALP26atWqhZ+fH++//z537tzB3d2dzZs3P7NvVUbmzp3LwYMHad68OW+//Tb+/v48efKE06dPs2/fPp48eQIozsJLlixh8ODBnDp1irJly7J27Vq9opcd+TVOQEkMumvXLoYMGULz5s3ZuXMnf/zxB1OmTNEvhffs2ZMFCxbQrVs3XnvtNcLDw/nmm2+oVq2awX1nCdHR0VSoUIGXXnqJ+vXr4+rqyr59+zhx4gRffvlllsfm5/Uxd7xbco0aN27Mvn37WLBgAeXKlaNKlSo0b96c/v37M2nSJPr27cvYsWP16UBq1KhhthO2uWO0S5culClThsDAQEqXLs3FixdZsmQJPXv2tE6JrfwMXcsJGZNR6ZIcdu7cWSxatEgfkp6R69evi8GDB4syZcoIOzs7Ub58efH888+LTZs2Gez3+PFjMXr0aH0CxQoVKoghQ4bowwIzhtY/evRIjBo1StSqVUu4uLgIDw8P0bx5c4MQViEyT7o4bNgwUbJkSWFvby/q1q1rkMwx/fl
MhWJiRomAmTNnimbNmglPT0/h5OQkatWqJWbNmmWUnn7dunX6hI4NGjQQu3fvzjLpoinIJLT+vffeE2XLlhVOTk4iMDBQHDt2zOT1iIuLEx9//LGoUqWKsLOzE2XKlBEvvfSSQbiqEEKsXLlSNG7cWDg5OQk3NzdRt25d8eGHH4q7d+9meS2EEGLfvn0iMDBQODk5CXd3d/HCCy8YJF0UwrLQeiGEOHv2rGjbtq1ZSRcPHjwounbtKjw8PISjo6Pw8/MTQ4cOFSdPnjTY7/z586Jv377C09NTODo6ipo1a4pPP/1Uvz1jaP3p06fFgAEDRKVKlfTJKJ9//nmjfk2NmdOnT4uuXbsKV1dX4ezsLNq3by/+/vtvg30yS2lhbjmHlJQUMWbMGFGqVCmhUqlMJl00hbn3bXR0tJg8ebKoVq2asLe3FyVLlhStWrUS//d//6cf65s2bRJdunTRJ36rVKmSGDFihLh3716WsgshxN9//y0aN24s7O3tTSZdNEVwcLDo1KmTcHV1FSVLlhRvv/22Pjw+/X2eVQK7jGQMExdCeY6MGjVKVKxYUX/fdOzY0SjxbGhoqOjVq5dwdnYWJUuWFOPGjbM46WJejxNTSRdLly4tpk2bZpSi4/vvv9cnaK1Vq5ZYtWqVyWuZWWh9RhkTExPFBx98IOrXr69PnFi/fn2xdOnSbK+NudcnN0LrhTBvvAth/jW6dOmSaNOmjXBycjJIuiiEEHv27BEBAQHC3t5e1KxZU6xbt86iMSuEeWN0xYoVok2bNqJEiRLCwcFB+Pn5iQ8++EBERUWZda1yG5UQueThJpFIJBKJBQwdOpRNmzYRExNjbVEkxZxC7TMkkUgkEolE8qxIZUgikUgkEkmxRipDEolEIpFIijXSZ0gikUgkEkmxRlqGJBKJRCKRFGukMiSRSCQSiaRYI5UhiUQikUgkxRqpDEkkEolEIinWFJpyHKbQarXcvXsXNze3HBVXlBRvhBBER0dTrlw5oyrkBRU55iXPghzzkuKGuWO+UCtDd+/ezdXqwJLiSVhYGBUqVLC2GGYhx7wkN5BjXlLcyG7MF2plSFfMbUu5qrgUkllOfuPfqQwzW83k7L7rudrvmMnt6OG5jx2Rnfh6zqFc7Tu/SEmO58TW0dYpCphDdLI27bsEWzsnK0tTMPjp61YADBzzt5UlKfjIMS8pbpg75gu1MqQzmbqo1biobawsTcHE3d4We2dXbO3Mq1ZtLs5ubri7OeGsccv1vvObwmR618lqa+dU6K97buHmrjzk5PUwHznmJcWN7Ma8NKdIJBKJRCIp1khlSCKRSCQSSbFGKkMSiUQikUiKNVIZkkgkEolEUqyRypBEIpFIJJJijVSGJBKJRCKRFGukMiSRSCQSiaRYI5UhiUQikUgkxRqrKkOfffYZKpXK4FOrVi1riiSRSCQSiaSYYfUM1HXq1GHfvn3677a2VhdJIpFIJBJJMcLqmoetrS1lypSxthgSiUQikUiKKVb3Gbp69SrlypWjatWqDBw4kFu3bmW6b2JiIk+fPjX4SCQSiaRgsmzZMurVq4e7uzvu7u60bNmSnTt3WlssicQIqypDzZs3Z/Xq1ezatYtly5Zx8+ZNWrduTXR0tMn958yZg4eHh/5TsWLFfJZYIpFIJOZSoUIF5s6dy6lTpzh58iQdOnSgd+/eXLhwwdqiSSQGWFUZ6t69Oy+//DL16tWja9eu7Nixg8jISDZs2GBy/8mTJxMVFaX/hIWF5bPEEolEIjGXF154gR49elC9enVq1KjBrFmzcHV15Z9//rG2aBKJAVb3GUqPp6cnNWrU4Nq1aya3Ozg44ODgkM9SSSQSSe4QFX6ZsPPbiI28i4tnOSoG9MbDp6a1xcoXNBoNGzduJDY2lpYtW2a6X2JiIomJifrv0h1Ckh9Y3WcoPTExMVy/fp2yZctaWxSJRCLJVaLCL3N230wi7mtJih9CxH0NZ/fNJCr8srVFy1POnTuHq6s
rDg4OvPPOO2zduhV/f/9M95fuEBJrYFVl6P333+fw4cOEhITw999/07dvX2xsbBgwYIA1xZJIJJJcJ+z8NqAOiBPAPBAnAf/U9qJLzZo1OXPmDEFBQYwcOZIhQ4YQHByc6f7SHUJiDay6THb79m0GDBjA48ePKVWqFM899xz//PMPpUqVsqZYEokRycnJ3L9/n7i4OEqVKoW3t7e1RZIUMmIj74IYAtilttiB6EZs5BpripXn2NvbU61aNQAaN27MiRMnWLRoEStWrDC5v3SHkFgDqypD69evt+bpJZIsiY6OZt26daxfv57jx4+TlJSEEAKVSkWFChXo0qULw4cPp2nTptYWVVIIcPEsR1L8JuAccBGoDVzGxbOcdQXLZ7RarYFPkERSEChQPkMSSUFhwYIF+Pr6smrVKjp16sRvv/3GmTNnuHLlCseOHWPatGmkpKTQpUsXunXrxtWrV60tsqSA4+RRDrgJ3AZeAcKAm6ntRZPJkydz5MgRQkJCOHfuHJMnT+bQoUMMHDjQ2qJJJAZYZBm6ePEi69ev588//yQ0NFS/ZNCwYUO6du1Kv379pHlTUiQ4ceIER44coU6dOia3N2vWjDfeeIPly5ezatUq/vzzT6pXr57PUkoKA7oIsoh754AA4BTKUtlMoBHhN4/i1/h1q8poilu3bhk85+vUqWPx8z08PJzBgwdz7949PDw8qFevHrt376Zz5855JLVEkjPMUoZOnz7Nhx9+yF9//UVgYCDNmzenb9++ODk58eTJE86fP8/HH3/MmDFj+PDDDxk/frxUiiSFml9++cWs/XQRMhKJKXQRZFAHsAG6Y+AzRA9SEhdbSzwjQkJCWLZsGevXr+f27dsIIfTb7O3tad26NcOHD6dfv36o1dkvLHz//fd5Ka5EkmuYpQz169ePDz74gE2bNuHp6ZnpfseOHWPRokV8+eWXTJkyJbdklEgkkkKJYQRZRWAHikXIDkgGdmDr4GhFCdMYO3Ysa9asoWvXrsycOZNmzZpRrlw5g0nvn3/+ydSpU5k+fTqrVq2S/nKSIoNZytCVK1ews7PLdr+WLVvSsmVLkpOTn1kwiaSgkJCQwNdff83BgwcJDw9Hq9UabD99+rSVJJMUdAwjyKYBo4BGQA8UxegCvvXfsKKEabi4uHDjxg1KlChhtM3Hx4cOHTrQoUMHpk2bxq5duwgLC5PKkKTIYJYyZI4i9Cz7SyQFmTfffJM9e/bw0ksv0axZM1QqlbVFkhQSXDzLkZSwC8RMYCSgAd4DrmDr4Ixv/Tdw9qjI+QNfWD0r9Zw5c8zet1u3bnkoiUSS/+QotP7EiROZzpIXLFiQK4JJJAWF33//nR07dhAYGGhtUSSFjIoBvYm4PxNUTUB0A9UuQEv9zp/iXqqGoU+RGEJSwi4i7s+kXqdPik2ZDomkIGCxMjR79mw++eQTatasSenSpQ1myXLGLCmKlC9fHjc3N2uLISnAZFZzzMOnJvU6fZK6bQ0unuWoVFdRhCCjT5GdYkFSNSHs/DY8Onxotd/z+PFjpk6dmumk98mTJ1aSTCLJGyxWhhYtWsQPP/zA0KFD80AciaTg8eWXXzJp0iSWL19O5cqVrS2OpICRnXXHw6dmpopNQc1KPWjQIK5du8abb75pNOmVSIoiFitDarVaLhdIihVNmjQhISGBqlWr4uzsbOQTJ2fJxZtnse4Y+hSlRpipdlk9K/Wff/7JX3/9Rf369a0qh0SSX1isDE2YMIFvvvmGhQsX5oE4EknBY8CAAdy5c4fZs2fLWbLEiGex7pj2KQqmUt1P81LkbKlVqxbx8fFWlUEiyU8sVobef/99evbsiZ+fH/7+/kaz5C1btuSacBJjzibGsTbqMaHJSVS2s2eQRwnqOThbW6wizd9//82xY8fkLFligM5PKDkxGliBEi5vC8wCDqPV2hMVftnAEdqUb1FWPkXWYunSpXz00UdMnTqVgIAAo+e8u7u7lSSTZEdm/ms53e9
ZjyksWFybbOzYsRw8eJAaNWpQokQJPDw8DD6SvONsYhxjH4QRlxDH65oUYhOU72cT46wtWpEmN2fJd+7c4fXXX6dEiRI4OTlRt25dTp48mSt9S/IPnZ9QxH0tQjsaJaFie6AtSu2x0aQkVuTsvplEhV82OiYpfggR9zWpvkYQ0OFDmr+4kIAOH1pdEQLw9PTk6dOndOjQAR8fH7y8vPDy8sLT0xMvLy9riyfJhMzGmG4MWrrfsx5TmLDYMrRmzRo2b95Mz54980IeSRasjXpMHSDVM4GZQJPU9vk+0jqUV8ydO5f33nuPWbNmUbdu3RzPkiMiIggMDKR9+/bs3LmTUqVKcfXqVflyyUPyaiYbdn4bCF+gHLABpQL9TaAqhrXH0nyHCmrkmCkGDhyInZ0dP//8s1waLkSYO8ZyMhYL0/jNCRYrQ97e3vj5+eWFLJJsCE1O4nUMKxt1A9YlJ1lPqGKALsFcx44dDdqFEKhUKjQajVn9zJs3j4oVK7Jq1Sp9W5UqVXJPUIkBeZnDJ/pxCPAUcEWpQL8bSMKo9lg636GCGjlmivPnz/Pvv/9Ss2bRWAIprFiqzCtjrCPQBwgG/EEEEBu538R+lo3FwjR+c4LFy2SfffYZ06ZNIy5OLs3kN5Xt7NmFUtGI1L+7UtslecfBgwc5ePAgBw4cMPjo2sxl+/btNGnShJdffhkfHx8aNmzIt99+m+UxiYmJPH361OAjMQ/Dmew8ECcB/9T2zIkKv8z5A18QtGU85w98YXKJISU5FvAHjit9cwJwQimxke4OTRcZ5uJZLtVB2vT2gkSTJk0ICwuzthjFmpwsSzk4uwPrgbsoSvodYH1qexr2zh4YjVV2pLabpjCN35xgsWVo8eLFXL9+ndKlS+Pr62u0ZCDrNOUdgzxKMDYhjiYoFiEl7gS+9ihpXcGKOG3bts2Vfm7cuMGyZcuYOHEiU6ZM4cSJE4wdOxZ7e3uGDBli8pg5c+Ywffr0XDl/cSMnM9nsrEn67cIO4wr0zwPrM40MK6iRY6YYM2YM48aN44MPPjC5NFyvXj0rSVZ8yMmylEClHMNx0pZqG6FYLdNQIVDeHk2BriiWzWBUZL7qU5jGb06wWBnq06dPHoghMYd6Ds4sLl2RtVGPWZcaTfa1R0nqOjhZW7QizapVq3B1deXll182aN+4cSNxcXGZKjIZ0Wq1NGnShNmzZwPQsGFDzp8/z/LlyzPtY/LkyUycOFH//enTp1SsWDGHv6R4kVkOHwdnd6NaYKC8fCIfXARRG71nXoYXkP4FRTlgM3AOuIjiM3QZtxJ+2NrbmIwMyy4bdUHi1VdfBeCNN9KKyKpUKouXhiU5JyfKfFJcFJDhGHqQGGd4TGLcU6A/EIni8+YPBJAYty/TvgvT+M0JFitD06ZNyws5JCbILIxeOkvnL3PmzGHFihVG7T4+PgwfPtxsZahs2bL4+/sbtNWuXZvNmzdneoyDgwMODg6WCSwBMpnJigtEPwEISLP83JsBKl2bM+b5/fiiVKB3RlmO2AHcpHTVNyhbo1OmMmWVjbogcfPmTWuLUOyxJCGn6TQPbdEtf2m1iQZpHlw8y5EUHwToFJknwGGSEjT8+fMQEGDr4Ezlei9SrkZn/XlyMn7vXtlL6NktpCQmYOvgaNRnQcFin6ETJ04QFBRk1B4UFCRDhHMRGUZfcLh165ZJR+fKlStz69Yts/sJDAzk8mXD9f4rV67IEh95hG4m61XGBnunNXiVscGthB+K0pPej8gVRGkQ94AYYDlwOLUXw5dJmt/E/4B6KJFj84DTQACPbxcNN4HKlStn+ZHkPYrFMlhR5pmU+jeYSnX7GOxnOs1DB2AgyhLZRVISSxj4G3lXaAjcQEkD8QrKuI8H8SKIGoCGlEQvrp9Yxd0re3P8G+5e2cv1E6tISawIjCUlscIz95lXWKwMjRo1yqRj3Z07dxg1alSuCCUxDKOfByiun0q7JH/x8fHh7NmzRu3//fcfJUq
UMLufCRMm8M8//zB79myuXbvGzz//zMqVK+V9k4d4+NQ0yOGTGBcFoiuGywhJwD2gNDAeqERmLxPlJRKMoixl7Ke7YjkqAsyZM4cffvjBqP2HH35g3rx5VpCo+GFKma/f2XhZyihQgNMob4sNKIrREZSl3LTggSe3/wXqYqjM10VZNlMUe6gF1CH0bM4TKSvHBmQ4z7P1mVdYvEwWHBxMo0aNjNobNmxIcHBwrghVXMgqm7QMoy84DBgwgLFjx+Lm5kabNm0AOHz4MOPGjaN///5m99O0aVO2bt3K5MmT+fzzz6lSpQoLFy5k4MCBeSV6scJUGHJs5C29iV5to0arSQQWAquBz4C3UJQh3QM7vdPpBqAzyrJDU6AJT27/S71OnxB8ZCEpiTtS9y04NcVyixUrVvDzzz8btdepU4f+/fszadIkK0hV/DBnWcqkbxE9gDCU9d+B6ELsI+5t4fyBL4h+cgt4M8MxXVHGfPp/v0JK4uIcy5+SmIBxoEGPZ+ozr7DYMuTg4MCDBw+M2u/du4etrcW6VbElu2UwGUZfcJgxYwbNmzenY8eOODk54eTkRJcuXejQoYPeGdpcnn/+ec6dO0dCQgIXL17k7bffziOpixdBf58wDkPe+7mBiV6rqQoI4CXAB8XnxxflMWj8wFbadwCtSPMduouHT03824wH1aVslzAKK/fv36ds2bJG7aVKleLevXtWkEiSGSZD3tkBRJMxxB68ibivISUxEtiZ4ZjdKBYl3b9rAzuwdXDMsWw2draYCuFX2gsWFitDXbp0YfLkyURFRenbIiMjmTJlCp07FzynqIJKdstggzxKEIySYXpS6t9gYLAMo8937O3t+fXXX7l8+TI//fQTW7Zs4fr16/zwww/Y20vltCCwYO7XGOcUcsXQRL8EcEPx96mIogg9QFGQjB/YoCGznCrmLmEUVipWrMjRo0eN2o8ePUq5ckXD+lVUyMy3SBnf6fNg+QOVU+8NX+BC2jE0QomM9Er993ngMnAB3/r9ciybvZOnch4apTvPBRycPHPcZ15hsXr2f//3f7Rp04bKlSvTsGFDAM6cOUPp0qVZu3ZtjgWZO3cukydPZty4cSxcuDDH/RQWslsGk2H0BY/q1atTvXp1a4shMUHwhasgBmB4R9mSZvE5irLkVRvlTtsN3EJZRhAoPhXpc65cTO2nEYqVaCdw0SCnSmGJDMsJb7/9NuPHjyc5OZkOHToAsH//fj788EPee+89K0snSY+pkPfoxx6kJL2EsbVTtwzWD1v7b3EroaSBsHf2QIUfcU//QJMSB8IGW4cn+NZ/g7LVM4+OzA5NcjLQCTgLLAY8gM6kJP/3LD85T7BYGSpfvjxnz57lp59+4r///sPJyYlhw4YxYMAAo8Rc5nLixAlWrFhRrBJ5VbazZ5cmJb3HgdEyWFZh9LJ6fd4yd+5cxo0bh5NT9spnUFAQjx49kvX6rIh/nercv6cLQz6OUjk+FiUyrAfwBaaT0V0DHABvDOuMxaBklA4HFoIKqjUZUmQsP9nxwQcf8PjxY959912SkpQJmqOjI5MmTWLy5MlWlk6SkYyK+fkDXxBxbycYvWFSl8FUu3ByM15lUKlU2No7Aza4eVfC2aPiM1W3T06MQglOCEuTQ9WkQPrWWbxMBuDi4sLw4cP55ptv+L//+z8GDx6cY0UoJiaGgQMH8u233xargpXPsgwmw+7znuDgYCpVqsS7777Lzp07efjwoX5bSkoKZ8+eZenSpbRq1YpXX30VNzc3K0ormfjRGJQ7qDZplePHkRYZ9ifQBeOZshvQEwgF1R0U/4rbyncuoihI40HU4NrJNUWmQnd2qFQq5s2bx8OHD/nnn3/477//ePLkCVOnTrW2aBIzUKIez2O4PHUOnTKi5Nu6ofexi3lsT/Tja6QkPSUlsSIpiW8ScS+Fs/tmcHbvjBxXtxfaF1Huy1Q5CrBvnVnK0D///GN2h3FxcVy4cMHs/UeNGkXPnj3
p1Cl7U1xRqtOkWwZzcXRmnY0tLo7OfF26klnLYDLsPu/58ccf2bdvH8nJybz22muUKVMGe3t73NzccHBwoGHDhvzwww8MHjyYS5cu6aPMJNaheaum1Ov0CbYOj1HuhvShvP5AHMZ+QbuAJqA6j1sJP7zK2KBSL0FZPmuCEmp83KCf7OqaFTVcXV1p2rQpAQEBMvlnIUIJnfdD8Y3ThdhXQaX+03S+LU4D7hjeO6dSs7G7WlTfzzDU/yfgABCGSr2kQPvWmaUMDRo0iK5du7Jx40ZiY2NN7hMcHMyUKVPw8/Pj1KlTZp18/fr1nD59mjlz5pi1/5w5c/Dw8NB/CntZAmUZrCIbyvsx36ei2f5AoclJRhlOuqW2S3KP+vXr8+233/L48WNOnTrFxo0b+fbbb9m9ezcPHjzg5MmTvPPOOzg65jzaQpJ7ePjURK12wFRkmNrWGVOOnIqTaDBVG79OQIcPqdvxQ1DFovg4ZLAkpUaTFVXeeecdbt++bda+v/76Kz/99FO2+82ZM4emTZvi5uaGj48Pffr0MUo8Ksl9lHH6IrrM6Mrfl7Bz8Mgi35YNpqMqbbDkPlBC/dP33RYYgZ2DGwEdPiyQihCY6TMUHBzMsmXL+OSTT3jttdeoUaMG5cqVw9HRkYiICC5dukRMTAx9+/Zlz5491K1bN9s+w8LCGDduHHv37jX7ZVIc6jRtjY5gVdQj4rRanNVqhnmUpK+b4fKhOf5GktxDrVbToEEDGjRoYG1RJNmQWQkDJb9QFZQZ8vco/kQ2QCjVmgxFCKGvV+bqXZX4p3fRJBfdPEKmKFWqFHXq1CEwMJAXXniBJk2aGDzng4OD+euvv1i/fj3lypVj5cqV2fZ5+PBhRo0aRdOmTUlJSWHKlCl06dKF4OBgXFxc8uFXFR50fjZKDiANCBVuJXzN8tHJiPF9cBhYTlJ8Asc2jcTOwQ0y3idoUAIF0relj6o07z4wfe4VJCcmc/7AFwa1AC3xQ8przFKG7OzsGDt2LGPHjuXkyZP89ddfhIaGEh8fT/369ZkwYQLt27fH29vb7BOfOnWK8PBwgwSOGo2GI0eOsGTJEhITE7GxsTE4pqjXadoaHcGCiHACUPTzHVotCyLCAQwUIlm9XiIxTWaVtRFqlPxCvYD2KM7UXYAdXDu5WgkoU6XVK4N44GKRrdBtihkzZjB69Gi+++47li5dapRE183NjU6dOrFy5Uq6detmVp+7du0y+L569Wp8fHw4deqUXFpOh87PBuGPkgxxB3CRiHtPibg/k3qdPrFIWTC8DwJQcgzVAbqTkriDlMQLwJ208c0O4Clp/j2pEZSqi6n3hvn3QWbnFtruRNzPWAswtT5gDn5jbqMSQghrnDg6OprQ0FCDtmHDhlGrVi0mTZpEQEBAtn08ffoUDw8PdleohovaJtv9Czq9bl+lvFarz4ObjDIs76rVbK9gGNKdMZpscCZh93W7leWTNv/HmZ1Xc1XW96Z3pJfnLrZHduPLaftzte/8IiU5jmMb3iQqKgp3d3dri2MWujHf8pXvsbUrXtGD10+t4+7lvcrDGS0OLiXQpKTg7qrC1lZLim1FKvj3BdJmnUrIsCD68S2UqDF/IIG0qDLdXRYGPCR9xIuTWwTJidH6ApO+9fplWYS1MGDJmI+IiODWrVvEx8dTsmRJ/Pz8UKlUz3T+a9euUb16dc6dO5fpMz4xMZHExET9d90KQFEe8+cPfEHEfW2qn41uXDYFyoHqDl5lbAiwMI2DztIUce8cSmCB4ZvFxi4E95LVDe6T+OhHoBLooskq1e2DEMLAilOpbp9sl7p05458cBGhrZ7h3KVQAhvStama5Og3moO5Y95qaSDd3NyMbgYXFxdKlChhliJUWFkS8YCt0ZGKsg30dfNktFdpAOK0WpMrtou1Wv3xGZWgT0qWLbQh9aZCNqHgmU8l1uf6qXXcvbQDJYliXWA9ibE+QHeeJO5ApbpAmbIRnN9/joCOUwno8GH
abJs6KJFlO1AewBMwvsu+I6NfRPzThaCqDXQlJWkX106uwdmzYrEZj15eXrka4avVahk/fjyBgYFZPuPnzJnD9OnTc+28hQHTJTW6Aj+CcCHiXhjHNo00q+K70XJbJhnWNcmLzVY+ssqnlVnovUeHDwnaMp6k+IznNuGbJLoRG7nGLFnyihyF1ktyxpKIB/waHUkNlHKQ1YFfoyNZEqGUN3FWq03mwXVWK/9NRSmkPn34ZVr5hBmc3WdZGKek6BMVfpm7l3aRlk06kowV44WoQ2yMitJlBLeDtwKpUS3CF4Quf1BFwB7T2aZTTLQ5WRRFI8maUaNGcf78edavX5/lfroKB7qPqcLgRQ3TJTW2oOS5cgbGm1XxXf9cvachJfFNUhIroIxt4zH/LGU2jM6XyTPb9O/SGMtTAPzxClSBkEOHDllbhDxla3SkyXKQW6MjGe1VmmEeJVkQEa5fsd2BEu/ynkcpwDCkXnd8k9T2zJIzFlQMwy/tUp3tSoGohP4XCmXdOez8tiKb6VeSNWnWnfSzyWCUfECGM92nT28QHR2PveMNAKIfh6D4Qbim7r8bxRfoAsbZpjVgdOe9RsbZa1LCTzToXnizkCfFxXBsQ/6fd/To0fz+++8cOXKEChUqZLlvUfcNNYXez4bGpHqMAjdQrJqGb4zQs1sytQ7pn6sGbwl/DH2BlPHtW/+NZ5bb5HM83TPbpA+fiFF8kShY/ngFShkq6ghMBy4uTP2uc5JeFfWIxanRZO95lKKPmydQtCrZmzYLF0zzaWxsLHPnzmX//v2Eh4ejTbdsCXDjxg0rSVZ0SfM5uAzCBWVs6KK7/FGUmPRRL7uBeggRTGJ8IucPfIEQyan7Zsw6fQnDbNPRODjcpG7dYEJCLuPrq+HsWUhK+o/0UTRq9S6aN6nJ1H5l8vFK5C7RT6NZMTz/zieEYMyYMWzdupVDhw5RpUqV/Dt5ISJ9SY3oJ98DGlIS05eTAXMqvpt+rr4I/IgyEViIrYPzM5fZyPJ86Z7ZpkqFVKo7NZ0fkq7N+rmHpDKUj6hIe5ynD1wUwAfhYYQmJ+FtY0tZWzsiNBoq29lTNV0h0KIUUm8yBFpvPi1Y4cxvvfUWhw8fZtCgQZQtW/aZnUglWWPg6yNGo4T7BgMRKMpMPZSRr5vp7kax5AjlGLoRcX8XiERMTz8uo1bfRat9BRubXWg0IbRuLfj4YwEoiu7MmbB//wXSz6a12gu81GUwJWIP5MdlyBPs4uLz9XyjRo3i559/Ztu2bbi5uXH//n0APDw8zCp1U5zIWFLj2KaRpCSmfx4q4fGg4c+fh2Bj64iTexnF8TnmCaAhJSke02+ZJCAaldoWN+8qz1xmQ+ePZPJ8GZ7ZmdXwK2jW/hwpQ/v37890lvzDDz/kimBFkeecXDkQH2NgrDyP4rgVlxDH68AOTQoXgf7AeU0KYxPiWFy6IvUcnItUSH1hMp/u3LmTP/74g8DAQKvKUVwwMr0zE2VZKwXFh2ITAGr1JbTa6yglNexT910MtE1Vskti6sVgawMd28K5i6upW7ssUU99CQkJISVFi60tpKTAjRugVgvc3IKBy1SvruHePfjf7tO8M6xjfl6OQs2yZcsAaNeunUH7qlWrGDp0aP4LVIioXO9Frp9YRdoEIC08HrEDTfJFYh4no5SO6Y+SKDQY4yWxi0BlIASh7U/E/fNE3J9hlE4iu/B20+H/Gc5XQJ7ZOcFiZWj69Ol8/vnnNGnSRM6SLSROaCmH8jhfjJL83AmohuEKb1MUF9GTGPoEFaVK9oXJfOrl5WVRDi3Js5F5ZM13qNQVUPMYVAJvbw0PH2pQ7qChKBakzsBBIBB4HvgFQwvSRbRaNTs3pFVePxp0hQ59ZjNypJomTbQEBUFICHTsCDdvagkN1TJ4MPz9Nxw7al6G5sLOgwcPeP/99/WT3owZWDQajVn95Gbmlp++boWbe3GqAdiaVSurMffzBTx5fBFj/6HU0HvcUd4Yp1P
bnFL/fQMl+/OK1PbUN4s4iT6dRCa+PqYw7Y+kO99FVOoleJauWSCe2TnBYmVo+fLlrF69mkGDBuWFPIWOrKrHp9/mbWPLtaQExqLEv+gogbEhvyuKN4Mpn6CsKtlbk5yYXAuL+XTGjBlMnTqVNWvW4Oxc8K59UcP0EuoOVOo4Kga4Uc6+Mo+jQvDwgEeP6iBExhfEbOA3lKWz5igz42XAc0A03l6PDM4X2LwGC2cPYvq8zWzeHIMQMGYM9O2rWIneeQfWrYNHD+Hho2h69p/PlAm9CWxe+B745jJ06FBu3brFp59+Kie92RD09wkWzP2a4AtX8a9TnenjW+Xa2Hh/YAXeH7gAO5+30Wgze1O8QtobQ9fmCryF4dsm/ZvFdDqJrPwzM5+kbEApt7EmT/IE5RcWK0NJSUm0atUqL2QpdOhC3esArwO70i1rAQbbdmhSSEIx8GfjJcNuFLfPwuITZOjjUXAyij4LDRs2NHgBXLt2jdKlS+Pr64udnZ3BvqdPn85v8Yo0prNIX6Jep0+ZvnQk49rXpGWgln377BDC1AviOxSlKBg4BGxLbQsDbjJkgGH25KNBVxg/ZS2+vtChExw/DkuXQrVqULcuNG0KW7dCUhJ07JjC1ZALdOhzgQO/TSmyCtFff/3Fn3/+WaBK0Awc83eBS7po+OwbwIP7uzhyYC4Ht3+Uq2PDy9OBR09MvSlqY/jG0LX9jXFpjfT7pU8nkTbhsHf2yFQG05OU1PMVAN/OZ8ViZeitt97i559/5tNPC9+aYG6TVag7GBsUm2AY1LsDiEGJbdH5AelWYQNS2wqDT1B24ZWFkT59+lhbhCJBg+7VcxB91Zqgv+unzrZ/wb9Odd6bvIlmLZtQInYXK2pW4OTJKHx9NTx5sgOtNqOz6FOU5YOlKHfbCCAKD49I7OwER4Ou0LP/fIIv36ZsaS8uX7tHxYpali4FW1t4803FGvTTT4ojdVAQaDTw1VdQvz6kpGgZOVLN7K+28cf6D3LzchUYKlasmKtLXEWVjM8+oZ2JVt2UmV/+z2Ap9lmZPrkvoz74EWNfoGggBOWN0ShdW8bSGmlvFhubpmg0wYAWtboRWm0P1GolQKCWXwO2fNfapAwde102Ef5/EeUtFloo/YTSY5YylL44qlarZeXKlezbt4969eoZzZIXLFiQuxIWYLILdTe17RFQHmWeGg8sKV0JgdD7AXnb2OKP4GBqNFlh8AnKLryyMDJt2jRri1Aoybhc2qPX55SIDc7ymKNBV5j91TaCL9/Gv2YFpkzojQfgIB7iYBNDXEQoMz6cyL0HEfjXrMDzXRsxfsoF4uMFWq1hxFdaVFkYikVoBGr1BRYtEgQEwIoVsGnTdSpVgmo14MCBCOztoWszRREC5W/TpvDHHzBypJqQEC0dOyqKkG57kybaIu0/tHDhQj766CNWrFiBr6+vtcUpsJh69mm1XTl3cXWunmdkqtP+tDlbeRK5EBs1ODs7U7O6DVCVGyE7EUKDSuWOT8lkbt91JjomCVubKzg7h1Gzug9Qldt391K3dlnOnHchoG400dHB3LhxmapVNbi7C65eDsk0WrLtkOcBDML/Ee64lXAvtH5C6TFLGfr3338NvutMp+fPn891gQoT2YW6p9+2hLR8QgdQlscA5j++T4LQmiytcTYxjh+jHpn0RypIZFYpvLCbTXVUrVqVEydOUKJECYP2yMhIGjVqJPMMpWJqufTj117kue2TMl0y0Dkv+/pCy0AtJ09G0b73BYQQVK2qomWglqCgCEJCoEMHuBoSxfgpF1g4exBLv9vLHe7i5BRMYuJlHBw0zJkjUKvhxx+D+fffy2g0GlQqgRCK/09QEDg4KErRJ5+Anx94e8OJE4pFSBdNdjwINCm2VPetTUnPWKNos5Mn1fjXzDp5YGHDy8vLYGk4NjYWPz8/nJ2djSa9T548yW/xCiSmnn1q9W7q1i6b6+caOawjI4d1ZNmq/Uyft5mo6BhCQu8xbVI
/vbIEpicXGe+/nv3nczXkAsuWaZW6fimK4m/umFarbXHxrFSkyiWZpQwdPHgwr+UolGQV6i4Q+m2lgX2QVo2etPmrS0oSL2Hob1TPwTlLf6SCphBlVim8sJtNdYSEhJiMnklMTOT27aJrHbAUk8ulIuslg9lfbcPXF5Yu1XLxIty4oSUsTFmWevddQYMGactWMTGwbJmyRPX77tM8fPyUqlVhxQotgwZpadcuzXozf76WFSu07NwJycnw/vuKohMXB506Kf8ODYU6deDePSWUfvhwaN5cUZjCwtQc2j6ZVs2qG0WbnTypJjQUVi/uky/XNb9YuHChtUUodJh89qku8un7H+XJ+Zat2s/oD9dQpYri3xYUFMPoDxUL/MhhHU1OLkz5t02Z0JsOfS5YNKbvXjlT5HxD02Oxz9Abb7zBokWLcHMzDHGMjY1lzJgxxSrPUHah7rptBxPiTJbhuJKhLX0YfWEqvWE6TL7wm023b9+u//fu3bvx8EhzLtRoNOzfv19m1E1HTpYMgi/fpmWgoghNnAi+vkoUV1AQfPABLFiQ5sR86JDhElVUdCwdUhWbypVNWHeOQ3S00mezZkqfoaEQHKxs9/aGAwegalXF6nTsGGzaBHa2Ski4zmcmsHkNDvw2hdlfbePYUWW2vXpxH1o1K7xlOUwxZMgQa4tQ6DD17Ju0aCutmuWN5Wz6vM1UqaJYNnX+bcOHK+0jh3U0mFwo2037t+VkTJ/4bRVFzTc0PRYrQ2vWrGHu3LlGylB8fDw//vhjsVKGTCEQbI2OYFXUI+JSS2qYKDJBDxRlqA1wH8XHPwDYkRhPr9tXidFqGY+xz9EGF2i5rJ3Jcx8Nvs+cX08RHPYE/4reTH61MW7D+nF4wO+GVYyFCrcSvrlq4swsTL4wo3OiVqlURi8KOzs7fH19+fLLL60gWcEkJ0sG/qnO0DduaPH1hWXLTDsxnzihKDzpl6ji45MJCorhzTdh4EBFmRo+XFFu/vkHEhOVJbExY9BbmIYPVxSiPn3Azk5RlJYvT1Og3nkHSpSAJ09UBi+QwOY1iqyztClsbGy4d+8ePj4+Bu2PHz/Gx8fH7DxDxYGMz75ajZqhrBNkjqmlLCDL5a2jQVeIiIqhbn34+GM1N27YULWqhqpVtfz5Zyw9+8/nwJ/ncXAQ9OmjJj7eBjc3DdWqaTl8NJhW3aZz78ETg/OlRwiR5RLb47AQEAMpSr6h6TFbGXr69Kl+thQdHY2jY1rFW41Gw44dO4xunKKOqaWs0QlxCNItiWm1BGM6QTooVZP6oyyb7QQQggAhiDNxzG4bNf71/VA1N86EfDToCh2nfKuYR9trOXkygY5T7jLLvrmJrKEXibj3tEiZOPMCXXb1KlWqcOLECUqWLNhRfdYmJ0sGOnN9WJhiETLlxDx8uJIEsUoVxa9BZ87/78ItRn+4Rr+8VaoU3Lyp7Ovrq7QdP25oYWreHB49gpIlISwMunUzPuehQ9CuXdF2kM6OzCLJEhMTsU9XIkhiOeb4yWVc3tIdo9XC/v0q1Oo6aLXdU6MpL6BSCa6FXiAgQHDqlApdpuqoqB2cOnUB0PDg8XVaBsLJk1G063UelUpFlSrmy1Cioi+xkUXXN9RsZcjT0xOVSoVKpaJGDePlD5VKxfTp03NVuIKOqaUsV6AmhstfbijKTsaYlyXASpTcoSdQigdUTj32ONA+3TG7bdQEA9+838ekLJmZR+d+vgDTWUPLAS5FxsSZl9y8edPaIhQKcrJkoDPX9xuyUG/lsbWFM2fgf9shKRnu37fF1kbDoUMCGxsVY9/ughCC33efxtPDibt3k9iyRYunuwsBtd1JSL7HsmUiUwuTv7/y7z59FEfp9EtrJ05ApUpF00HaHBYvVoqAqlQqvvvuO1xdXfXbNBoNR44coVatWtYSr0hg6lk9Yjg8CIelS4XJ5S3dMWFhajSaOmi1yhtGSSvRCAhm6VItL72kxlSle7U6mJUr087Xpw+ULi3
SpZPIXoamfYZx69w7RdY31Gxl6ODBgwgh6NChA5s3bzYoT2Bvb0/lypUpV65oaIjmYiq0XoXxkpgLynLYIeAq4ImSD3cESlUZXU5Q23THBqIUFRiCEoXWqW0dvnk/8zVdne9F+llukyZaNmyMAdE1g0SpWUPFK0XGxJmX6F4QGVGpVDg6OlKtWjXatGmDjY1NPktW8MjJkkFg8xpsXjM+1UkZKlfWcuCAYt3x9obTp1OoUkXn96NhwbKdfLV8J35+arp20zl/qti8Zjyvv/MNLQOFSQvTiBFw6xaMH6+0t2ih+Ay9846Kpk2F3qcoIUFFeHjRc5A2h6+++gpQLEPLly83GNP29vb4+vqyfPlya4lXJDD1rG7WXBmjGZ/fOuuk7pjr1+3IrPiwra2W6GjTThlCKNt1favVyv1kiQzlajQokr6hOsxWhtq2bQsos+RKlSrJ9OyYDq0XGC9vpQDXAC+U9FSNUJbRMuYETclwbDOUqi9e3q78viFrfwWd78WbbxqG/3p6uvLkSSZZQ9lp0sR598peQs9uISUxAVsHRyrXe5FyNTo/w5Uq3Hz11Vc8fPiQuLg4vLy8AIiIiMDZ2RlXV1fCw8OpWrUqBw8epGLFilaWtnCS3qHz8NGLVKmSwooV8MorULassqx16JBitUlIgAcP4N13tam+QGkzWFP3wfEgiI9Xjvm//4OAAKU9NFRN88a+eHm6cOhgCFqNoFRJFfVq+/Lxt0XPQdocdFbQ9u3bs2XLFv14l+ScjH44ZUt7mxyjWq3yb1PpG3Tj2s1NQ1SUcSV7IaBPHzX29ikkJBg7ZahUGn3fZ84o90NQBqtoUJASbTlpErz+OtSurcilK0Hj3GpckfQN1WGWMnT27FmD7+fOnct033r16j2bRIUIU6H1KRgviT1FyQkagFJOcgdK+bxKKJYhXbbpWBPHXgCWTnkpW1kyC5Wc/eV7fDh+KkJkzBoaDdzE2aOHQT93r+xNrZSseD2lJO5I/U6xVYhmz57NypUr+e677/Dz8wOU8hwjRoxg+PDhBAYG0r9/fyZMmMCmTZusLG3hReekXK7OKJo1i8bWVokGi4oCZ2do105ZxnrwAFQqQ18g3Qx23fJRRvdB2G1YPGcQ46asZckSMoQSDyyWSk92yHQquYMp/6CbNwUqlcpgjN4KU6xxI0eqTIa6657vHh5a0t4ShpXsFSXpAnCejG8RrVbogwsOHIAyZRQrqM7f7p9/lO8dO8L164r1tHRpuH8/rQTNzUMjqNP+0yLrY2qWMtSgQQNUKhVCiGwtQsUpysBUaP03HiW5lpTAqqhHLE6NJvNT2+KSksRJDEPr76JYfn5DRT1HJ5ZkONbNwZals15nxNAO2cqSeahkeWrX2UDvrv3RpFxD8WAqheLZBA9u/kXVxq/r+wk9uwVMJAIIPbul2CpDn3zyCZs3b9YrQgDVqlXj//7v/+jXrx83btzgiy++oF+/fhb1O3fuXCZPnsy4ceNkjhfQJ5N79CRGP2tVqxVrUPoos+HDleUuHx8lEszVFf77D5wcE/hg2i94e7nwNFJw6KCKhnV99SHDdf0rFvnw+GchfaWB7ChOlQaehcx8OUt6KhbJ9GNRCJHp+Ez/fP/n5DViYy+TlGyqkr3iP+ThfpPomIXY2ghcXJ2o6VcGVCqO/R2qt7pevKgUIN68Gezt05ebUe6xsDDDEjQj3lFzO3grHj55k0PJ2pilDKV3IP333395//33+eCDD2jZsiUAx44d48svv+SLL77IGykLAFlVpweI12o5nRDHqYRbuKrVDPMoSV83xcT8yp3r9MN4lVdXb3idjQ3zfZTllboOTvrj6nYri5sZilBGM+y65aMMQjJ71I8GLcAY0qoYHwWGkJIYx58/D8HG1hH3kn6kJMZjas05JXEh/+6ahgpBfMwTBu9Q4WSXSIXaTUm2bwNgcdX6wsK9e/dISUkxak9JSeH+/fsAlCtXjujoaLP7PHHiBCtWrChWltS
sSJ9Mrm59ZfY6fDjY2Bj7NjRvDnfuKLPWBw+UkPvnnoMDB+J58Pg67Tugn1lPmdDb4IVSnMLjLSVjpYHTp0+TkpJCzZrKfXzlyhVsbGxo3LixNcQrlGTmy3nsaAR/7/pMPwHo2HcmarUtKckpaLSCsDsR7D10DhsbG0a92YX/+3yA0fhVl3yTzPyHatcoTUjYQ7QagUqtwsvThSkTevP6O9/QrFkEtraKRXXePOjVC3r2NCw3o4u6TN/WrKmW7X/cyp8LZwXU5uxUuXJl/Wf27NksXryYESNGUK9ePerVq8eIESNYuHAhM2bMyGt5rYIuhD4uIY7XNSnEJijft0ZHMPZBGLEJcbwttFRHKbPhodWyICKcrdERAHjb2LIDZfUW0kLry/DsVel1ZthroRdoGRiRWlF7NkeDrhjs5+XpkHrWZBRFqD3gDEwAUQNNciwR96LSSZdR2orEPLYn+vF1UhK78jSiDOEPY7h37RDn9k3n7L4ZRNzXkhQ/hIj7Gs7um0lU+OUc/66CRPv27RkxYoTBy+Lff/9l5MiRdOigKKvnzp0zOwFjTEwMAwcO5Ntvv83WJyMxMZGnT58afAozuiKpVRqOo2f/+fpxmj6Z3CefKMtf4eFK9uigIGW2CmnJFBs0UJQglUo5JiZGKa2xcqXiKL1smZbKlZWZucQ8Dh48qP+88MILtG3bltu3b3P69GlOnz5NWFgY7du3p2fPntYWtUATWD5J/2/F10dtMH51vkC6CYCrewzPPSdISEjG3UMpHePrCy+9BOXKafhq2U7en/qL0XlsbXUeqmnPapVqB05OGoJO3aBOQDQubjE8fBTN2Yvn6dBnNq4uTkb3k1Zr4h5L58OkbzuhxtG9Ul5csgKBxUkXM3voV6lSheDgrAsyFlYyywa9KupRFkHryva+bl4IBBdJq1a/G6VshxOQiHFV+rrdlCR1rr2zt6yYm3HUsOpxPFALQ/OqTvIEjD2XLqPEwjVN/UQCp1GpGlGubDART1TEx/unXYkilpn0+++/Z9CgQTRu3FhfoyklJYWOHTvy/fffA+Dq6mp2AsZRo0bRs2dPOnXqxMyZM7Pcd86cOUUmZUVWpQLSZ5MGRdl54QXYtk3JG6TzbThxQlkimzABjh5VrEO60hrt2mUeCSOxjC+//JI9e/YYKOteXl7MnDmTLl268N57uVeR3VzGTG6Hc4ZkvwWNwPJJBoVOsyp78eLgrwwmAH5+ijXGVIbpb77fw/99PsDgXBXLenAzLO1ZrVbvQKW6wJw5gq+/ViYIK1fqEokKnJxU3LkXQdRTZcKgy8oeF2d4j2Xuw6QioMOL+Xo98xOLlaHatWszZ84cvvvuO33yraSkJObMmUPt2rVzXcCCQGbV6RdrtWQStM4rqdsBIjQa+qOoEBtIyzb9Gyq+Ll3RoCp93W5lcZv9htmyZW6GNXwJpK96/OhJAhjlt9ZJ/jo2tstQ2dwmJXEhUBFFEWqVuq9uPzu02h6EhOisPxnMtUUoM2mZMmXYu3cvly5d4soVxZJRs2ZN/fIBKNYjc1i/fj2nT5/mxIkTZu0/efJkA1+Op0+fFtqItawUdw83F4M8Q7roFiGgVi24fBnu3lXM9uPHK20LFiiWo5QU0+U4imuuoNzg6dOnPHz40Kj94cOHFi0H5yY9PPfh7uaU/Y7WJNbwa1ZlL9JPAHTK/JYtxolAmzeHTZuMfXG1aGncWPDvv8E4OFymbl0NgwYJAgIMy9ekTyS6dWsCHTooitLevYr1x8kJNCk2lC7py7GjTzL1YWrw2ngeXC9YpaByE4uVoeXLl/PCCy9QoUIFvb/D2bNnUalU/O9//8t1AQsCmVWnd1ar2aXVGmaJRln+Wo6yZNbx1mU0wFngdLr9mgD1HJ0MFKGckFlIffqXwPbIbuz/5xaU9WfA4jF8O7ILCdEbgXMokWW1UTIg1QbVLmzsnUhOiEBxNHJCsQZB2pLZbZTyszb4+mq4cAGMEgoUocykOmr
VqvVMCefCwsIYN24ce/fuNcjgnhUODg44ODjk+JwFiawU99dfac1Xy3YaFEsNCYFx46B3b+jRQymv8fixYhFauFB5gahUSlbqypW1HD9uOLstisVU84u+ffsybNgwvvzyS5o1awZAUFAQH3zwAS++WHStA3lBZr5qHm4uHD4cw40bEBmpjHlnZ9Mh71otlKn1rkGFev+aFTh7MRJ3dy1eXlpmzTJMHqorX5M+kaiHmzOhoXGp1erRV6uv7utvUsb0beN2+fPg+tU8u07WxmJlqFmzZty4cYOffvqJS5cuAfDqq6/y2muv4eLikusCFgQyq07/kos7v0ZHGiwoXURRIaqghM3XAuqiBEDq9ktf3f5ZMW2GVbFo5QzgKY9dOvDluP0Gx3iXb8ndSztQ8mW/kir5dSAOxH2S4nQFRTJKviNV8gHAf8AFLl4UJCSoQHURKJqZSTUaDatXr2b//v2Eh4fry3ToOHDgQCZHGnLq1CnCw8Np1KiRQd9HjhxhyZIlJCYmmp24sTAsGQD08kxLuJiV4n7xym1KlICICGV27Oys1Ag7dkxx7lSroUxppS0t35CKimWr4uXpTPDl2zRv7KVEzKSb3cposZyxfPly3n//fV577TWSkxWfFFtbW958803mz59vZemKBroJgJMTBAYqQQOuroZLVrpJQY0akJRkWKH++a6N2LX/HGXLGobJ646pUiWtFp8ukeii2f0YN2WtRdXqiwsWK0MALi4uDB8+PLdlKbBkVp3+x6hH+KEsJK1DyTEEihpRAXBHKathBwwHegGLgEaOzgbV7Z+F9GbYnfvvY2NfDv/2L7LrbgW61zPtwxUfdRdF0TEM9rd1uA0aG1JS0vsT6ST/CsVKdBAln3Yy0IjYuEvU7/wpQogim5l03LhxrF69mp49exIQEJDjhKMdO3Y0ytE1bNgwatWqxaRJkyzKYF0olgwykJX/RL+hC3nyRMmD0qSJMpu9eRNiY5WZa0KCIDFRhbOLYu4/eVJNeDj8+u1rUuHJA5ydnVm6dCnz58/n+vXrAPj5+RXZCa81uHjlNlWrqli+XMmY/vzzSu4sb2/jSYGnp1JCJn2F+t93n8bPT8WyZUIfJr91Kzg7OdG8cVnOn3toMpGoTDFhGrOUoe3bt9O9e3fs7OzYvn17lvv26tXL7JMvW7aMZcuWERISAkCdOnWYOnUq3bt3N7uPvEYXUn8lKRGBQPe6Egi9L9G8dPuXQFEztgEOQB9gCkqSxXdQSmvojjfFP/ejWNh/vkHV4LPBYUyft5mo6FicHB0pX9aLmNh4g+1B/97j6ZNoBFFEhX/JlX++YzVPiY1NRG3rQOV6/fR5gmIj76IU+rBDiSybDdwlJVHnC1A3VfJgFA+n54FNqb+gTeo+qWGc4gq3zv1G9OMQUAlIvUKZFXoEiAq/TNj5bQbHuHlXKrDh+OvXr2fDhg306NEj+52zwM3NjYCAAIM2FxcXSpQoYdReFMnKf0JohVHVet2s9mmkM1/P7Scf4lbAxcVFpn/IAVlVf9ehLBsLg6ABJyfo3FlxcNaxYkWa/0/z5rB1a6zR8bow+RUr4NhRR/7e9VmmsskUE6YxSxnq06cP9+/fx8fHhz59+mS6n0qlsijpYoUKFZg7dy7Vq1dHCMGaNWvo3bs3//77L3Xq1DG7n7xCF1Lvi+L87I/iJrwrIY6xCXHUtHcw8iWKI31OUMWHqD2wF12AOkpofkIci0tXNMhVdDYxjnG/X8HXL61qcLte59FqRbr8K/F4eccbVB9WLnkA8AawA03yReKTS6Bkme6PVnPWIIu0i2c5khJ2gegBdE6V9i3Scl7/AtRHWULbjbKwp8K40MgOIIWIe9EoebaVKxRxbycR92dSr9MnRspNVPhlzu6bCcLX7GOsjb29PdWqVbO2GEWCzB7EahuVyXxC4eHg7hnHuClrOfDbFPkQz0NefPFFVq9ejbu7e7Z+QVu2bMknqQofWUVNpleIypb2JigowsA
/SKMx9hlK7/8TFKT4GoF5/qIS8zErz5BWq8XHx0f/78w+lmaffuGFF+jRowfVq1enRo0azJo1C1dXV/755x+T++d3zhVdSH11FFvJKRQr0EmUV7gKFcEoztCTUv8mkZYTdB5KsHktlIWmy8CP6Y5fG/XY4Hw/Rj/Gt4pg6VKtPleKo4MyY84sl4q9vYq0jNHzUNy0A0jzVopMbatD6NnNAFQM6I1i9emF4jx9PN2xdVCyVOvaTqRK60tayP2k1L8XgLImrtApwJ+w88Y5XpQ2U1c182OszXvvvceiRYuytHbllEOHDhXZ7NOZ5RQyRcO6vpw4oTLKJxQQAKNGabG319LtlbnZ9iPJOR4eHvolYA8Pjyw/ksxJHzWZdc4rQUiIEvq+YoXyNy5OsYbq2t5+G27cADc3xVIaEgLTP1LKM02Z0JvQUGUZecUK5W9oKHw8sU/+/uAigsU+QwkJCWZHwliCRqNh48aNxMbG6jNbZyS/c67olsF0ofIZQ+vXaVKMfInsEuKMcoJ2B77GMEC9G7AuOS05F0CoJom2TQ1nx2obZYacWS4VZVkq4xnTB/hvQLekpUlaBCiVxet1+oSze2enSpIxg+l3Jto2AN8AHwALsbFV4WjvRGzcQNNXKJPQ+tjIuyCGWHSMtfnrr784ePAgO3fupE6dOvpcQzrkLNkYc2fHOtL8iZS8JsePK/mEevVS/CgqVYJmzVI4efJClv1Ics6qVatM/vtZOXLkCPPnz+fUqVPcu3ePrVu3ZrnCUNgxN93JvQcR+jD3Q4cU60+VKnD6tFKY+I8/IDFRjae7A3/+mYCHmwtL57+kL8+U1bKzxHLMsgylx9PTkzZt2vDpp5+yf/9+4uPjn0mAc+fO4erqioODA++88w5bt27F39/f5L6TJ08mKipK/wkLC3umc2dHZTt7dqHYTnZjmJNZlzm6noMz830qsqG8H/N9KuKiVpvM32yHYYC6qczTlW3sOXk8Q3ZQTVp20MqV4cgRparwgAHKX61Wg3HGaF1V+t0oVh1FCkEyxza9w7FNo7lx+meU4H9T0qaY6M8feAtUfniVrcPmS2E816I6anX6cxn2k5wYxfkDXxhkonbxLJcabWbimAIaju/p6Unfvn1p27YtJUuWlLNkMzB/dqyge7BX963D//5nq68wf+wYesuozCydf/zwww8GZZiehdjYWOrXr88333yTK/0VNHQW0BLVRuBUfhi370YYZXQ2tXzlX7MCly6p0BmchYDgYKWIqhCQlAQuzg7UrF6WMj6eNG5QhYDaFQwsrrO/2saUCb25+e8i/lj/gVSEngGLLUP79u3jyJEjHDp0iK+++oqUlBSaNGlC27ZtadeuHZ07W1bMs2bNmpw5c4aoqCg2bdrEkCFDOHz4sEmFKL9zruhC6mOBEIyryU90cjU6ZphHSRZEhBvtC9mH1g92K8G4kHiDrJ/xCVp9qKW3N9y7B46OioUoKAgSE9NXMU4f4B+dKnUA+iUtIUhJrAR0JyZxB4oylPHYC4DAOJw+AFRNSB8y/8l7L7D38DwgxsQVCkZoBxBx/5yBL1DFgN5E3J8JRld1J3CxQIbj5+Ysubhg7uw4PTp/Ip1VackSePBAS8+eMrN0fjNnzhzefvttypcvT9u2bfXP95z4znXv3r1ABcXkJrqx6umpJTJKsexUqaKEyb/9NrRokXnOK11ovIND2vP83j0luWjVqtC3LwQFxRN06gYdOsDVkCja976AEIKqVVVmWVwl5qMSz+AIkZKSoi84+dNPP+XIbygjnTp1ws/PjxUrVmS779OnT/Hw8GB3hWq4qM0PS7aEs4lxfPLwDrFaLR4o2mN94Arg4+isL7Canq3REayKekRsaj4aR5WaCnZ2qFDxRJNCZTt7BmcSWh/bwJmFcSn6KISIyFiuh94gORmio5XlgpUr05zr3nlHhaNtWa7eeEiKBlxdNahUKjw8BGFhAiFscXLSULmy4MqVALRaXch8Mmp1I0q
VOs+jR2o0GlsghdKltcTFQXy8mpQUG1AJnN3KkpKclBoy3wf3UjV4b3pHennuYt5+L74YP1UfGaZJSkQIe2A7+hB8VRO8ytgQkFqaI7NoMl3f+UVKchzHNrxJVFQU7u7uWe+bksKhQ4e4fv06r732Gm5ubty9exd3d3dcXY2V4rxCN+Yjb64okKH1uiiaw0cv4umZQoWKynJXpUpw546KerUDzHKCTt9PmTIprEg35pUkcXWkM3UOeBodj2eVEWaN+Tt37nDo0CGOHDnC4cOHuXr1KmXLlqVdu3asW7cuR+dXqVTZLpMlJiaSmJiYJnNq1vWCOOZ79p/PtdALPHyoxds7rYzGmTPw8ceQkqKiY5sAPp5ovHzVs/98roacZ9kyoR/bw4crQQO//Zb+Ga8sm82cCSOGw4MM2/Prfhi3y58zOwtf0kVzn/M5yjN05coVDh06pP8kJiby/PPP065du5zKq0er1RrcCNamnoMzjio1b6E1CKGfhLHPj46+bl76yvOW0qKMB3+kK8dRpeE46tWD6Gg1J0/aEBWlYdw4Lffvg0ajJiHBBpXqMV5ejrTvEG0UkrlpUzKtWsGRIzZotSEo8Ww2gDda7TUePLDHwyOFatWSuHIF1q/XHa1lxQot/9vlSanKFYi4ew0PH0eqtShPuRrVlWKEsVC7cXO9kgMQtGU8SfFDUBSh1LB9cZvIB3FEhV/Gw6em8unwoV4pUkL9sw7HtyahoaF069aNW7dukZiYSOfOnXFzc2PevHkkJiayfPlya4tYIEjvJ1TbX8vp0+DolDbrvXtX8PF48yqeZ7QSjRyJTBKXz5QvX56BAwfSt29f/vzzT3755Rd++ukn1q9fn2NlyBwKUz0+nQU0YxmNBg0Uf7etW8lUSckYWq+LoPzjD0NLaPrSGs1MbJeW0tzBYp+h8uXL06JFC3bt2kWLFi3YuXMnjx49YuvWrYwbN86iviZPnsyRI0cICQnh3LlzTJ48mUOHDjFw4EBLxcpTdL5DpnyG8pqypb3Yv1/FyZN10Gon8OSJP8HBKp48gaioOiQmTiAhoTrhD2P580/DaJygICVz7/79KpKTK6ME/vsAr6f2ngS8RFSUP6dOqUhIMFzn/vNPFfFPo7CLO073jg+xjQ1i+5wRdC0bZlCMMD1pPkGHUZIK3AXeQmirGVSy14XYF4ZK9+PGjaNJkyZERETg5JQ2M+3bty/79+/P4sjiRcbaY1Wrpvn6rFwJVauq+N/uUxb1md6X6NhRL6r71uHgto+lb0Qes2fPHqZMmUKrVq0oUaIEkydPxsvLi02bNpmsWZab5Ldv6LOgq0qvK6OR8fmrC4PP6tjsqsWnD603tV2G0+cOFluGSpUqxaVLl7h//z7379/nwYMHxMfH4+xseQG38PBwBg8ezL179/Dw8KBevXrs3r3bYr+jvCazchymymnoKs6b4tyue9meK2Ol+iSVF+CSbnlLyRYNtzCsOt+Eu3fP6/2NgoIgLAzKl1cTGloH45zYukr1kSgh9Y1ITg5m+HCtvrbTvXtaqlZVsXSpxqCw5jdzptIjk9lOmk9Qb9LC9tNk1FWy14fYi4Jf6f7PP//k77//1hcm1uHr68udO3esJFXBI72fkKnIx6ZNRY5msDJJXP7TrVs3SpUqxXvvvceOHTvw9PTMt3MXpnp8uihIDw/TZTSWzn8p22PTZ2TPWC0+fWmNkSMzqyYvLaW5gcXK0JkzZ4iMjNSvI0+ZMoXg4GAaNGhA+/btmTVrltl9ff/995ae3ipkVo4jo89Pq9MfM26X6Ug4gKmLymRqUcmMu3fCUWqBZR3+LkQ3Spa4TXXf8mzadA4fH/jqK5g+XRd6bypBQFoFeuiBSnWFxMQktm5V0aF1HWKjQ2jSJNoik6wubP/c/i8Q2gxh++lC59NC7At+aH1mvnC3b9/GrRDUB8sv0ieBk1XkCzcLFizgyJEjfPHFFyxatEjvQN2uXTtq1JCOujrSh7f/c/Iad+8
msWWLFk93wzD47I5NHxqfvlp82dLelC4huHo5ItNq8jKcPnfIkc+Qp6cnvXr1IjAwkFatWrFt2zZ++eUXgoKCLFKGChNKCH3W1q/tkd04szPzZZOjLSrRyzPddzNStvvXqc79uzsxzvycgKIU6arOX6aqbwkA1GoVkZGClBSoWlXDkyc70Gp1oezp+zEMvXdzS8HZWU19f8UZr2f/+Zw8ecHiDKcePjXxLF2TiPu7FIuPiUr2aVmwC36l+y5durBw4UJWrlwJKE6gMTExTJs27ZlLdBQl0s90ZRX5ws348eMZP348oKQ/OXz4MLt27WL06NH4+Phw+7b5Fr6YmBiuXbum/37z5k3OnDmDt7c3lSpVym3R851nsVxmdmx2/UlLae5jsc/Qli1bGDt2LPXq1aN06dKMHDmSmJgYvvzyS06fPp0XMhZJdI6h10Iv0DIwgqshF+jQZ7ZRdt1+XSoC5zHM/HweiAduo1h7woCbHD91lWuhF3jxRYGPD0ycCBqNFq32AnDJRD/nAC90ofcqldYgg+mzZDjVZ7lWpebn1ofl9zFre0Hiyy+/5OjRo/j7+5OQkMBrr72mXyKbN29e9h0UE9L791y74kXzxlUpXdKPgwfceBrpTElvF2Yt+E1mkC4kCCE4ffo0e/fuZffu3Rw8eBCtVkupUqUs6ufkyZM0bNiQhg0bAjBx4kQaNmzI1KlT80JsiSRHWBxa7+PjQ5s2bWjXrh1t27albt26eSVbtuRHaL0lPDrwJ19Oy9wypAtHh7SQTJ2zaWYhkj37z+fcpfOUKaMiJMSGkiU1XLoEaUU/dFaeRqjVwezerTUI07x1C1Q2Tmi1KrQpGtAnCPBCKRCiBjwBW0p6x/DbunEGJteM1quMIaLbI7tl+pvTR4ulD8s3d3teY2lo/fr16zl79iwxMTE0atSIgQMHGjhU5wcFPbQ+I+kjzBo3TvNxkHlRrIO5ofUvvPACR48e5enTp9SvX1//vG/Tpk2++g9B4RvzRZWsnvUFmTwLrQ8PD38mwSQK5ialC758m+eeE4wYIQAlb1H79vYYl+DogVZ7BVvbJH1/zZtDbFIZarb7Sgl5TxkCRgkCNgA3gUnYO6zWKzqPXTpw9I491OzGiOVj9Uc8ArZHolfqskIXQp/T7QUJW1tbXn/99ex3lBiQMcJM54Q/+6tt0tRfgKlVqxYjRoygdevWBSbLevT0tajsc+TZIckF2vMDL7xWsIpom8PTuCS8NmS/nxxZVsKcisNHg64QH59sVMVYrU5BqzWuIK9Wp5CSkpb0a9s2NQmJT0k+8AUOzu7GPjrpfIZsbHZRt7YSCffYpQMvvvVn1j9gejezFKKsyGgZqhjQu8BUrN++fbvZ+/bq1SsPJSnc5CQTtcT6zJ8/39oiGBG8736BWAEo1pgREV3QiNWalwhaKkNWwlRYZXoHU93yQunSgtBQw5BNrdZUCY4LaLVC77y6f78KZSmtu+LIzA2lyoaqCYhupC+zoVY3RUUwn74/Od9+vy7PkBJeP4SkhF0GZTusjbmFJFUq1TNnXS/KmKP0SyQSibWRypCVyK7icNryguDiRVi3TslmqtFA7doQHi5ITg4mJuYKrq4p2NsLqvn64eXpzMG/rgHV0PsUiZmo1U3xq2eHq4cnoZfXU6KMD9CIx/f/pGkdLz59f3K+hmcW9DxD2tRSKpJnIzulXyKRSAoCUhnKQzIuA13s5UoJ1+zD6cFweaFuXZg3T8nmu20bXL+uxsbGhrp1Nbz+ehJ16yrbjh19wt+7plGu3sfcv2voU6TVdiX85jIibFVERSaiTo5k+uS+jBw21ujcQX+f4PyBL/J0+aow5RmS5JzslH6JpKhwNjGOtVGPCU3NRTfIowT1HCxPRiyxDhaH1kvMw1S5iY8H9KVdr1nZhtMDlC3tbZTe/dAhiI9XkZRUh/j4CZw86c+ECSrOnDFcevCvUz21JEZaARGVajNRT2N49KQyGu04Hj2pxKgPfmT
ZKsPogKNBV+jd9ZU8L5ORVrYjXZGTAppnSPJs6HKp3Px3EX+s/0AqQpIix9nEOMY+CCMuIY7XNSnEJijfzybGWVs0iZmYZRl68cUXze5wy5YtORamKGFqGUgrGuPsfMHMyBpBSIhSsbhpUyWb7/37atKH1Gu1M1GpGjFlSjApKWlLDxM/GsOBfS+n+QepdiHETSAAwxIejZg2Zysjh3XUn3Xml/8DUQfEcfJy+UpftiOdjEqeoU9z7RwSicR8nj59ava+2aWiKG6sjXpMHSD1aZ9afEhpzy5Zr6RgYJYyVFBCKwsTJpeB6I4Ql7C1VfxRsoqsufcggg4dICZGsQhVrgy3b9uQmGi4/CVED7TaGxzc9p5+xt28VVPqdfokdYluDS6e5Yh6YIdWaxyOHxG5yOC85y7eQ6MZSl4vX+nKdqSXsVLdT/M1z5BEIknD09MTlUqV5T5CCBk0YILQ5CRex/Dp2g1Yl5xkPaEkFmGWMrRq1aq8lqPI4eJZjqR4U2U0NPrw96wia/xrVuDyjUhWrBD6fXv10qT2kdanjc1u2gVWN1p6yJjD58KuMTx5bByO7+VpWBCxbu2yPAjfjUaT92UyClOeIYmkqHPw4EFri1BoqWxnzy5NitHTPkKTwgfhYdJ/qBAgHahzkV6eu9jfXSnUqrbrwuHVX5Ax/D0pCbMia2rXqMCu/ecMQurj4w1D6lXqXaC6yKfvf5StbB9NnciH4z42kmfGlCEG+33y3gvsOzKv2C9fySUDSXGjbdu21hah0DLIowRjE+JogmIR0iUuGQCcS4hjbEIci0tXlApRASZHytCmTZvYsGEDt27dIinJ0AxYnOuTiaCjLOqm/Lvn6h1UqABlygQTEnIZX18N9+5BxbJV8PJ0yTayZt2GPylbFkqVUpbJKlWChAR4EK7CxTOJxLg1lKlejWULN9KqXvYv7mHDB+OSdJlpc7YSEbkIL08HZkwZYlRVObB5DQ5tm8TAyQe5fzXz5avA8kkQm+NLVeCRSwYSCcTFxZl8zterV89KEhVM6jk4s7h0RdZGPeabhHhsERwE2qBYiaT/UMHHYmVo8eLFfPzxxwwdOpRt27YxbNgwrl+/zokTJxg1alReyFjgMCeEMmMZjXPnYO5c+PfcTTq0DmDd8lFZ1maKio6lb18YMSKtbcUK2LxZ0KDbdAAadK9Os5ZlIPaAWXLX869I00aV9WH9AbVNJ74LbF6DU0HvKOU4MqFE7LNlny7oyCUDSXHm4cOHDBs2jJ07d5rcLicAmaNB4AF8BNxDyfEfABxK5z+0JOIBW6MjlTy4QF83T0Z7lQZkiL61sFgZWrp0KStXrmTAgAGsXr2aDz/8kKpVqzJ16lSePHmSFzIWKHQhlHWA14FdmhS9CbRluv3SZ969eFGpIF+pEvTtKzh58gId+lzIslils5MjQUHxBmU4goIAtWOO5A76+wR9UwtmtgzUcvJkVJYylIg9QC/PHJ2qSFDQlwxknSZJTohOSjFrv/HjxxMZGUlQUBDt2rVj69atPHjwgJkzZ/Lll1/msZSFj/TvhReB9UAJ4BWUoke7gMoqJZPNkogH/BodSQBKhckdwK/RkQC0cXbL9P0iFaK8xeKn6a1bt2jVqhUATk5OREdHAzBo0CBatGjBkiVLclfCAkZWIZTpjDgGmXfj4rRUqqRYdswtVlmutBfBV+INfIZCQsDJ3TtHci/8YpEsmPmMFKQlA1mnSZITzK3TdODAAbZt20aTJk1Qq9VUrlyZzp074+7uzpw5c+jZs2ceS1q4SP9e6APUA1KTk6QmMYFbGiWn2tZURehUhu1boyMJTU6SIfpWwuKki2XKlNFbgCpVqsQ///wDwM2bNxFC5K50BZDQ5CS6YhxCGZohhFKXebe6bx0eP1bRrBlGxSqDL2derDImLp5GjSAyErZsUf42bgxoc5bE6/LFSzRubFwwMysZJAoPHz7k+eefx83NjTp16tCwYUODj0RS1IiNjcXHxwc
ALy8vHj58CEDdunWLtV9oZqR/LwQDXciYxASSUt+PAsUilHG7wPz3iyT3sVgZ6tChg76i97Bhw5gwYQKdO3fm1VdfpW/fvrkuYEGjsp09GfImKyZQO2P/Gl3m3Q6tAzh5Um2QTTq7YpX+NSsQFaVmwwbYvRs2bIDISDWO7pUyPWbZqv341BhNaddqHNv0Dnev7AWUbNjx8YlGGa1lwUzzSL9k4OTkxK5du1izZg3Vq1e3qLq9RFJYqFmzJpcvK1nn69evz4oVK7hz5w7Lly+nbNmyVpYufzibGMcH4WH0vX2VXrev0vv2NT4IV7JK67a9cuc6H4SH4WVjw2YUpeYhsBPDd8QOwFmtvG5Vqd8zblehvEd0/VRJ/bsZ0+8XSe5i8TLZypUr9UUsR40aRYkSJfj777/p1asXI9J7+xZRMoZQKoHn8LVHyUyPyUmxStPHqPBvbzob+LJV+xn1wY+QuhKt1ezg+olVxEc/4P6VnZQrB6Gh6JfdTpxQceuWLJhpDnLJQFLcGDduHPfu3QNg2rRpdOvWjZ9++gl7e3tWr15tXeHygfQ+QMNQlJWLQHhCHGMS4lCBgV/PJUADOAO9UXyGDJOYQEdHF0Bxlv41OtJoe383L8rZ2rEgIQ5nFH+jHcANoK+TWz798uKLxZYhtVqNrW2aDtW/f38WL17MmDFjsLcv+tqrLoTSxdGZdTa2uDg683XpStR1cMr0mPRLZseOelHdtw4Ht32cZY0mU8f8tntzphmap83ZSlq5jXnAaaAO967swdcXVq3SsnChEqq/dStER7lkK4NEITeXDObMmUPTpk1xc3PDx8eHPn366GfgEklB4fXXX2fo0KEANG7cmNDQUE6cOEFYWBivvvqqdYXLB9L7AOmepgFADcAVqJ1u28nU7x4oT9+fgAPALWAhEA6UBU4nKLlIRnuV5lU3T66mbr+KogiN8vLh7/gY6mH4FK8LHI2PzvPfXNzJUThKREQE33//PRcvXgTA39+fYcOG4e2dM+fewkY9B2eLndl0S2bPcsxjlybw/Z8m942ITMTUSrTQXqZZM8VXqG5dmDdPV+HeTipCZqJbMvD19dUvGfj6+uZoyeDw4cOMGjWKpk2bkpKSwpQpU+jSpQvBwcG4uLjk0S+QSCzj888/5/3338fZWXnOOTs706hRI+Lj4/n888+ZOnWqlSXMG9KHvFdEcYIGmA2EoawCqDDt87MMxXk6GEX5cQUS0n20Wi0db13GWaXGy8YWe5WK+FQ/oi3REeyOjSJFwDsY+wwtToiTmazzGIstQ0eOHKFKlSosXryYiIgIIiIiWLx4MVWqVOHIkSN5IaPEDJSyGsYr0So1BB23zF9JYkjGJYOdO3dSqVIlFi9ezOzZsy3qa9euXQwdOpQ6depQv359Vq9eza1btzh16lReiC6R5Ijp06cTExNj1B4XF8f06dOtIFHeowt5rwGMB5yAtkA74C7wFlAT5cm6EcMn7UaUHLR3UZa3ooH7wEtAJSAJ0KJYlt4WWpxTkogVghQUq9I4oJxWS7TQsg5jf6LyQGyCsnR3NjFnQTSSrLHYMjRq1CheeeUVli1bho2NEtqr0Wh49913GTVqFOfOnct1ISXZM31y31SfIcOV6HI1ehJ6ZYdF/koSQ15//XX9v3VLBpcuXaJSpUqULJm5r5g5REVFAWRqVU1MTCQxMVH/3ZIyIflFdkniZBK5wocuu3pG/vvvvyK7AmAq5L0kUBnjMPlgoCnQFeVJG4riQ5R+v6ZAJMpSl66fjOH0t0y0BYNBWY/LwKHU/mSYfd5hsWXo2rVrvPfee3pFCMDGxoaJEydy7do1i/qS/hO5x8hhHflm/mBKet9CbbMYW4cwqjV7g6qNBxLQcSq+fm3M9leSGPL5558TF5c2G9MtGbi4uPD555/nuF+tVsv48eMJDAwkICDA5D5z5szBw8ND/6lYsWKOz5cX6BxN4xLieF2TYjR7zW67pGDh5eWFt7c3KpWKGjV
q4O3trf94eHjQuXNnXnnlFWuLmSeYCnm3NdHWA3BDsdYsBuIBexP7dUVRbLLqx9ZEmx3g4uis7/sQ0AoZZp/XWKwMNWrUSO8rlJ6LFy9Sv359i/rS+U/8888/7N27l+TkZLp06UJsbNErenU06Ao9+8+nSsNx9Ow/n6NBV3L9HCOHdST8yhIexFyj5UvLKVu9E6BUh//lt3Xc/HcRf6z/QCpCFpJXSwajRo3i/PnzrF+/PtN9Jk+eTFRUlP4TFhaW4/PlBRkdTU+ilB9YG/XYrO2SgsXChQtZsGABQgimT5/OV199pf8sX76cv/76i2+++cbaYuYqujB5DcaOBikm2nQFSn4DOqD4EGFiv90oYz2zfnaktmdsc1Grme9TkYaOzrigWIR02zNL4yJ5dixeJhs7dizjxo3j2rVrtGjRAoB//vmHb775hrlz53L27Fn9vtll5t21y7C+1erVq/Hx8eHUqVO0adPGUtEKLEeDrtDBglIYkoJFXiwZjB49mt9//50jR45QoULm/lsODg44ODjk6Bz5QWhyEq9j7PC5LnX2mt12ScFiyJAhAFSpUoXAwECDyOGiSPoQ+g7APgwdDZ6ihL2nbwtOPbYRiqP0dZScQBdN7BeQ2vY09XvG7VqMQ/Df8ygF5CyNiyTnWDzSBwwYAMCHH35ocptKpcpxNe+i4D9hitlfbZOlMAohXl5eqFQq/ZJBeoVIo9EQExPDO++8Y1GfQgjGjBnD1q1bOXToEFWqVMltsfOVynb27NKkMBNF0ck4e81uO8DW6AhWRT0iTqvFWa1mmEdJ+rp56bdLn6P8p23btly/fp1Vq1Zx/fp1Fi1ahI+Pjz54oE6dOtYWMVfIWF5pCfAecAXF4jPAzQuBYGt0JAtRlBcVYANcAy6RltDkODArtY+E1P02pp7nG6B+uu0qVLioIF4ILqGE17uo1bznUYo+bp5AWhqXtVGPWZc69r/2KJllGhdJzrFYGbp582ZeyGG2/0RhjGQIvnybloHGpTCOHZWlMAoyCxcuRAjBG2+8wfTp0/Hw8NBvs7e3x9fXl5YtW2bRgzGjRo3i559/Ztu2bbi5uXH//n0APDw8cHIqfA+57GavrZxcWZAQZzT7nejkCiiK0IKI8LSilVotCyLCAejr5pVlYWSpEOUdhw8fpnv37gQGBnLkyBFmzZqFj48P//33H99//z2bNm2yqL9vvvmG+fPnc//+ferXr8/XX39Ns2bN8kh688louRyNEkK/zsaWDeX99PvpKsoDvHLnOq9rUpiHYhHS+QIFoozvSSi+RPsq1dTv+27qsbrt62xsDPrPjJykcZHkDIuVocqVK+eFHHr/ib/++ivTfSZPnszEiRP1358+fVrgHEpNkb6Cva4CvQxvL/jkxZLBsmXLAGjXrp1B+6pVq/RJ7goLOouNEyqCUWa4KuAlN0/qOjixJOIBm6IjUaMsIVxFSUxXFvg7Poa+bl6sinpksmjlqqhH9HXzyrIwsnxJ5B0fffQRM2fOZOLEibi5pWU/7tChg8XFuH/99VcmTpzI8uXLad68OQsXLqRr165cvnxZn8zUWphjuczqGH8U36D0x6cvvZGT/iXWwWIHaoC1a9cSGBhIuXLlCA0NBZRZ9LZt23IkhM5/4uDBg9n6T7i7uxt8CgNTJvQmNBRGjlSzYoXyNzQUPp7Yx9qiScygbdu2hIaG8sknnzBgwADCwxXLxc6dO7lw4YJFfQkhTH4KoyI09kEYDxPiiENQGyU3S3Xg1+hIxt2/xa/Rkegq6fmj5FIpDTwAriQpy91xWq3JKJu41JI/snCldTh37pzJWpM+Pj48evTIor4WLFjA22+/zbBhw/D392f58uU4Ozvzww8/mNw/MTGRp0+fGnzyikEeJfSh7JNS/wYDg7Pwy0l/jCdwDkWBn5T69wLwRjq/H0v7l1gHi5WhZcuWMXHiRHr06EFkZKTeL8jT05OFCxda1JcQgtGjR7N161YOHDhQ6P0nMiM
n5TgkBYfDhw9Tt25dgoKC2LJliz6y7L///mPatGlWli5/0UXevPcgjNooyk9d0soHLEEJOz6TFE8ASpK69NtPoSSZi9Bq6HjrMloUf4qWpBWm3EiGmTXmFUaW5B6enp76RKPp+ffffylfvrzZ/SQlJXHq1Ck6deqkb1Or1XTq1Iljx46ZPCY/00nkpLxS+mMO2dhS2daeWyoVi4G7ajXveZU28vuxpH+JdbDY7v/111/z7bff0qdPH+bOnatvb9KkCe+//75FfRU1/4msyEk5DknBIDeXDAoz6f13nFB8JTagZNy1A44CnVGUndsmtpP6tx7K7LhG6j47UJxP+wNngRCgo4NSmiQ7nyNJ3tC/f38mTZrExo0bUalUaLVajh49yvvvv8/gwYPN7ufRo0doNBpKly5t0F66dGkuXbpk8pj8dofIiV+OJcdIv5/CQY4cqBs2bGjU7uDgYHF+oKLkPyEpupw7d46ff/7ZqD0nSwaFiYxRXE+1GkoD94AYYDnK8tcWlKWCvwAHlMrdicAmIA7FWnQO+BhoBvyOkq03vZ9QU+AmSj2oG8Cf8dGcTYzj7/gY/FLbN6AoWvGk+RxJ8obZs2czatQoKlasiEajwd/fH41Gw2uvvcYnn3ySp+cu6OkkJEUTi5WhKlWqcObMGSNH6l27dlG7dm2L+hKpReokkoKMbskg4zKupUsGhQlTUVy6/CoBwFDSLDpaFEvRyHRtjVESLKbfty3gi6LMmMrWuxBFuRqduv/YB2F4qG14E2WJTcckZJ6ivMbe3p5vv/2WTz/9lPPnzxMTE0PDhg2pXt2ypf2SJUtiY2PDgwcPDNofPHhAmTJlclNkieSZsFgZmjhxIqNGjSIhIQEhBMePH+eXX35hzpw5fPfdd3kho0RiVXJryaAwsTbqsd4KtBjF2mODYpnJrr5S09S2ehhGgTUCwlGsRzsxjMDZiVLSoCyGFqAIBLsy7Ct9hvKPSpUq6ZeoTCUezQ57e3saN27M/v376dOnD6CkUdm/fz+jR4/OTVElkmfCYgfqt956i3nz5vHJJ58QFxfHa6+9xrJly1i0aBH9+/fPCxklEqsye/ZsatWqRcWKFYmJicHf3582bdrQqlWrPF8ysBYXEhO4ixL9NRYohxI2b059pa5AVOrfjPtGo1T31mXj1UXgnEdZUruH4mN0F8V3SCuEjMaxEt9//z0BAQE4Ojri6OhIQEBAjia8EydO5Ntvv2XNmjVcvHiRkSNHEhsby7Bhw/JAaokkZ+QoccrAgQMZOHAgcXFxxMTEWD1XhESSl+TWkkFhIlFojfL/lMLYopO+vpKubXdqHztM7JsENEdRdMKBr1OPt0OJPMtYHfyuSsVinwqsjXrMqqQENIAnKn6MeiQzUechU6dOZcGCBYwZM0afWPTYsWNMmDCBW7duWVSg+NVXX+Xhw4dMnTqV+/fv06BBA3bt2mXkVC2RWBOLlaH4+HiEEDg7O+Ps7MzDhw9ZuHAh/v7+dOnSJS9klEgKBM+6ZFDYyGgF6gmsR1kG60paZJfAuOaSA0qiRd2+u1O/g+J8PRhluesRMAA4aOJ8PYBVqKjn4MwgD/Q+TF2BXQlxMhN1HrJs2TK+/fZbffklgF69elGvXj3GjBljkTIESi45uSwmKchYvEzWu3dvfvzxRwAiIyNp1qwZX375Jb1799ZHh0kkRY3cWjIoDJxNjAOMq2xfQIkGK4cSIXYx9e9SFCvPQpRaTY4oFqBKqftuSP2rC7mIt7XX51zxt3fgHErdJlP5hKrZK1FF6TNRz0NxzvZPbZfkPsnJyTRp0sSovXHjxqSkpFhBIokkb7FYGTp9+jStW7cGYNOmTZQpU4bQ0FB+/PFHFi9enOsCSiTWZurUqYwbN44XXniBjRs3snHjRl544QUmTJjA1KlTrS1erqKLIitJWrVunV/POcAPuIOi7ACsQFnyKoMSVfZ16UrsqVQTT7UNt1B8f3Q+QKGAl9qGNeWqsKG8H/N
9KvKulw/BKIUx058vo2+QzESdvwwaNMjk5HblypUMHDjQChJJJHmLxctkcXFx+sRze/bs4cUXX0StVtOiRQt9aQ6JpCiR20sGBZn0FpjvgM9QLD4C8LW1Z482Re+3U8bWDg2CdRqNUUXtGvYOhCfE6S1DtVGWx3zsDfPHpK/MHZmUyF0Eq1BRzd7BoD9Z4yn/+f7779mzZw8tWrQAICgoiFu3bjF48GCDpIgLFiywlogSSa5hsTJUrVo1fvvtN/r27cvu3buZMGECAOHh4YWmVphEYgnFackgfRXvkakfpcq2Le+XKG3ot5OkIRhM+u3oqtm7oFiGdqFYht43EQVmToZeXX9NUCxCu1AsR1/LqLI84fz58zRq1AiA69evA0rOoJIlS3L+/Hn9fsXBd05SPLBYGZo6dSqvvfYaEyZMoGPHjvpIgz179pjMTC2RFHZ0SwYZZ8BFcckgKwuMJRXk01t81qVmsE5v6bGU3O5PkjUHDx60tggSSb5isTL00ksv8dxzz3Hv3j3q16+vb+/YsaPJKscSSVGguCwZZGWBmfHort5qBGl+O5llg87tmkyyxpNEIskrcpRnqEyZMkap1Js1a5YrAkkkBY3itGSQlQVG+u1IJJKiSo6UIYmkOFHclgwys8BIvx2JRFJUsTi0XiKRFE90ViMXR2d9nqCvS1eSfjsSiaTQIy1DEonEbKTfjkQiKYpIy5BEIpFIJJJijVSGJBKJRCKRFGukMiSRSCQSiaRYI5UhiUQikUgkxRqpDEkkEolEIinWSGVIIpFIJBJJsUYqQxKJRCKRSIo1UhmSSCQSiURSrJHKkEQikUgkkmKNVIYkEolEIpEUa6yqDB05coQXXniBcuXKoVKp+O2336wpjkSSL3zzzTf4+vri6OhI8+bNOX78uLVFkkgkkmKNVZWh2NhY6tevzzfffGNNMSSSfOPXX39l4sSJTJs2jdOnT1O/fn26du1KeHi4tUWTSCSSYotVlaHu3bszc+ZM+vbta00xJJJ8Y8GCBbz99tsMGzYMf39/li9fjrOzMz/88IO1RZNIJJJiS6GqWp+YmEhiYqL+e1RUFACxWq21RDLgaVwSquj4POs/WhNNSnKc/ntSXAzRT6Oxi4vPcj/A5H65QVy08bkKCynJyvUQQuTL+ZKSkjh16hSTJ0/Wt6nVajp16sSxY8dMHlPQx7ykcKEbN/k15nMDnaxyzEtygtljXhQQALF169Ys95k2bZoA5Ed+cvUTFhaWL2P8zp07AhB///23QfsHH3wgmjVrZvIYOeblJy8++TXmc4OwsDCrXy/5Kfyf7MZ8obIMTZ48mYkTJ+q/a7Vanjx5QokSJVCpVDnu9+nTp1SsWJGwsDDc3d1zQ1SrUZR+C+Tt7xFCEB0dTbly5XK139xEjvnsKUq/BeSYz0i5cuUICwvDzc1NjvlU5G8xH3PHfKFShhwcHHBwcDBo8/T0zLX+3d3dC/3A0lGUfgvk3e/x8PDI9T4zo2TJktjY2PDgwQOD9gcPHlCmTBmTx8gxbz5F6bdA0RjzuYFaraZChQq51l9RGifyt5iHOWNe5hmSSPIJe3t7GjduzP79+/VtWq2W/fv307JlSytKJpFIJMUbq1qGYmJiuHbtmv77zZs3OXPmDN7e3lSqVMmKkkkkecPEiRMZMmQITZo0oVmzZixcuJDY2FiGDRtmbdEkEomk2GJVZejkyZO0b99e/13nGzFkyBBWr16db3I4ODgwbdo0o+WIwkhR+i1Q9H7Pq6++ysOHD5k6dSr379+nQYMG7Nq1i9KlS+erHEXpuhal3wJF7/cUFIrSdZW/JfdRCVGIYiwlEolEIpFIchnpMySRSCQSiaRYI5UhiUQikUgkxRqpDEkkEolEIinWSGVIIpFIJBJJsUYqQxKJRCKRSIo1UhlKZe7cuahUKsaPH29tUXLMnTt3eP311ylRogROTk7UrVuXkydPWlssi9FoNHz66adUqVIFJycn/Pz8mDFjRqE
qLlkYkGO+4CDHfP4gx3zBoaCN+UJVjiOvOHHiBCtWrKBevXrWFiXHREREEBgYSPv27dm5cyelSpXi6tWreHl5WVs0i5k3bx7Lli1jzZo11KlTh5MnTzJs2DA8PDwYO3astcUrEsgxX7CQYz7vkWO+YFHQxnyxV4ZiYmIYOHAg3377LTNnzrS2ODlm3rx5VKxYkVWrVunbqlSpYkWJcs7ff/9N79696dmzJwC+vr788ssvHD9+3MqSFQ3kmC94yDGft8gxX/AoaGO+2C+TjRo1ip49e9KpUydri/JMbN++nSZNmvDyyy/j4+NDw4YN+fbbb60tVo5o1aoV+/fv58qVKwD8999//PXXX3Tv3t3KkhUN5JgveMgxn7fIMV/wKHBjXhRjfvnlFxEQECDi4+OFEEK0bdtWjBs3zrpC5RAHBwfh4OAgJk+eLE6fPi1WrFghHB0dxerVq60tmsVoNBoxadIkoVKphK2trVCpVGL27NnWFqtIIMd8wUSO+bxDjvmCSUEb88VWGbp165bw8fER//33n76tMN8kdnZ2omXLlgZtY8aMES1atLCSRDnnl19+ERUqVBC//PKLOHv2rPjxxx+Ft7d3obzhCxJyzBdc5JjPG+SYL7gUtDFfbJWhrVu3CkDY2NjoP4BQqVTCxsZGpKSkWFtEi6hUqZJ48803DdqWLl0qypUrZyWJck6FChXEkiVLDNpmzJghatasaSWJigZyzBdc5JjPG+SYL7gUtDFfbB2oO3bsyLlz5wzahg0bRq1atZg0aRI2NjZWkixnBAYGcvnyZYO2K1euULlyZStJlHPi4uJQqw3d2WxsbNBqtVaSqGggx3zBRY75vEGO+YJLgRvzVlHBCiiF2Xx6/PhxYWtrK2bNmiWuXr0qfvrpJ+Hs7CzWrVtnbdEsZsiQIaJ8+fLi999/Fzdv3hRbtmwRJUuWFB9++KG1RStyyDFfMJBjPv+QY75gUNDGvFSG0lGYbxIhhPjf//4nAgIChIODg6hVq5ZYuXKltUXKEU+fPhXjxo0TlSpVEo6OjqJq1ari448/FomJidYWrcghx3zBQI75/EOO+YJBQRvzKiFkilOJRCKRSCTFl2KfZ0gikUgkEknxRipDEolEIpFIijVSGZJIJBKJRFKskcqQRCKRSCSSYo1UhiQSiUQikRRrpDIkkUgkEomkWCOVIYlEIpFIJMUaqQxZgaFDh9KnT59Mt69evRpPT898kyc7fH19WbhwocXHPX78GB8fH0JCQnJdJh2PHj3Cx8eH27dv59k5JM+OHPO5hxzzhQM55nOP/BjzUhmS6Mntm3PWrFn07t0bX1/fXOszIyVLlmTw4MFMmzYtz84hKbrIMS8pbsgxbxqpDEnyhLi4OL7//nvefPPNPD/XsGHD+Omnn3jy5Emen0siyQw55iXFjaI05oudMrRp0ybq1q2Lk5MTJUqUoFOnTsTGxuq3f/fdd9SuXRtHR0dq1arF0qVL9dtCQkJQqVSsX7+eVq1a4ejoSEBAAIcPH9bvo9FoePPNN6lSpQpOTk7UrFmTRYsWPbPc27Zto1GjRjg6OlK1alWmT59OSkqKfrtKpeK7776jb9++ODs7U716dbZv327Qx/bt26levTqOjo60b9+eNWvWoFKpiIyM5NChQwwbNoyoqChUKhUqlYrPPvtMf2xcXBxvvPEGbm5uVKpUiZUrV2Yp744dO3BwcKBFixYG7RcuXOD555/H3d0dNzc3WrduzfXr14E0s/Ls2bMpXbo0np6efP7556SkpPDBBx/g7e1NhQoVWLVqlUGfderUoVy5cmzdujUnl7bII8e8HPPFDTnm5Zi3GKtURLMSd+/eFba2tmLBggXi5s2b4uzZs+Kbb74R0dHRQggh1q1bJ8qWLSs2b94sbty4ITZv3iy8vb3F6tWrhRBC3Lx5UwCiQoUKYtOmTSI4OFi89dZbws3NTTx69EgIIURSUpKYOnWqOHHihLhx44ZYt26dcHZ2Fr/++qt
ejiFDhojevXtnKueqVauEh4eH/vuRI0eEu7u7WL16tbh+/brYs2eP8PX1FZ999pl+H51cP//8s7h69aoYO3ascHV1FY8fPxZCCHHjxg1hZ2cn3n//fXHp0iXxyy+/iPLlywtAREREiMTERLFw4ULh7u4u7t27J+7du6e/LpUrVxbe3t7im2++EVevXhVz5swRarVaXLp0KdPfMHbsWNGtWzeDttu3bwtvb2/x4osvihMnTojLly+LH374Qd/PkCFDhJubmxg1apS4dOmS+P777wUgunbtKmbNmiWuXLkiZsyYIezs7ERYWJhB36+++qoYMmRIpvIUV+SYl2O+uCHHvBzzOaFYKUOnTp0SgAgJCTG53c/PT/z8888GbTNmzBAtW7YUQqTdJHPnztVvT05OFhUqVBDz5s3L9LyjRo0S/fr103+39Cbp2LGj+P/2zjsuyvoP4O87NggIMhyg4EYBF0pKpqlpZuVomFqZmSstR/YzzRxlopmzTG1py8xcWTlymwtxpYii4gAnDmSvu3t+fzzcwcGhdwgccN/364V4z/g+n+M+33s+z+f7GTNnztQ75qeffpJq1Kihew1IkydP1r1OTU2VAGnz5s2SJEnShAkTpMDAQL0xPvzwQ90kMXRdLXXq1JFeffVV3WuNRiN5eXlJS5YsKfI99OzZU3rzzTf1tk2cOFHy9/eXsrOzDZ4zcOBAqU6dOpJardZta9SokdS+fXvda5VKJTk5OUm//vqr3rljx46VOnbsWKQ8lorQeaHzlobQeaHzxcG6dPxN5ZNmzZrRuXNngoKC6NatG127duXFF1/Ezc2NtLQ0YmNjGTx4MEOGDNGdo1KpcHV11Runbdu2uv9bW1sTEhLCmTNndNsWL17M999/T1xcHBkZGWRnZ9O8efNiy/3ff/+xf/9+Pv30U902tVpNZmYm6enpODo6AhAcHKzb7+TkhIuLCwkJCQDExMTQunVrvXHbtGljtAz5x1YoFFSvXl03tiEyMjKwt7fX23bixAnat2+PjY1Nkec1bdoUpTJv9dbb25vAwEDdaysrK6pVq1bo2g4ODqSnpxv9fiwFofNC5y0NofNC54uDRRlDVlZWbNu2jQMHDvDPP//wxRdf8OGHHxIREaFTtG+++YbQ0NBC5xnLqlWrGD9+PHPnzqVt27Y4OzszZ84cIiIiii13amoq06dPp0+fPoX25VfEgsqnUCjQaDTFvm5+TB3bw8ODxMREvW0ODg7Fuo4x17537x6enp4PHd/SEDpffITOV0yEzhcfS9Z5iwugVigUhIWFMX36dI4fP46trS3r16/H29ubmjVrcvHiRerXr6/34+/vrzfGoUOHdP9XqVQcPXqUgIAAAPbv30+7du14++23adGiBfXr19cFjhWXli1bEhMTU0iu+vXr61nXD6JRo0YcOXJEb1tkZKTea1tbW9Rq9SPJqqVFixZER0frbQsODubff/8lJyenRK6Rn6ioKFq0aFHi41YGhM7nIXTeMhA6n4fQeeOwKGMoIiKCmTNncuTIEeLi4li3bh23b9/WKfj06dMJDw9n0aJFnDt3jlOnTrF8+XLmzZunN87ixYtZv349Z8+eZeTIkSQmJvLmm28C0KBBA44cOcLWrVs5d+4cH330USFlNJUpU6bw448/Mn36dE6fPs2ZM2dYtWoVkydPNnqMYcOGcfbsWSZMmMC5c+dYvXo1K1asAOQvDpCLbqWmprJjxw7u3LnzSO7Ibt26cfr0ab2nhlGjRpGcnMwrr7zCkSNHOH/+PD/99BMxMTHFvg7IGRBHjx6la9eujzROZUTovNB5S0PovND54mBRxpCLiwt79+7lmWeeoWHDhkyePJm5c+fSvXt3AN566y2+/fZbli9fTlBQEB06dGDFihWFnhhmzZrFrFmzaNasGfv27WPjxo14eHgAsjL26dOHvn37Ehoayt27d3n77bcfSe5u3brx119/8c8//9C6dWsee+wx5s+fT506dYwew9/
fnzVr1rBu3TqCg4NZsmQJH374IQB2dnYAtGvXjuHDh9O3b188PT357LPPii1zUFAQLVu2ZPXq1bpt1apVY+fOnaSmptKhQwdatWrFN99888C1ZWP4448/qF27Nu3bt3+kcSojQueFzlsaQueFzheLUgnLrqRoswyOHz9ublFKhBkzZkg+Pj6lNv5ff/0lBQQE6GUNlAahoaHSL7/8UqrXsFSEzpuG0PmKj9B506gsOm9RAdSWzldffUXr1q2pVq0a+/fvZ86cOYwaNarUrtejRw/Onz/PtWvX8PX1LZVr3Llzhz59+tCvX79SGV9QsRE6L7A0hM4XD4UkSVKpjV7JuHz5Mv7+/hw/fvyRUijNxdixY/ntt9+4d+8etWvX5rXXXmPixIlYWwubWGAYofMCS0PovGUijCGBQCAQCAQWjUUFUAsEAoFAIBAURBhDAoFAIBAILBphDAkEAoFAILBohDEkEAgEAoHAohHGkEAgEAgEAotGGEMCgUAgEAgsGmEMCQQCgUAgsGiEMSQQCAQCgcCiEcaQQCAQCAQCi0YYQwKBQCAQCCyaCt2sRKPRcP36dZydnVEoFOYWR1DBkCSJlJQUatasiVJZMZ4LhM4LHgWh8wJLw1idr9DG0PXr10utS67AcoiPj8fHx8fcYhiF0HlBSSB0XmBpPEznK7Qx5OzsDEDr3l9ibeNgZmkEFQ1VTgaR60fp9KgiIHRe8ChUZJ1fV7MuThXAmxX67/t0n1Chb62VCmN1vkJ/YlqXqbWNA9Y2jmaWRlBRqUiud6HzgpKgIuq8k1KJk9LKzNI8HBdnB6xtbMwthqAAD9P58m9mCwQCgUAgEJQiwhgSCAQCgUBg0QhjSCAQCAQCgUUjjCGBQCAQCAQWjTCGBAKBQCAQWDTCGBIIBAKBQGDRCGNIIBAIBAKBRSOMIYFAIBAIBBaNMIYEAoFAIBBYNMIYEggEAoFAYNEIY0ggEAgEAoFFI4whgaAMWbJkCcHBwbi4uODi4kLbtm3ZvHmzucUSCEoFoe+CioIwhgSCMsTHx4dZs2Zx9OhRjhw5QqdOnejZsyenT582t2gCQYkj9F1QUajQXesFgorGc889p/f6008/ZcmSJRw6dIimTZuaSSqBoHQQ+i6oKAhjqJyQlBBDfNQfpN2/jlPVmvgG9sTVq5G5xRKUImq1mt9//520tDTatm1r8JisrCyysrJ0r5OTk8tKPIGgRDFG30HovMA8iGWyckBSQgwnt88g8aaG7IyBJN5Uc3L7DJISYswtmqAUOHXqFFWqVMHOzo7hw4ezfv16mjRpYvDY8PBwXF1ddT++vr5lLK1A8GiYou8gdF5gHoQxVA6Ij/oDaApSJDAbpCNAk9ztgspGo0aNOHHiBBEREYwYMYKBAwcSHR1t8NiJEyeSlJSk+4mPjy9jaQWCR8MUfQeh8wLzUKxlspycHG7evEl6ejqenp64u7uXtFwWRdr96yANBGxyt9iA9DRp938wp1iCUsLW1pb69esD0KpVKyIjI1m4cCHLli0rdKydnR12dnZlLaJAUGKYou8gdF5gHow2hlJSUvj5559ZtWoVhw8fJjs7G0mSUCgU+Pj40LVrV4YOHUrr1q1LU95KiVPVmmRnbgFpBrJBlAOKLThVrQmUj3ii8iBDZUWj0ejFSAgElRmh74LyiFHLZPPmzcPPz4/ly5fTpUsXNmzYwIkTJzh37hwHDx5k6tSpqFQqunbtytNPP8358+dLW+5KhW9gTyAaFCHAhNzf0dQO6lUu4onKgwyVhYkTJ7J3714uX77MqVOnmDhxIrt372bAgAHmFk0gKHGEvgsqCkZ5hiIjI9m7d2+RqZBt2rThzTffZOnSpSxfvpx///2XBg0alKiglRlXr0YEd5mc63n5AaeqNakd9BEung2J2vkZefFENrL3SBEixxMF9iwTb41+TJO+DK6d/lfi1yuPxMXFceXKFd3ScNOmTYvlyk9ISOD111/nxo0buLq6EhwczNa
tW3nqqadKQWqBoPiUhM4LfRdUFIwyhn799VejBtNmCwhMx9WrkUHDoqh4opR733Fy+wxkI2Ug2ZlbSLw5g+Auk0vcILLUmKbLly+zZMkSVq1axdWrV5EkSbfP1taW9u3bM3ToUF544QWUSuNyEb777rvSElcgeGRKWueFvgsqCiKbrJzjVLUmKLYAOblbcnJfqymrDLSiZNDGNFVG3n33XZo1a8alS5eYMWMG0dHRJCUlkZ2dzc2bN9m0aROPP/44U6ZMITg4mMjISHOLLBA8EkLnBZaMydlkmZmZfPHFF+zatYuEhAQ0Go3e/mPHjpWYcAI5nijxprwshfR0rlESDZIzSN0oC29NUTLUDvqoxK9VXnBycuLixYtUq1at0D4vLy86depEp06dmDp1Klu2bCE+Pl4kDwgqNELnBZaMycbQ4MGD+eeff3jxxRdp06YNCoWiNOQS5FJUPFHcqQ0k3iw6A62kZagX8jpXTq5DlbUIa1t7/IIH4uLZsMSvVV4IDw83+tinn366FCURCMoGofMCS8ZkY+ivv/5i06ZNhIWFlYY8AgPkjydKSogh7tQGUu7FgXQfCABeKFVvTVJCDLFHfgSaAt1QZW/hwpEfcKzqK9LrBQKBQFDhMTlmqFatWjg7O5eGLIKHkD/FXZU1GGgCXMLa9hvcqlvR7KmPSsVbY+kVsu/evcvIkSNp0qQJHh4euLu76/0IBJUNofMCS8Nkz9DcuXOZMGECS5cupU6dOqUhk6AICqW4I8fxOFezIrAUU9wtNZtMy2uvvcaFCxcYPHgw3t7eYmlYUOkROi+wNEw2hkJCQsjMzKRu3bo4OjpiY2Ojt//evXslJpxAH3MZJQ+rkF3Z+ffff9m3bx/NmjUztygCQZkgdF5gaZhsDPXr149r164xc+ZM8cRQRmhbYeRkpQDLgGeADsAeYBk5WTlE7fzskYsuFtVywxKzyfLTuHFjMjIyzC2GwASun9uWG/CfibWdPXWC+1CzYeFCf6LNjGGEzguMwdD8AXkVI+XuZVBIgBXO7rXL/dwy2Rg6cOAABw8eFE8MZYQ2TkheHhsFbAI6AZ2BHUBTJE13Em8+WtFF/esULuJYVIVsS+Crr77igw8+YMqUKQQGBhbyhrq4uJhJMoEhrp/bRmzkciAQ6I4qa1Pua/QMoofpvCUjdF7wMAzPn09AAvAHkpHjWruTeGNzuZ9bJhtDJf3EcO3aNSZMmMDmzZtJT0+nfv36LF++nJCQkBK7RkXGYJwQLYFd8naOYqhFRn6L3dbRFQUSWenJRT79PqzlRlEVsi2BqlWrkpycTKdOnfS2axsVq9VqM0kmMMSVk+uQDaHcuZE7Z66cXKdnDIk2M0UjdF7wMAzOH1oC8UADoApwmLw5WL7nlsnG0KxZs3jvvff49NNPCQoKeqQnhsTERMLCwnjyySfZvHkznp6enD9/Hjc3N1PFqrQYjBPiGeAC0B1D8UOFLPaMTUA08ArZmVEGLXRLD5J+EAMGDMDGxoaVK1eKpeEKgCork0Jzg2dQZS3SO07ofNEInRc8jKLvTd8CZ4CXqUhzy2RjSFtsq3Pnznrbi/PEMHv2bHx9fVm+fLlum7+/f5HHZ2VlkZWVpXudnJxs9LUqKkUFL1vb2qPK2oxsceduZzNOVWvKFrvkB9QEViPXIsoE7stp8Qaefi09SPpBREVFcfz4cRo1Kp/u3cqIsbE8ho6ztrNHlbUJ/bmxCVDpxdYJnS8aofOWizFz7/q5bWRnJiLPq4LzTI18z9mqv6+czy2T6wzt2rWLXbt2sXPnTr0f7TZT2LhxIyEhIbz00kt4eXnRokULvvnmmyKPDw8Px9XVVffj6+trqvgVDjkgLVoOXmZC7u9ovPzDgChkt+SE3N9RuPu0kAPXuATcQLbOr+e+PkaehX7dqOvUDupVBu+yfBMSEkJ8fLy5xbAY8tfTys4YSOJNNSe3zyApIcao4+S5cRr9uREN9NUbS+h80Qidt0yMmXu
6mDzJB9kDpJ1nrUBxBkgFzqF/f2pFeZ9bCil/W+Iyxt7eHoBx48bx0ksvERkZyejRo1m6dCkDBw4sdLwhz5Cvry9tX/4OaxvHMpO7pCju02/toF5yO44b8ciKp0G2a6vgVsOXlHuXUGX5khczkYOslLeBeFCE4Fa9cG0iQ9epzEHSqpx0Dq4eTFJS0gOXd3///XemTZvG+++/b3BpODg4uLRF1ZGcnIyrq2uF1XljiNr5GYk3Nfni5HIM6uyDjnP3aZGbTZYO2AN/Ak8UGkvovGHKo85v9amPk9KqzK5bXNod+5AOo20efmA5xJi5d3DNiHz3l8PAp8AeUKho9tSHSJJkMJvMXHPLWJ03eZls+fLlVKlShZdeeklv+++//056erpBI6YoNBoNISEhzJw5E4AWLVoQFRVVpDFkZ2eHnZ2dqSKXS0zJZDEUvJx8+wKyISRnzMjuydMk385CYWWLoZgJWJbv6bdwWrwlB0k/iL59+wLw5ptv6rYpFAoRTPqIFPUw8KBYnvwp86ACngR6IXt+qoN0l8Qbt0m8eZqajZ7izpUjZGd0BmYDA4EmIAWSdn8HIHS+KITOWyby3OuM/OBwSt4oaUi8kcPBNSMAK1RZqUAQ8vwMQ773TABpHrFHfyE7PQmnqjWp06w3964e161CGOt3MVe5C5OXycLDw/Hw8Ci03cvLS2fUGEuNGjVo0qSJ3raAgADi4uJMFavC8agtLtTqLPIyZmYjL4E1Ra3Owtm9NrAZ2SMEcj2ipUAGcJZajZ6u1E+/Jc2lS5cK/Vy8eFH3W2A6D3LHO1WtmVvLSqu/cryBlY0NsZHLc59K3wUaA9uRXfLtkJ9S3YExIDXk+tlNaDRZwCrkpeKXgWvAKuwcRWr4gxA6b5nI8+JX5LlUDxgJVAfUqLJ8c9tANUaeU3tyz8oBfgfUpN61k+fzDRWxkctJvJHywKXughi7RF4amOwZiouLMxjkXKdOHZONmLCwMGJi9N/kuXPnLKLNxyNnskgKDHp/pPO4+7Qg8cZy5KWxYGTFbSofL23i2tm/sXf2MliETlAYS9DHsuZBae1FFfnMzrDHUMo8+AL3kXX9sN4+VdYZ+ToFtkN2Wb7dCofQectEQgE4A3XJmzOnkNPkC86754HhyJ6hK8jzr2AJGF/kh33jylaYs9yFyZ4hLy8vTp48WWj7f//9R7Vq1Uwaa+zYsRw6dIiZM2dy4cIFVq5cyddff83IkSNNFavCUfjpV1tNOoWonZ8VClg7uGYE//4yiAOrh3Dkz/eRI/Y3off0zO+gkLh4dBXgiKzAv5NXjyjPgxR75Gci1o0pdC1BYcLDw/n+++8Lbf/++++ZPXu2GSSq+MgPA90o/DBwXVfk0626FbYOP+iaEKtzVBhe/j2DvEzW1cA+K4PnZKUnlfp7rMgInbdMstOTkH0k+efSGQzPu0xgIXIsajWgm4FjzuS9NpC4U5AHfS+UNiYbQ/369ePdd99l165dqNVq1Go1O3fuZPTo0bzyyismjdW6dWvWr1/Pr7/+SmBgIJ988gkLFixgwIABpopV4dDPZBmAXFW6NpJmlJ5rUBu5r10aUOf4kZF8DaiPfsZMI+AySI2RNKOQLfujFHUzQLIuczdkRWXZsmU0bty40PamTZuydOlSM0hU8SlqKayo1FtJkrC2s6fwA8Am5DTeJsipvAX3qU26jpakhBiidn5msQ8MQuctE3leqNGfSwEUnnebQQHy/aUl8v3I0PwLyHv9gHmnnW85WUmFr1VGKfkmG0OffPIJoaGhdO7cGQcHBxwcHOjatSudOnUyOWYI4Nlnn+XUqVNkZmZy5swZhgwZYvIYFZH8T78K5Tr0vDf54of0q+nmeXYgEXgJ+Yl4AbKbMhA4knvcUfIU0dANpEOhawkMc/PmTWrUqFFou6enJzdu3DCDRBWfB6W1FxU34OodQOGU+dNADFAV2Z2vv8+jTmiR1ykKc8YtlBeEzlsm8rxMRZ5LrZDn0ln
0510rIAokX2AUcsXpCPTnX6vcc87xsHmXf75Jmj7I97SWDz2vpDHZGLK1teW3334jJiaGX375hXXr1hEbG8v333+Pra1tachYaXH1akRgp/9hY+eK4WrS14uspivHSGiXwMYAdgaOexrZfC94AzkDTC50LYFhfH192b9/f6Ht+/fvp2bN8ltErDxT1FKYi2fDIpMLkm6dQS4kmgAsyv1dDWu7e9g67MDW0QMUMcACUJyjVkAPAh5/p8jrFMWjJjdUBoTOWyauXo0IfuojnKvVQ6E8j0LxBVY2t7GyroK13VWs7b7D2i4eObj6DHkP6E0AV+TQjC+xtounXutBuNVweei8059vvwA7gXgUyi+Nmq8lhckB1FoaNGhAgwYNSlIWi+VBlXA1mqwiqumq0Q8mXYHhaqA5OLjUIjvjMuqcRaDQgOSde9wZZO/RuXJdGdTcDBkyhDFjxpCTk6Pr1bRjxw7+97//8d5775lZuopLUWntRSUXyO00hiJ/AWuZgFL5A6F9Fph8naIQbTqEzlsyrl6NaP70x0Xuj1g3BuhD4Qf01cBBtHOyZsOnjErSKTzfOgDDsLH7oVAtvNLEKGNo1qxZjB49GgcHh4ceGxERwZ07d+jRo8cjC2cpFJU9UzvoI1ITr+R23G6JrHByPSHZqZffE6QizwOUd5yVdRVCnpuju1ZeR+9qyKnGm4CLVPPJqyci0Of999/n7t27vP3222Rny1lI9vb2TJgwgYkTJ5pZusqHU9WaZGcUbjUjt9n4GdnwTwZcADUajZqIdWP0apI8Sq0S0aZD6LxAH+18SrkXhyo7BXlVouCDdwZyIpBc0iIpIeahcy4pISa3/EWBB3kzzDejlsmio6OpXbs2b7/9Nps3b+b27du6fSqVipMnT/LVV1/Rrl07+vbti7Ozc6kJXBkxtGRQL+R14k5tID7qbxxcamFlcxlYhJXNZRxdaiEvf+WPBbIG/JFTGVfn/vZHrcrQCwC9d/U4csGs/DFIgdy9eqwM33HFQqFQMHv2bG7fvs2hQ4f477//uHfvHlOmTDG3aJUSd58WGGo1Y+vohtxixhu5zpATcA9Vlq9ebM/1c9seKeZHtOkQOi/IQxfTc0Mt1xmSGgKXkZN28sfuWSEnAkWjyqr20DmnHVeV5UGhth5mmG9GGUM//vgj27dvJycnh/79+1O9enVsbW1xdnbGzs6OFi1a8P333/P6669z9uxZnnjiidKWu9KhjR8K7bMA38CexB75UfdlnpHijlqVSfBTH9Du5W9o9dwc6rV+Hf1YoBTgRWQD6VLu7xcBJ72bgRwb9DT6Ls7uImbICKpUqULr1q0JDAwsdiX08PBwWrdujbOzM15eXvTq1atQrS1LRzbY66Fv2NclPTEO/WSCxvqvc2N75KSD4sf8PCieydJ4VJ0X+l7x0cX06JJztEk8aeTNz3pAs9ztocjGzYPnXN640cgeJV+08UbmmG9Gxww1a9aMb775hmXLlnHy5EmuXLlCRkYGHh4eNG/e3GBVakHxMKbwVM2GT3H/1hnuxh1GjthXAT+j77rcAoSBtEF3vlgCMI7hw4czefJkfHx8Hnrsb7/9hkqlMqokxJ49exg5ciStW7dGpVIxadIkunbtSnR0NE5OTiUhernGmOUr2TAfSMHYIDlrMoi89hu3gZ4Uji1aQKGaJ48Y82PGFo5lRmnovKXre0Ug/xIYqJE0GkCDWpWDQmGFJKlBegHDMUKXcrdNyH39cu5vw3Mu//zPyUoCKX/skQKwR52Tbpb5ZnIAtVKppHnz5jRv3rwUxBGAcQGc189t427cIQr2JgM/4FXkdhxngSV65zd+fESR8UmCPDw9PWnatClhYWE899xzhISEULNmTezt7UlMTCQ6Opp9+/axatUqatasyddff23UuFu2bNF7vWLFCry8vDh69Gil96ga24/PysYWMgwnA8jV1IORv3Q3574eihx0qT3OhkeJQTClb2BlojR03pL1vSKg03W
pCTAYed5E5+5tiiRp7y2G5lm+GkJszX29FTmzrPCcKziv8sZtC4yTt/MWkmYTJ7eX/XwrdjaZoPSQA0jXINdtOIMcGxFDdoaSg2tGUCe4T+5SgD/gg2yJByC7LeOQn6AB5iP3bJIDUJ2q1tQtAcjW+Q+5nbotcwngQXzyySeMGjWKb7/9lq+++oro6Gi9/c7OznTp0oWvv/6ap59+utjXSUqSKyG7u7sb3J+VlUVWVpbudXJycrGvZW6MLbWfnZGI3EOsNbKHZyvyF7QS+Yu2YGuN/G0BYpDT7t9Fl0xgosFvzpYA5qQsdP5h+g6VS+fLO3lLVfnbaHgCtXlw+43TQDqyR2gT8n0qBTmWKNBgQ/BC80o37nvI96/887rs55swhsohSht74ASyEaTN+NIAL6PKOpmbDWaNrHwuucdsRTaEFMh1hzYh3xAOACeB07j7DAJEp25j8fb25sMPP+TDDz8kMTGRuLg43dJwvXr1UCgUjzS+RqNhzJgxhIWFERgYaPCY8PBwpk+f/kjXKS8Ym7Iut914BbmW1mpkAygQOYOlcM0thfJLJM0ioBawG/kBIAh5qW0RbtUDTDL4LTm1vjR13hh9h8ql8+Udg7peRNcChfJLrGy+A9RI6ioorO4hab5GoQQkF+ydrVFQj6z07QYfsg1f6xnkMI8CcaxmmG/CGCoFjE3rvX5uG1dOrkOVlYm1nT11gvtQs+FT3I0/SuGGlK2Rbw7HkK3paAw3oLyAHGsxAzkq/w/gCSCDe1ePi+asxcTNzQ03N7cSHXPkyJFERUWxb9++Io+ZOHEi48aN071OTk7G19e3ROUoK2SP51ryPJ76Na5ij/7M9ZhtgISst0G5Z0rAgdzy/4Xj3ap6y3Mr8aYapNa557QBhRNu1QNMrlUi4upkSlrnjdF3qFw6X97R1/XDwKfIZSsKL1Nb2djS5Il3jF660rbY0N4HbR1dDc4ra1tHVNnmn28mV6AWPBhjS/kX7DmmyvIhNnI5189tk7/7C1nm3ZANoAc3oJQ7Dmtfd0d2eW4CXhAZY+WIUaNG8ddff7Fr164HBqza2dnh4uKi91NRkVPmLwJXkb2Z8cg1rlrKhtDZTSA1AsYiLwEfRvbyxAOX8agtu94NpbyXZDq8SK0veYzVd6hcOl/e0ek6jZHjgbRzM19LjNyuBcaky2sxdB9MvRsLnC40r+oE57bgMPN8E56hEsbYeAP9nmN5np0rJ9fJT8BSQcs8X2Aam5AtJkNBpiG5V9A/R6ncStPWQbw3vXOpvv+KRHpKCgdXl+01JUninXfeYf369ezevRt/f/+yFcCM5NW4OkKezrfi7tVjJN48zYO9oa1QZ2c8MN6tpGLhRFxdyWHJ+l4R0Op69N4FqLKakDf/hiJnai5B9pm0QU5/Ny6Wp6j7oLN7Bta2VoXmlVPV2mafbyYbQ2lpacyaNYsdO3aQkJCARqPR23/x4sUSE64iYmy8gSo7A0OeHVXWQqrVbpWbKZa/mnQ08s1CLnClUCqRNIUrTusHtcnnWFmFoCCaLz6YSLuq+tkdlkyyVUaZX3PkyJGsXLmSP/74A2dnZ27evAmAq6urURXeKzJ5KfMFa1z98ABv6GrdcYk3FpB48xRIEmBFdsYd1JpsrJS2pN2/jpWNLdkZ99CoVKTcyyQ18Uqxv1BFXF3JYMn6Xt7RLUtLIP+jnX/7gc+QVx9UwHPIsac2IHmTeGMX//7yBigkrKzscHStjoSC7PQknKrWxN2nBfdvxYA0ioL3wZS7i3CrEUDjx0foLbeVh/lmsjH01ltvsWfPHl577TVq1KjxyEGklQ1j4g3WfdueGqtBlWNgXdYaNDmZQF3yis45IVvnvyN3566BJN1BrsSrbVrpArhjbX2ZqlXn4+ioJj5eAn7HzdWejycOoF0b0UvO3CxZsgSAjh076m1fvnw5b7zxRtkLVIbYOrqSbSBlXo4luGeEN9QKpBzyl5NIvnUaqAEMzE3Hvwa8ki/RABE
nZ0YsWd/LM7plad1cWoo8x54BnkKb5p5XvqIN8CWwPe8caRNq1WlS7p5Hm7CQnbGWxBvLke9HBVvqbAJqkXhTXS5LVZhsDG3evJm///6bsLCw0pCnwvOgPmP5cXZSkni/sGfH0alKgaJz+4EnkW8KTyPfIKJAUgC3kJXwDd12OzsFU6bkMG4c+PtDmzbZHDmiYvSknwhq4ktYqHD1mxNLKN5XFAokZG+lfsq8gnrYOriRnZ5/X35vaGvyap8UXlqWHwi0SQP6iQa3L2xk7q+zyuT9VQTKemnYkvW9PCMnKuSfS88gt9J4nsJp7i2RY4new/D8O5dv2ynAEfgC2ajKP59jgN1ykkM5LFVhsjHk5ub2wBoRlo6x8QaPhdQn8sQpsrOjSU09R5UqKmxtJeo0bc3Fc4n5vEszMZw1ZjibLCsrmg8+ADs7eOcdaN4cBg/WMHQodHtxFh3CApg0tqcwikzg1q1bjB8/Xrc0XPALXq1Wm0myikVWejKGUuaz0rcjB8pp9y1CbiQcivww8ODUevl47ev8S2vPkJa0kOfF0rAOY5eGhc5XcgotS3dAnn9rKNyuKf8cMzT/zufbdgY5ALsDsAv5/vUtcg283cgJEZTLUhUmG0OffPIJU6ZM4YcffsDR0bE0ZKrwPGz9c/81W54YMpNtA3pRxw+eeSabI0eUXLmi5KWR41j5zZF83qWryO7KggoYgyHF1GhieP55DYcPw/vvw7x5EBQEoaHw998qzl8+Tadep9m5YZIwiIzkjTfeIC4ujo8++kgsDT8Ccmr9KfKeInOAVrol5OzMqNw+Yr2Ql7v25DuuNaDGcNKANtuo8NKaW9Xi9ZCzdITOVy4KlntBIRlYlj6NHIZRuHq7W/UAUu5dQpVlaP5Juf+3QfYqaY8JAzYgP7zHI89hKK+lKowyhlq0aKE3GS5cuIC3tzd+fn7Y2NjoHXvsWNl3Pw/uUg9bxyplft3icGLzeeZO3QFAkyc/4mr0ejb+HYdX3cbs+qML7drc41r2szRo60PkhuXEnTRc8wHUKBSbkCT97S1bqhk2DAYPhuHD4ZdfYMYMiIyEJk1gxgwNI0YomTn/D/5e9b55/ggVjH379vHvv/+KFjSPiLtPi9x4Av2lYXefQThVrU3izRkolCFImkDkvnoFEwh8yGtOnD9poAYFkwa0iQafTBpYlm+x0iB0vvJgqL0MkorCc+kMchxq3naFYgsoztB5yFJux51jz4rPKDz/pHzbziGXz2gFdM8NEzkjH1LOW0AZZQz16tWrlMV4NGY/FYOLc/nPTLjr1Ik+m8/rXrt6NcLV6wMAmndvQLs2clzEwqej4WlbeHcYrv6RpKQUvgHY2UlkZRXe/vjjsjvb2hpat4a//4ZhwyAuDsaMkbeHhGg4uP9qmb3vio6vr6+IfSgBCnejDyB/MdDgLpPxYC9no7bjU6MuCkUW8ddXUNXFjqvXHUhOuQm6uCO5ObG9vUQVx1Rs7eTjrl23JzVtDW5V7fhk0kCGvdHJbO+3IiN0vvJQVJq7rcNNsjPOgaRt9K3AxeUmTz8tcfFiNMePx+Dm6sCGnz+gXRsboCkj/Gfx3cxwVFkLc7PJquDoWh3IJitdDgup5vMmd68eyxcmMgVJksyeOv8wjDKGpk6dWtpyVCr2R5xj5vw/iI65SpNGPkXG6OR3XV45VZ+X3Z4sdJyDnS22ttmoVNGkpMTg4KAmJ0dCrYaaNSVq1ozm8uUY/PzUXLsmceAA9OwJKhVEREBaGqjV8PnnEBgob//3XwU5WTn4txj9QPkEMgsWLOCDDz5g2bJl+Pn5mVucCktR3ejzxw4olRpsrHNw8fSh85vvEdAqVLdvaIdgghrfJCUFLl6UqFsXnJ3hwjkrLh3/XO9a2jk4a+Ef1PB2AxTcuHVP6LuRCJ2vPBRV7iUnYyHuNQPwadKbC4cW8nyPRIYNg1On4MoVDY6OGtIzJD2juPuAQUSfq603vvY+lpUu951zrOpLjYZ
dCslRnoKlDWFyzFDdunWJjIykWrVqetvv379Py5YtLb7O0P6Ic3TqNRM/P2gbpuHIkSRdjE7jTnlPqQVdl/FRW3jy+Vns2viB3hd1jkrN/STw89MQEqJh507w84OEBHjiCRg2TIPctwyWLYP16+Xfhw7BlSvQsiUcOwYLFsBjj8mG0PXrEvXqpRESIunJJ24Qebi5uektDaelpVGvXj0cHR0LLQ3fu3evrMWrkDyo7ERSQgxROz6mXj0FbcPURBzey85+ewnsPEWXfpuRVYUdOxQolU3RaLpz794mNJrThLTx07tO/jlYv6GGnTsTc+cjQt8fgND5kkGK2E/z7kPMLYaOc4e8uRVbONTCzS2HqvanOL0rCi//AA5HJhEaquH99+V7TI8ecDhCRadeM4ucL4aW4Mpj2rwxmGwMXb582WAmQVZWFleviqWXmfP/wM8PvvpKg7W1nMn1xhsKXhi4ADvHb0lTe+PTpHch16WkmYGKljz1whyefLyh7unVxtqKGjXA0xP27JGV9Ouv4eWXZc/P4MHy0pfWE5STA2vXynXp3nkHeveGEydg8mT4809rnBzsqVcvjSVLJJ18IoaoMAsWLDC3CJWOB5WdiD+9LnfeqHV6OXSYkqvR63VLyZKkBALRaOQAbI1GzqCUJCu96+Sfg5MnQ716sGQJQt8fgtD5kiH1jximLKxubjF0HFxkxa3YUyiVLdFonkGplB8iqleXWLgQRoxQUtXNhv+OK5g8GWrXzj9fYMQIipwvxnZcqAgYbQxt3LhR9/+tW7fi6uqqe61Wq9mxY4cotQ5Ex1ylbZhsCAGcOQM3bkj4+aXSpk0qEYcTiNpxCqWNG0iDKdQZWDFfL+NLaaXI9QDBiy/KWWHW1qBQyJ6f4cPl2KDISPm1QgEvvACHD8NXX0H9+nJ6/XPPwcH9ct+ykBBJJ5+IITLMwIHlO/D2nYkdcXR2fviB5YrOnDnaitWLF3I6Uj92IDM5jjY9NHp6GdpGw8a/43RnZ6cnUbiC9TNcu/ar3lXyz8ErV6BjR4S+G0F51/mKRLW0neYWQcftG5fp3FkiJSWaixdjqFtXjYuLRFRU/vlwmZ0bJvL0y7No00ZVaL7sP3Cbu06d2LElTm9sYzsuVASMNoa0QdQKhaLQpLGxscHPz4+5c+eWqHAVkSaNfDhyJInBg+Uv459/lr05y5blPZkOG67keoKEOmcLkibPdalUbiIoSM2nn+Y9vbYI8iMyMorBgyXq14e9e+HiRcjMlGsJ2drC7t2yNZ+eDnfvYjCb7MgRJU0ayQ0S88unUunvExTGysqKGzdu4OXlpbf97t27eHl5maXmyjNVt5erpIEly3cwNXw9ifezcKtqx/SJvRkxqHAfvGpVzrHX9hbX3BWo0OjiEexdahNxOE8vT5yAP/5QkpGRRsS6YThXq4ZGlYShzMp7t2/iUGsgjg721PR24/adFA7nek3r1JEfFPJ7UIW+P5zyqPOCh2MoXrVJIx/Oxiby9dcarK01nDgBH34oHz9hAsTGQmpqMp16zSBHZcXq1VZs2iShUFjRoIGaa9clshTe9Hnr30LXM6bjQkXBaGNI24PM39+fyMhIPDw8Sk2oisyksT3p1Os0I0YoCQnR8N9/8lJVfku7TWsNO3fB3YwzoGiNWt0NhWITCsVpXntN0nt6/XnpyNzxFFhZabhxA+zt4fnn5bigs2ehUye4fBlu3YIqVfKuo80mGzFCyZUrsGJRLyRJ0pNPrm8k7xMYpqismqysLGxtbctYmvLHkuU7GPn+j2jL9N+5tyn3NXoGUf5Ynk7tNTovaWDnKfg06U3UjlOMGKGkTh0NO3YokIuKdic7YxN3r56mUSOJmJjCGZTVPCQ6dICIiAyiz2XQsCGcOwdDh0LdurKXdOhQ2asq9N04hM5XPIqKV/Wv483ly3nzQRt3Ghoqh1bcuwfu7mru3VOAVAeNdIXkZHnuHTkiz7H6rUMMXtPYjgsVAZNjhi5dulQ
aclQawkIbsnPDJGbO/4OD+6/i6pxDZGQagwdLek+mrYJqMWlsT2bM/ZPd+79EqcwkPFwqlPH16vDFtGrmD0gcOXGxgJdJVvDdu+WsGjc3OT4C5DEOR4BaZU0DvwBWLOql602WX74mjXz09gnyWLRIrrqqUCj49ttvqVIlr5aVWq1m7969NG7c2FzilRumhq/HUJn+qeHr9YwhQ/F02rigph0/ILDzFLyUu9m57V/kwol54ymVLbl5M5patTTUqCFnUObkqHFykvjhB/35kJAgFxudPBkSEqwJbVUbFAoO7r8n9P0hCJ2vuBiaXyNGKLlw8YYu7nT/frlNU8F7yL17ShSKpkiSD3IR0/ydDVpx9+oxwxliRnZcqAiYbAxpJ0tBFAoF9vb21K9fnyeeeAIrKyuDx1kCYaENdcFmWmt9xAhFIU9MuzYN2Lz6Pd0xX34pe2sMZXxduQJKRV7MEOTGVYTKX/5ubrJ3SKGQFf3IESXxV2HXHxMLffHnl09QNPPnzwfkp+SlS5fq6bStrS1+fn4sXbrUXOKVGxLvZ2GoGnri/YV6xxmKp8tIh8RbZ4na+RnuPi3AHTQaZaHxNJpnSEmJISREQ2qq7O5PTYWQkMLzYd06/Ti5A1umle4foBIhdL7iUHBJ7Pipy3R8Uj/uztVVg1KJLu70hRfkEIvXXpOXkAcMkOfMb79ZIUndket/vYz+XO5eKAaoYEVr38CeFS57rCAmG0Pz58/n9u3bpKen4+bmBkBiYiKOjo5UqVKFhIQE6taty65du/D19S1xgSsaBT1Fhp5MCx6Tk5VD3bp5GV+hoXJWTFYWbNwoK2/z5nkZZLa2cobZ0KFw/boVB/e7iCfgEkDrBX3yySdZt26dTt8F+rhVtePOPQOpuwVaYeSPpztzBsaOVaBWy+74xBubSbyxnCwfBe7ucK/AeErlJhwc1OzcKXs/O3aUdX/nTnj22bz5cOiQHEunnRs1vEUfRVMQOl8xMLQklnBbQ2SkQrcKsW6dXFbFykqeC6GhcP++vETWpo0cSzduHHh5gUKhRpI2IRdC3UrBdhz5Y4AqUzp9fpSmnjBz5kxat27N+fPnuXv3Lnfv3uXcuXOEhoaycOFC4uLiqF69OmPHjjVp3FmzZqFQKBgzZoypIpV7tJ6YS8cX8veq9w0aKPmPUVopaN1aVuhTp+QeY97ecjaZt7eswDNmyAHS2gwy7VOxJGkeeB2B6ezatUvcFB7A9Im9ySvhP4G8Vhh99I6bNLYnV67IMWwzZypRq7VLa7NzfzelenWoXl0DnEapzBtPozmNtbUGPz857XfYMPkBoE4dORh02bK8+aBUyv+/fBlyO1IKTETofPkm/5LYsGGwZImGGjUUXLokMWKEkmXL5Hni7y+nxl++LM8TbZjFsGGwdCn4+sLNmzBqlDzn4CwQRd5cboUcA9RLd239dPrZuf0Em+Rur7iY7BmaPHkya9eupZ42OAWoX78+n3/+OS+88AIXL17ks88+44UXXjB6zMjISJYtW0ZwcLCp4pRLCrovn+3Wkr+2HitUkTr/cfmr5CbeT9fVEPrlF3Q3gPxrvPv3Q3Cw7PL08cl7ElYoTLZvBQYYN26c0cfOmzevFCUp/2jjguRssoVFtsLI7wHduvM8hpbWLl+Owdpao5cK7Omp5vZtifv3oXt3/WWxxx6T62rt3i0bRv7+csalh4f8//MxiWXzR6gECJ2vOBRccra2hvbtJXbtdKaBnx8H919FoUikTRvo00f2Dn31lewRKrisnJwsJ/mcPi2xY+cVlEpHlNZXge9wdq9dKAaoMqXT58dkY+jGjRuoVKpC21UqFTdv3gSgZs2apKSkGDVeamoqAwYM4JtvvmHGjBmmilPuKOi+jIy8z6j/naJWLQWPP55X8XnBzNcYM+kng1Vy9+yRLflhw+D2bbkSaEEF/vtved/16xAQkPdU7OYqunSXBMePH9d7fezYMVQqFY0ayW7gc+fOYWVlRatWrcw
hXrljxKDOOqNoyfIdTJ21hjEf/oCrsxNTJ7xQKM3e2kZBtoEO2Dk5atLTZWO/bl05NigjAxITZQ/omjVyIGifPnkPAE5O8Ouv8uvhw6FFC9lzOmKESKE3BaHz5ZOi0uW1S85//y17e7KyQKlMYevOUygUcuHdtWthyxbZW+roWLhQb2SkXItOpYIrV5S41wyiaccPHihPZUqnz4/JxtCTTz7JsGHD+Pbbb2nRogUgT6IRI0bQKbfdxKlTp4wuwDhy5Eh69OhBly5dHmoMZWVlkZWVpXudnJxsqvilTuGIfomhQ8HTU8qt/yNH+E+fvZY6dSS++koyUCUXXn8drl7N+8IvWGk6PV12bzZuDFFRcp2hzEwIDqj3UBkFD2fXrl26/8+bNw9nZ2d++OEHvTi5QYMG0b59e3OJWC5ZsnwHo/73A/7+0KkLRESkMup/8hNjcBNf3YNC+8c17NiRL01esRmk0zg5SQQEyLFAGRlybNChQ6DRyCUkLl6EL7+EAwfklODLl8HFRb4ZRETIr+WlAZFCbypC58sfRaXLyw/Tp3nlFbm2nEIh632bNnnzoEYNOXA6IiL3QTk3yWbIENmjmn++DB+mIC5eQZMn+zxYICpXOn1+TF5T+e6773B3d6dVq1bY2dlhZ2dHSEgI7u7ufPfddwBUqVLFqAKMq1at4tixY4SHhxt17fDwcFxdXXU/5TFAOzrmKq1aaQp5cuLi8l6HhGi4n5ymqwR95UrhrJgOHWQFf/JJdDUi8sdFuLg4olIpyclR0rGjnBqZkKDkw3G9zPCuKzdz584lPDxcL4bCzc2NGTNmiEKjBZg+e60udVcb1+PnJ28v2CZj/nwJB4do7Oy+wt39BrVqyWnyqanyw8HXX8tjfPONXB8lNTVvvBMnZA9RjRpgZ1OFg/vd8K5Wj9BWdTkf40YDv6bs+uNDETdXTITOlw8MxQbVqQN/bT3Gzg2TuH9f9vjUq1d4zvn45L3295ePqV5dNp42boRr12SP0b59ShrWDWTD1rVGpcRr0+ndqlth6/ADbtWtaPZUxUynz4/JnqHq1auzbds2zp49y7lz5wBo1KiRzpUKsvfoYcTHxzN69Gi2bduGvb29UdeeOHGi3rp2cnJyuTOIClag1npyauc2+j1xAv78E9RqSefxMVQlNyICmjWTa6U8+6z8e+NGufN8ZqaC4IB6TBrbU9QLKgOSk5O5fft2oe23b982ejnYUkhKSaNTl8LLuuvXpxWKc7hyBUCDWp1KakoaXZ+iyBYarVvLcUHa8eLj5YeFgAC4cM6GS8cXFhZGUGyEzpcPDMUG5W8no1DIP4ZKTOzenfe6dWv5/tG2rXwPql8/L9lArXLi71Xvc9cpBL4rXGXaEK5ejSpc77GHUexo28aNG/P888/z/PPP6xlCxnL06FESEhJo2bIl1tbWWFtbs2fPHhYtWoS1tbXBcu92dna4uLjo/ZQ38mfMyJ4cBZcvw7VrCmbMyEtl7Ngxz+NTpYrs/td6f4YOlRX1tdfkMbU1U6ystB4gBR+O62VUlprg0enduzeDBg1i3bp1XL16latXr7J27VoGDx5Mnz4PdytbEq7OTkREyAY95Bn2rs5OuQ8KSlQq+OMPWLhQ9uy8+CJYW0scOiQfr304yD9GZKS8XTuejQ3cuSMvp8nJB4KSROh8+SD/nIG8or01vN3o1GsmVlZyyIShOad9AFep5CrsCoU8X+7dk71DHTvKZShu30lhf8Q5s7y/8oTJniG1Ws2KFSvYsWMHCQkJujYdWnbuNK5BXefOnTl16pTetkGDBtG4cWMmTJhQYYs2Gqor9N7wlvy59Rh79p+hRg0Vnp5w+rQc76Nto6H9/59/WqNSSXh7q9EWetUqd0YGNPBrKjxAZczSpUsZP348/fv3JycnBwBra2sGDx7MnDlzzCxd+WLqhBcY9b8fdO0vtHEJX815kcAAH10rmGvXNHqVcEND5QeF/C00tLENhw7JDwf+/vL+y5d
hzBh45hn5NQqFed90JUTofPmgYHsnbdFeDzcFfn7yQ0FiouwpLTjnMjPzYuni4+Hzz2HBAvl+kz8+dfhwBTPn/8GPnYab+d2aF5ONodGjR7NixQp69OhBYGAgimJ+ETk7OxMYGKi3zcnJiWrVqhXaXtEwVOF5+KDO1Ggykps3U3B0lK3yyEg5A8DODnJyZOt/1x8T+e90XBE3lDcKpSsLSh9HR0e++uor5syZQ2xsLAD16tXDycnJzJKVP7RZY9Nnr2X9+jRcnZ34as6LOr3VPihcvnxKL823eXM5QHr3btnoDwiQK6uvWyenySuV8j4bG9kQev55+bzQUDi4/15Zv81Kj9D58kFRRXsHDPuStmEadu+W7yXt2sH//ieHYNjayqsNmZnakAyYP18OsXjsMTkTWX8JWtItu1kyJhtDq1atYvXq1TzzzDOlIU+lRtJIBmsGxcfre3y0Xh9DNxRDaZZhoRU7cK2i4OTkVGlqYZUm+dPsQc6I6fHKHKJjrlLF0YHrt+TaPwWrqV+8KPfYy58mX69eXpp88n1HXKqm88wzsjdadKAvfYTOlw6mfI8XfLjeH3GOjIwc3VKYNt60WTN56Xjp0rzYUzmTGV3Py8MRcmamSpV3jJhDMiYbQ7a2ttSvX780ZGG3NuKrkqK0UhgsepWZ7lzIk1TwhgJFp1nu3DBJGEQlTJ8+fVixYgUuLi4PjZFYt25dGUlV8civs/nraXV7Wl7+GjcuL2X+8mWoVq3oNPmFM19k9KQfCy0ZiPT5kkHofNnwKN/j2nO9vDRcuSJ7f27d0l9e1q4oREYquHxZIjtbwbJlEkeOKImLl/vOGeqVaemYHED93nvvsXDhQiRJlLnXPvH6txhNj1fmPDQIrUWQH5GRigKBoQqaB/kZNVZRaZYz51fsMujlEVdXV90ScP5yDoZ+BEWTX2cNpcz7+cGuXZCWUoVxb3endfOgItPkhw/qxM4Nk2jg15SD+0X6fEkjdL5seJTvce25K1bI8T++vnJizbVrsG8fuLo44O1Rj4P73WjoH8jizwYSHBComy+7N37I7o0fijlkAJM9Q/v27WPXrl1s3ryZpk2bYmNjo7ffUp4YimPd5wXD5VnlcXHw3vCWRo31sDRLQcmxfPlyg/9/VPbu3cucOXM4evQoN27cYP369fTq1avExi9tinLvF9Va5vadFAKayHWFjh+XK0cX9IzGx0Or5v7U9fMiOkbWZbeqjgaXDgzF4wlKBqHzZcODvseXLN/B9NlrSUqRwyOebB/Irn+jSEpJw8HenoyMLGzt5Pk0YADMni17UrWxQ2vXZoAEPy8dqZs7wwusMABiDhnAZM9Q1apV6d27Nx06dMDDw8NinxiKY91rg+EKWuV/bj1m1FhFpVmK9d7S5fvvv9d1835U0tLSaNasGYsXLy6R8coS7QPAhSunaRuWyPnLp+nUayZLlu/Qba/fMJGIoxe5dTeWtmGJeFdXceyYXE3dw8NwCrCXF5y7FMWo//3AqbNRemOLlF/zIHS+9Cjqe7yKkwOj/vcDVVxS6d1bwsk5ldUbDmFtm8rjj0skJWdQs5aGHj3k2KBx4+SaQZGRcuxQRIQcH3TrbixP9hRzx1QUUgVe70pOTsbV1ZX7l5bh4uxQptf2bzGatmGJDBuWt23ZMji4383kAnDGjqW9GdWpg956r3BzFo/klAyq+g8jKSnpgTWrGjRowMWLF6lVqxYdOnSgQ4cOdOzY8ZFj5xQKxUOfkg21oPH19TWLzvd4ZQ4XrpzWtZpRqfICm13d0nWVpe/ezUsSyB/E+eqr8he4r29elmR8vJzp0rhx3nGzZ+eN3cCvqXiKLUEqss5v9amPk7L8l1wJeroGzjPfLHJ/Ud/jVRztcKuWoSs3kX/uKBSF59WwYXJLpsxM8PaWe1Vq59KwodCoXpBJc+euUyf6vGVc0cWKhConnYOrBz9U54tVdFGlUrF9+3aWLVumq0Z6/fp1UlNTiydtBcQUL03
+eKB2T0+n3dPT9GKDjB2rKM+SMIRKl/PnzxMXF0d4eDiOjo58/vnnNGrUCB8fH1599dVSvXZ5akFjqNVMSIiGpJQ03XZDrWW07WiCgmDePLmcxJo18m9tym9RbWu0y2aCskXofOlR1Pd4emamwQSbuDjD86pNG8jOzqtCnX8utQlFzB0TMdkYunLlCkFBQfTs2ZORI0fqSrbPnj2b8ePHl7iA5ZWClaa1GS8Fe4MVXFq4dTeWiKMXqd8wbyng2W4tjRoLEFWnzUStWrUYMGAA8+fPZ+HChbz22mvcunWLVatWlep1J06cSFJSku4nPj6+VK/3IIoy2l2dnXTbDVWPzl8NNyBArqtlZWWFo6OyUGHR/FVzxRKweRE6X3oY+h4vqnp77dqG59WRI0rcq1bB3h4cHNCbS4cjEHPHREw2hkaPHk1ISAiJiYk4OOS56Xv37s2OHTtKVLjyjLFemoKxRV9/ndd0smDTPeHxKZ/8888/TJo0iXbt2lGtWjUmTpyIm5sba9asMdi/qSQpTy1oinoAmDbhRd32gq1lhg2T29HEx+e1mrl8GUa91a3ItjUPeyAQlD5C58ueqRNe0GvKrZ0r8fF5LZuGDKHA3HuBzEwFFy/Ky2ba8+LiRdNuUzE5m+zff//lwIED2Nra6m338/Pj2rVrJSZYRcCYzBZDmQP5m05qswhElkz55emnn8bT05P33nuPTZs2UbVqVXOLZBaKqobbrk0Dgpr46LLJQlu5gULBwf33aNLIh97dffjpt38LFRDt9UxLg21rRONh8yN0vuwxVL29b69Adv4bxb59abi62FPFwY2D+zMKzD1f3p+6kpOn4/jzTwhqUoefFvcXc8dETDaGNBqNwSaqV69exdnZuUSEqkwY6mKfv+mkWAoo/8ybN4+9e/fy2WefsXDhQl0waceOHWnY0LKKXRZltD/MmJ8zvZ9R5xhKAxaUPULni4/Gxo7MHFsoRmrSoAHdGTSgu9HHZ2ZDqxaB7Nw40+A+U8ixVuHlZvPwA8sRkgTJaSqych49D8xkY6hr164sWLCAr7/+GpCzA1JTU5k6dapo0WGAgo32DFXWFdU/yzdjxoxhzJgxAJw6dYo9e/awZcsWRo0ahZeXF1evGh+omJqayoULF3SvL126xIkTJ3B3d6e2NmBGIDAzQueLgUKB1QvPc+25LijvVDW3NCajTkxm5Mt1zC2GaUiQo9YQcSqR7YcTi2N/6jDZGJo7dy7dunWjSZMmZGZm0r9/f86fP4+Hhwe//vrrI4hSOSm4tFDD2x3vahLnYxKp4e2Gh5uCAcO+FH3GyjmSJHH8+HF2797Nrl272LdvHxqNBk9PT5PGOXLkCE8++aTu9bhx4wAYOHAgK1asKEmRBYJHQui8aVi98DwOz3bHq4YHTjWrUcwe5mZDpXRCaZNubjFMRq3KoqOdbMpsO5xY7HFMNoZ8fHz477//WLVqFSdPniQ1NZXBgwczYMAAvYBqQR6GlgNEn7GKw3PPPcf+/ftJTk6mWbNmdOzYkSFDhvDEE0+YHEvRsWPHEmllkzL9JxS2Jk9fgYWTkq0y6rjyqPPlGgcHbDt3xMPNDXd7G5QOtg8/x8xcvhxH3QYtORa5i+bNg1Ap7VFaGacf5QmllS1V3SA0SMXe4/eLvWRWrG9Ta2vrUq81UdnJn2Umd7DXMGKEkpnz/xCB1OWMxo0bM2zYMNq3b19uqqxHb79ZIQrQCcoXaZrC8Z6GKI86X55RVHVFYWODfUVzB1USrKztsLFS4uJkze37OcUawyhjaOPGjUYP+PzzzxdLEEtD9BmrOMyZM8fcIggEZYrQeRNRKlBgmiG0f38E4TPnEh11hiaBAUyc9B5hYaEmX3rN2o18/MkcLsRewtHRgRbNg9iw7iecnJz49rufmLfgKy5disPPz5d3Rg7l7RFydey6DVoC0LK1vIT5RIcnWPb9ejQaDUsWz+X3VT9w795d6tVryLj3p9C+g5zckJ2dzeyZk/l
ny18kJ92nmocnr/R/g6EjxgKw4ruvWLdmJVfjr+DqWpWOnbsxfsI0nJyqmPzeTCK3+GRxMcoYMrapnkKhMJhpJiiMoSwzkVkmEAgElZ/9+yPo1OV5mkrwslrN1lsJdNq5h53bN5pkEN24cZP+rw5ldvhUevfqQUpKKv/uO4QkSfyy8nemTp/FFwtn06J5EMdPnGLo8LE4OTky8PVXiDjwD6HturJt6zqaNmmE0t6dxGT4ccUyVny7mGkz5tGkaRBrf/+FkcMGsHHzfvz86/HzD1+za/sW5n/xHTVq+nDzxjVuXs8rq6NQKvlwSjg+vnWIj7vMx1Pf5/PZ05j68eel8acsMYwyhjQaTWnLYXEUzDLT9qcRmWUCgUBQuQmfOZemEhxWq7EBZqjVtLayInzmXP76e7XR49y4cQuVSkWf3s9Sp47ctiQoqAkA0z7+jM8/+5g+vZ8FwN+/DtFnYvj6mx8Y+PoreHp6AFDN3Y3q1b1RKZ1JTE5l+bdf8tawd+nxXB8Axk+YxuFD+/hxxVKmTJ/DjetXqeNXl1Yhj6FQKKhVS79dysBBw3X/r+VTm9HjPmTaR+9VDmNIUPI8qICdQCAoPU5mpfNT0l2u5GRTx8aW11yrEWznaG6xBOWEstCP6KgzvJxrCAHYAN3UalZHnTFpnGbNAunc6QmCW7SnW9dOPNWlIy++8Dy2tjbExl7iraFjGDp8nO54lUqFq2vRFb1TU5JJuHWTFq30vVMtWoUScyYKgF4v9GPwwBfo3iWU9k90omOnboS1z8sWPLB/N98sWcDFi+dJTU1BrVKTlZVJRkY6Dg7ld54JY8iMiKrTAkHZcjIrnXdvxdMUeBXYolbxbmY6i7x9hUEkKDP9aBIYwNZbCczINYhygK1WVjQJDDBpHCsrK/7ZspYDBw7zz/ZdfLn4WyZPmcnG9b8A8PXSeYS2aVXonEehaWAztu8+xt49Ozi4fw9j33mTtmEdWLh4BdeuxjHirf68MmAQo9/7ENeqbhw7EsHkD94lJyeH8pxwLowhgcAAycnJRh9rKb2TKgM/Jd2lKRCJ/DQ+AwjJ3T7Hy7KNIaHzZacfEye9R6ede2htZUU3tZqtVlZEK2Dxh6Y3O1coFISFhRIWFsqUye/jV685+w9EULNmdS5eusKA/i8ZPM/WVvZL5Y/zreLsgpd3dY4fjaBNaJhu+/GjEQQ1a6l33DPP9uaZZ3vTrfvzDBn0EvfvJ3I66gSSpGHCpE9QKuXWp1v+3mDyezIHwhgSCAxQtWpVFA9JTZAkSSQNlFOKWuq4kpPNq6C3PPE08HOOib0LKiFC5ylSPxZnZnAyK73EvENhYaHs3L6R8JlzWZ2bTbb4w/G0a9fGpHEiIo6yY9deunbpiJeXJxGHj3L79l0CAhoybcoERo+dhKuLC09360RWVjZHjp4gMfE+48a+jZeXJw4ODmz5Zyc+PjWxdtQAVrw55B2+XDAL39r+BDQJZN2alZw9E8Vn85YBcraYp6c3AU2DUCiVbNn8Bx6e3ri4uFK7Tl1ycnL4+YdveLJzN44djeC3X1eUyN+stBHGkEBggF27dplbBEExedBSRx0bW7aoVcwA3fLEFqCOTfkvklfaCJ3HoH5sBqyRePdWfIkul4WFhZoULG0IFxdn/v33IAsXLSM5OYU6dXz4/LOP6f50FwAcHR34fO5i/vfBNJycHAkKDGD0u3KAs7W1NQvnz+STTz9n6rRZPN7+cZZ9v57XBg4lNSWZz8I/4t7dO9Sr34jFy37Bz7+ePKZTFb775guuXL6IUqkkMLgFy75bhVKppHFAIBM+nMG3Xy9k/uefENK6LWPHf8QH499+tD9WGaCQjCgNWl7dp8nJybi6unL/0jJcnMvxYqSgXJKckkFV/2EkJSVVGLe/Vue3+tQXRReL4P2EeNIz03VLHTnISx1O9o685lqNd2/F0wT5iX8LEA184V2bILvK/x2SplHT7eoFofNFoDWkGwPPAFuR9WM78A6
yDs3x8i10nqJWDRynT8bXy5OqVe1Q1vIoVTlLA5XSmdjLqeYWo1ho1NncunmVxauvkJCoX3RRlZPOwdWDH6rzRnmGhPtUIID09HTi4uLIztZfUgkODjaTRAJDPGgpLNjOkUXevvyUdJefc5fQvnD1sAhDqDhYms5r9eO9W/F8CzwGfAW0QyynVnaMMoaE+1Rgydy+fZtBgwaxefNmg/vFA0D5ouBSx5fAAgC1iuevnmeQq4fBp/v8cUZuVlYoUHBPraKOjS11bGz5Jy2ZdI0GR6WSQa4e9HZ2M1qmipbObwk6X/AzcVQo2ZeRigSoAQ0g5f7sAZYBKrWa9xPiy/3nJzAdo4yhDh06lLYcj0RE+zliyUBgMsb2aRozZgz3798nIiKCjh07sn79em7dusWMGTOYO3duKUspMJXXXKvxbmY6IYA38hJHINAd2KTRMC8xAUDPmMkfZ9QRWKVW6WKONqtV/JaZjj/w1gPGKIqKmM5f2XW+4Gfys1rFDfLpCXAGOAs8kXtOU6A7Elsy08v95ycwnWIHUFua+1RguezcuZM//viDkJAQlEolderU4amnnsLFxYXw8HB69OhhbhEF+ci/FLYrM51A4Ch5qdItgeVJd/QMmfwp1b2AYOBwgXN8gdkPGKMoKmI6f2XX+YKfyQoopCetgZrAXaBOgX3l/fMTmI7JxpAluE8FgvykpaXh5eUFgJubG7dv36Zhw4YEBQVx7NgxM0tX/ijNJSFjxw62c2SOlyNd4mLojn78UDBwRqOhS1yMbskrf5xRNPBygXOeAVYXeL3IyDZFFTGdv7Lr/JWcbJoiG7iJgApIRzaEJwFhQDfkz9waCulQef/8BKajNPWE/O5TBwcHtmzZwg8//ECDBg1M6m4vEFQUGjVqRExMDADNmjVj2bJlXLt2jaVLl1KjRg0zS1e+0C4/pGem86paRVqm/PpkVrpZxrZVKNmEnFEGcuzHKqAJ8C5QM3fJy06hZEvucU2Qs4i05+QgL5s4FXjtqDTu67OOja1ubO355T2dv7LrvJ1CwXbkz1SNvAT2NnAVeBJZT7YCAciGUn4dqgifn8B0TPYMlaT7NDw8nHXr1nH27FkcHBxo164ds2fPplGjRqaKJRCUGqNHj+bGjRsATJ06laeffppffvkFW1tbVqxYYV7hyhmluSRUnLGrWVlxWqWhJbI3Zynyja/gslmcOof43PECkW922nO06dVKYALyjfE08J6rp1Fy549h0kvndy2/6deVXefvqtUEAj6AC/pLoq2A54E0ICX391ny9KEifH4C0zHZM2TIfQoUy326Z88eRo4cyaFDh9i2bRs5OTl07dqVtLQ0U8USCEqNV199lTfeeAOAVq1aceXKFSIjI4mPj6dv377mFa6ccSUnm24UXlK4UgJLCsUZO0uS6ALcBhYBmRRe8ngGyJYkFnn74mTvyBqgOuCMvExSC+iH/GW5CLiuVPKemze9nKsaJbc2hsnJ3pGfraxxsncs93WNKrvOZ0sauiMHSXdFXx+6I+uJPXAL2AvsBjKQP/+K8PkJTMdkz5DWfern56dzn/r5+RXLfbplyxa91ytWrMDLy4ujR4/yxBNPFHGWQFC2fPzxx4wfPx5HR9n74OjoSMuWLcnIyODjjz9mypQpZpaw/FAaFZ61cUKJahWbQG/sTUCCWkWnuBhyAAXyl5oKcFEoyZEkdiEvhdiQt+RRcIwcYPyteBTI9dSSkFOqyf19AKiiVLLRp4FOnpXJdwvFLRUV06SNYaooVDadL/i5KIDfkeOEfkIOoNaWFtYg64ka+bPvAjQDbgJWufoh8dBaxYIKhsmeoYLu082bN1O7dm0WLVrEzJkzH0mYpKQkANzd3Q3uz8rKIjk5We9HYDons9J5PyGel6/F8n6CcfEcxTmnsjB9+nRSUwtXZk1PT2f69OlmkKj88pprNaKRl5sm5P6OBl4v5pJC/jihQOTlqZa5Y7fMfe2FfLMCeQnsxdz/+0oaRiL
RGNlIehE5K8jQGA2QvQF1kRgD+CMvnbQD4oHLQDcn1wfGLZVmvFRZU5l0vuDnkpCZTgbyZ6pGNnK8kWPI6uduq5N7bh1kvTgE1AVGIlXoz/VRuXY1joB61TgTfapcjvcomOwZevXVV3X/17pPz549S+3atfHwKP4aqkajYcyYMYSFhREYGGjwmPDw8Ao3Ecsbxal5UhHrpJQk2urqBfnvv/+KNNwtlZKu8Jw/TsgX2UjxRV6+CkB+sm8E7CIvNboXhVPjWwP3gZjc4xMKjHH5AeccQ44juZyTxeWkrCLjloAKl0JfFJVJ5wvGmp1C/rybIC+DFZVS75L72wnZoBep9VC9Ri32HorGza2auUUpcUz2DH388cekp+dZxFr3qZOTEx9//HGxBRk5ciRRUVGsWrWqyGMmTpxIUlKS7ic+Pr7Y17NU8n8xzAaOIH8paL/MS+qcyoCbmxvu7u4oFAoaNmyIu7u77sfV1ZWnnnqKl19+2dxiljvkJSFfVteqxxwv30eKrcgfJ5SE7N3ZBFzK/f0ictyHhrxYoGgKx4F0y91uA7wEOOaO8SGy10jzkHO658pSMG7pMHKA7fHMdE5mZpRavFRZURl1vuBndgawQv5MkykcQ9Yt9xjt72eQvQYV+XM1lpycnAfut7KywtPTG2vr8tPjvWCtw+JisjFUGu7TUaNG8ddff7Fr1y58fHyKPM7Ozg4XFxe9H4FpFCcItTSDYsszCxYsYN68eUiSxPTp05k/f77uZ+nSpezbt4/FixebW8xKTf60dFsKpzhvQvbuKPPtM5QavzV3e/5z9iOnUTsie5sKjp3/HG3cU3558p//LnJn84qegl0Zdb5gaYMA5KWwTcjeH0Ofe0C+35uQY4hK+nPdvz+CHs+9jH+9IHo89zL790eYdP7X3/xArdpN0RSod9Wrz6u8+da7APyxcROtWj+JQ5Va1GvYiumffIZKpdIdq7TxYMnS7+nZewBVnd1Y9tU8kpLu8/7YYbRr3ZDmTWrRrVNr1q35BTC8rHX+3FmGv9WPkGZ1aBVcm1f79iDuyiVAXvFZ/MUcOoYFEhxQg97PduDfPTse+L4OR+zn5d5dCA6oQfvHmjD3s+l6Mr/e/3k+mfY/Zn4yibYhDRjyxksm/d2KwmTzriTdp5Ik8c4777B+/Xp2796Nv7+/qeIIjCB/8GCmpGEzmBTgaigodhOQqFZV6j49AwcOBMDf35+wsLBy9TRkKeRPS88kL97nGfJS3NOBIGSPZUvk5a4tBY6LRl4OaZnvnIFAY+Tlj8PIho2hc7RxT1+4eiAh6eRJy3e+NiutExU7Bbsy6rxWh7SfyzlkY+g0ctZgQZ06g5xSfyX39yXkQOqS/Fz374+g01PP4+cHbdupOXIkgU5P7WHnto2EhYUaNcZLL/bk3TET2bV7H507yQlH9+4lsmXrTv7+cxX/7jvIwEEjWTg/nPaPP0bsxcsMGzEOgKkf/U83zvRP5hD+6UfMmb+AazezWTR/JrEXYvj6u99wc6/GlSsXycrMNCjDrZvXea3fs7QJDWPFTxtwcnbm+NEI1GrZePlxxTJWfLuYaTPm0aRpEGt//4WRwwawcfN+/PzrGRxv+OBX6PXCK8z6/Csuxp5nyodjsbOzZ9ToCbrjNqxbxSv932Tl6k1G/a2MwWjPUGm4T0eOHMnPP//MypUrcXZ25ubNm9y8eZOMjAyT34jAMAWDB900GqLQDyA9BbRzqFLkGO0cqnCKwkGnj4NFBBN26NCBK1euMHnyZPr160dCgtyXavPmzZw+fdrk8RYvXoyfnx/29vaEhoZy+PDhkha50pA/LR3QS5O/jRz/cwVZh22Qb2Rrcs+9qlDys5U1V1AgIWcPRQNVFUquIQdGa5dIwpDjjjKQm7rGKRT4Wduyq0AqfH55rqG/xNIBeAWIhQqTQl8UlUnntZ9ZLPAtss6MJy91XolcR2hB7m8FckyRLbJu2QBNbe3A1r7EPteZs+bi5wdffaV
m2DBYskRNnTrydmNxc6tK96c7s/LXtbpta9ZuxMPDnSc7Ps7Hn8xhwv9GM/D1V6hb14+nunTk42kf8PU3P+iN0++VPgx6oz9169alZk0fbly/RkCTIAKDW1DLpzbtwjryZOenDcqw8ufvcHZ2Ye7CbwkMboG/f336vDgA/7oNAFj+7Ze8NexdejzXB/+6DRg/YRqNAwL5ccVSg+P9+vP3VK9Rk4+mfUbdeg3p0rUHo0ZPYPl3i/U8YHX86vH+B9Pwr9tAd61HxWizf8GCBUiSxJtvvsn06dNxdXXV7bO1tcXPz4+2bduadPElS5YA0LFjR73ty5cv19W4EBRNwXTRdg5VOJCRypWcbNytrJGQuJydjROwEPnLegZyhsQF5BuKfe7P0sTbHMhINejlOZCRSk3koNNF5AUW2iA/jVf2YMI9e/bQvXt3wsLC2Lt3L59++ileXl78999/fPfdd6xZs+bhg+Ty22+/MW7cOJYuXUpoaCgLFiygW7duxMTE6Op3CfTRpqU/f/U8NzQa4snzULYEqiHf1HKQddHJ3tFgV/qCvJ8Qz5bMdJ3Hsw1ysGyrh5yvlafg+TlAFNDcyOuXZyqbzgfbOdLc3pG0zHQ2IH9eMwEPoDZykLz2M9QGUMcj15Ta6FMyN9v8REefoW07NVrHm7U1hISoOXjgjEnj9O/3IkOHj+WrLz/Dzs6Olb+uoe/LvVEqlfx38jT7DxxmZvh83fFqtZrMzEzS09N1ZRNCWjXXG/OV/oMYPfINok+fpN3jT9LlqWdo0aqNweufiY6iVchj2NjYFNqXmpJMwq2btGil7+lq0SqUmDNRBseLjT1H8xat9VafWrYKJT0tjZs3r1OzphxG07RpM4PnPwpGe4YGDhzIG2+8wa5du3j77bcZOHCg7qdfv34mG0IgL5MZ+hGG0MMxlC46LzFB95rsTM5kZ9ETidrAU8gxDoeRjZoGQB/kAML6PDhl9Fx2FjeRvyDeBWogp6Nqv0Aqe/zQBx98wIwZM9i2bRu2tnnLiZ06deLQoUMmjTVv3jyGDBnCoEGDaNKkCUuXLsXR0ZHvv/++pMWudAxy9TCYFm9D8dL4H7UMQEmXEShPVEadN/R5pVG41EIUctbhaeBNI6uMm0qTJgEcOWKFNhRGpYIjR6xo0iTApHGee7YbkiTx96ZtxMdf4999hxjQTy4ukZqaxrSp/+P4kV26n5PH93LuzGHs7e11Yzg5OemN+UTHLuzYe4KBg4ZzO+EGg17rzWczDdeVyj9OWeLgWPIP3iYvCHfo0IHY2FiWL19ObGwsCxcuxMvLS1dvqGnTpiUupKAwhtJFJWSDRZsynImcGnwU+WlnZu4x3shP0mco3JpA6+WpY5PC+pT7SOTV3SjYxfs2+jFHxjbRLM1GnqXBqVOnWLlyZaHtXl5e3Llzx+hxsrOzOXr0KBMnTtRtUyqVdOnShYMHDxo8Jysri6ysLN1rS66tpe0QvzzpDos0GhyVSvo6uXIlJ6tYafyPWgagpMsIlCcqo84b+ry+dPXgQnYmy5PusFCjQY3sIbivVPKeq6fRVcZNZdIH79HpqT2MGGFFSIiaI0esuHIFVnw33qRx7O3t6dPrWVb+uoYLsRdp1Kg+LVvKXpOWLYKJiYmlfv26JsvnXs2DXi/0o9cL/WgVsoI5s6fyv0mFs8UbNW7KhnWryMnJKeQdquLsgpd3dY4fjaBNaJhu+/GjEQQ1a2nwuvXqNeSfrX/qxSYfOxqBU5UqVK9e0+T3YQomG0Ml6T4VFJ+CnbCPIxsnLsgdt7ciB/6lkpcu+i1yQKAKOTBUheFuzF9mpnMot8hdd/KCCg8jx1Zog0W/Iu9peIyDs1G1iCpizaKqVaty48aNQgH+x48fp1atWkaPc+fOHdRqNd7e3nrbvb29OXv2rMFzRG0tfXo7u+mMopLgUStDV7TK0sZSWXXe0OcVZOdQojplDGFhoezctpGZs+Zy8MAZmjQJYMV342nXzvBy1IP
o3/9FnuvZn9PRZxnQPy+z6qPJ43muZ39q167Fi32ez106iyLq9FlmfDypyPEWzQ+naWAz6jdoTHZ2Nrt3/UO9eg0NX/u1t/j5x294b/RbDB0+hirOLvx34gjBzVriX7cBbw55hy8XzMK3tj8BTQJZt2YlZ89E8dm8ZQbH6/fqm/y4Yhkzpk9gwGtvceniBb5cOJs33nwbpZGNkYuLyaOXpPtUYDraStCJahW/Ixsl/sgeIK33ZjZyjJBT7va2wGJkw0ide/xR4DEMd+fOJK8Q2Wzk5bAA4FMDx2mDCfdnpBSqRVQHmHz7ml7V6opYs+iVV15hwoQJ3Lx5E4VCgUajYf/+/YwfP57XX3+9VK8tamsJzIHQ+dInLCyUv/9czaXYU/z95+piGUIAnZ5sj7t7VWJiLtD/lRd027t17cSff6xk27bdtGn7FG0f78aChUupU7vo8jUANjY2zP/8E3r1eILX+j2LlVLJ3IXfGjzWzc2dFT9vID09jdf7P8+LvTrx+28/YW0tP2K/NnAobwx+m8/CP6LnM+3Zt3cni5f9YjCTDMC7ek2WfreKU/8do9ezHZj20XheeGkAw0e+V6y/jSmY7BkqKfepwHTye1UCkQ2JKsieoPzeG5BjhAJyj1uFvBxW0MszCTmduFW+fdHIDSoNNbP8EnldfWvuGG5KK12gaEFP1WFyq7xqNHRHo/MAuSqtGExhb9TP5TjmaObMmYwcORJfX1/UajVNmjRBrVbTv39/Jk+ebPQ4Hh4eWFlZcevWLb3tt27donr16gbPsbOzw87O7pHkFwhMReh8xUGpVHItznCGX7eunejWtVOR52pyCt+zR4waz4hRhpfravnU5kys/oNro8ZN+XaF4RUhpVLJyHf/x8h3/2dwv6Hx2oSGsXr99iJl/nHlxiL3PQome4a07tOCmOo+tSRKqq9Xfq9KHEV7b2Yie1uOIHuGgos4LgyYhxwsuAC5/sai3O2GPEYa5HikmshenzSNWvd+ChY2+zRXBu11tR4gCUnvuIpQmM7W1pZvvvmG2NhY/vrrL37++WfOnj3LTz/9hJWVlUnjtGrVih078oqOaTQaduzYUawEBIGgtBA6L7A0TPYMad2nv//+e5m7TysiJRkjk9/7koRcNK6g9+YL5PoYw8hrTfCygeO+BAag7zXaDIxFXmIbjb7H6DSyEaT1Ql0G+gGnMtN5NzOd0W5eLMwtRvc0sAcYRWEP0PJcmbTHVaTCdLVr18bXV/aEGSo8agzjxo1j4MCBhISE0KZNGxYsWEBaWhqDBg0qSVEFghJB6LzAUjDZMzRz5kwaN26Mr68vqampNGnShCeeeIJ27dqZ5D61FEoyRqag98VQa4Is5CDpB7Um0Hp5fiMvm2x27u8mwF/ANuSCY/OB80BnR2dq2zuyGAXxyAXqfs59P42BrxITaGRrhya3MJm9UmnQA1Tf1l5XtK4iFab77rvvCAwMxN7eHnt7ewIDA/n2W8Pr6A+ib9++fP7550yZMoXmzZtz4sQJtmzZUijAVCAwN0LnBZaEyZ4hrfv0o48+IioqitTUVFq0aEGDBiVfmKoyUDCW5lFiZPK3JrBCjttpjZwppo3jcUJBJpJuX1P0WxNsRvby+EOhCrr5s85GIRtV77l566WXvnwtllfVKp7Id84zuedYZWcRDSzylp8k370Vb9ADFGTnUKEycKZMmcK8efN45513dK79gwcPMnbsWOLi4kxuUDxq1ChGjRpVGqIKBCWC0HmBpVHsxjMl4T61BAz19SpujEz+OhmazHRqo19XKAXwtnfgWGY6Prn7DiBX1j2FHBfUyt6RvjZ2/JOWhEqjYRMU6jmWBFBEnQ1D72crcmbaBvJXo/atNDVYlixZwjfffEO/fv10255//nmCg4N55513TL4xCATlHaHzAkujWIn7JeU+tQRKukqtXCfDl7fdvLiMXDL+5dzfl5D7iDkoFMQB13P3ZSCnwTsqlMzx8mWUmxcbfRowxs3
LYEXfsW7ebPRpYLDgWMH30yr3/XxI4WrUWllX16rHHC/fCmkIAeTk5BASElJoe6tWrfS6KQsElQWh8wJLw2RjaMqUKYwePZrnnnuO33//nd9//53nnnuOsWPHMmWK4ZLdlkz+xo4lGSNzICOVeoAvsmfIF6ibuz3AzgE/8rxGNQE/IMBOv3R6b2c3xrl5cV2pZBEQp1Dib23DyuS7RWa95X8/i5GNru1AOypGZlhxeO2113R99PLz9ddfM2DAADNIJBCULkLnBZaGyctkwn1qOqVRpVYbizQ737YJyLFIkz1q8G5mOk7InqEtyMHQ4w14o7QVfbVZb84qDS/y4Kw37fvRnvMOFS8zzFS+++47/vnnHx577DEAIiIiiIuL4/XXX2fcuHG64+bNm2cuEQWCEkXovMCSMNkYEu7T8sGDYpGK0zOpYK+z/H3KijLkKnNvpvxERUXRsqXcSyc2NhaQi8l5eHgQFZXXfVnEzgkqC0LnKz7TPp7NH39s5vjR3Y80zuFD+xg4oCcRxy/i4uJq1DkT3x9JSnISXy77+ZGuXZaYbAxp3acFnwaE+7RsyZ9ZZsgrY6o3qrhZb5W1N1N+du3aZW4RBIIyReh8xWf8uJG8M3LII4/TvGUb9h6KxtnZxehzJk0JR5KkR752WVKsbDLhPjU/Je2VKcmsN4FAIBCYlypVqlClStH7s7Oz9fqLFoWtrS2enqbVhDLFcCovmBxArXWfenp6EhsbS2xsLB4eHrRs2ZKoqCiOHz/O8ePHOXHiRCmIK8hPSWZrlXTWm0AgEAhKj6+/+YFatZui0Wj0tvfq8ypvvvUu0z6eTYtWHXXbB705it4vvMan4fOoVbspjZvKzowDBw7TolVHnB1deLFnJ7b/8zcB9apxJvoUIC+TBdSrRnJyEgDr16ykTXN/9u3dSY+uj9EqqDZD3niJhISbumtNfH8ko4a9qnut0Wj4dtkiuj0ZQnBADTo9HszSxXN1+z+fPY2nO7ehRVMfnurYkoXzZpKTk0NZYrJnSLhPKyeWEv8jEAgE5YH9+yOYMXM+p6LOERTYkMmTxhIWFmr0+S+92JN3x0xk1+59dO4kl8G9dy+RLVt38vefq/h338FC5+zYuRcXZ2f+2bwWgOTkFJ7vPYBnnu7CD7/8TMThGMJnTHrotTMzM/j+2y+Z/fkSFEolE94bzpzwqcyZv8zg8fPmfMya337igw9n0DLkMW7fvsXF2PO6/U5OVQj/7Eu8vKtzLiaaKZPG4uRUhbeGvWv03+NRKXbRRUHlwxLifwQCgcDc7N8fwZNdeiFJTVGrX+HWra3s2NmLXds3GG0QublVpfvTnVn561qdMbRm7UY8PNx5suPjBo0hJycnvvl6gW55bOmy5SgUCr5eNh9rR0/sHH25desGUyaNeeC1c3JymPbJXGrX8Qeg/2tvseSLzw0em5aawk8rvmbytNn0ekHOQq9dx59WIY/pjhkxarzu/7V8ajPorQts+mtdmRpDxSq6KBAIBAKBoHjMmDk/1xA6DMxGrY5EkpowY+Z8k8bp3+9F1q3/k6ysLABW/rqGvi/3Rqk0fGsPCgzQixOKOXeB4KAm2Nvn1aALbtbyodd1cHDUGUIAnp7e3L172+CxsbHnyM7Oom27JwzuB9j013r6v9Sd9qEBtAqqzcJ5M7lx/dpD5ShJhDEkEAgEAkEZcirqHGp1V/Ln76rV3TgVdc6kcZ57thuSJPH3pm3Ex1/j332HGNDvxSKPd3IqGc+/tbX+opJCoSgye8ze/sGhFsePRfK/ccN4omMXlnyzkrUbdzHs7XHkFKN/56MgjCGBQCAQCMqQoMCGWFltRc7bBcjBymorQYENTRrH3t6ePr2eZeWva/j1t7U0alSfli2bGX1+o4b1ORV1RudZAjh18rhJMjyMOn51sbd34OCBvQb3nzh2mJq1fBk+8j0Cg1vg51+P69fjS1QGYxDGkEAgEAgEZcjkSWNRKKKxsmoNTMDKqjUKRTQ
ffTjuoecWpH//F/l70zaWr1hJ/wd4hQye2+9FNBoNw4aP48yZM+zbu5Pl334JlFxBTTs7e94a9i5zZ09jw7pVxF25xInjkaxZLRdkrONXlxvXr/L3n+uIu3KJn1YsY/s/f5fItU1BGEMCgUAgEJQhYWGh7Nq+gS6dPalZcxVdOnuye8cftGvXxuSxOj3ZHnf3qsTEXKD/Ky+YdK6LizMb1//Cif+iaN2yDQvmzuDtUe8DYGdnZ7IsRTFi1HjeGPw2XyyYxbPd2jLu3be4lxtj1KlLd14fNIIZ0yfQ+7kOHD8WqRdQXVYopIpWJjIfycnJuLq6stWnPk5KK3OLI6hgpGnUdLt6gaSkJFxcKkaRMKHzgkdB6HzpoKhRHYdPJlPby4uqVe1Q1qp49dlUSmdiL6fy5x+/8+GEdzh84tJD433KCxp1NrduXOXL1Ve4fV+/PpEqJ52Dqwc/VOdFar1AIBAIBI+AdD8JKSeHzAroW/jxp9+o618Hb9/6bN92mLmfTefpZ3pWGEMIQK3KIketITmt+P1RhTEkEAgEAsGjkJFB9o7d3Hm2O7YOHjhlZFNRethevXqdKdPCuXXrNh6eXnR9+llGj/0Ajbpss7mKi1qVxf3Eu0ScSiQrp/jGqDCGBAKBQCB4RNRrN5IBJDzXBWVOhrnFMZo+vXvSp3dP1Ep7bt+Vs8qSk+6RnHTPzJIZgQQ5ag0RpxLZfjjxkYYSxpBAIBAIBI+KJKFe8we1sv7DbvIgqGArZvcdH2PakiPmFsMkJAmS01SP5BHSIowhgUAgEAhKCGVOFvY2FWOJKT82dtYkJJZtc9TyRLlIrV+8eDF+fn7Y29sTGhrK4cOHzS2SQCAQCAQCC8HsxtBvv/3GuHHjmDp1KseOHaNZs2Z069aNhIQEc4smEAgEAoHAAjC7MTRv3jyGDBnCoEGDaNKkCUuXLsXR0ZHvv//e3KIJBAKBQCCwAMwaM5Sdnc3Ro0eZOHGibptSqaRLly4cPHiw0PFZWVl6PVSSkpIASNNoSl9YQaVDqzcVqe6oVlah84LiIHS+9EnOViGlVJxsMi0p6hRUOenmFqPEUeVm9j1M581qDN25cwe1Wo23t7fedm9vb86ePVvo+PDwcKZPn15oe5/rF0tNRkHlJyUlBVdXV3OLYRQpKSmA0HnBo1GRdP7u3btABdL5FRdgxT5zSyEowMN0vkJlk02cOJFx4/Ia2Wk0Gu7du0e1atVKrKmcqSQnJ+Pr60t8fHyFKW9f0lTUv4EkSaSkpFCzZk1zi2I0NWvWJD4+HmdnZ6HzZqSi/g0qos67u7sDEBcXV2EMuIpKRdXrB2GszpvVGPLw8MDKyopbt27pbb916xbVq1cvdLydnV2h5nFVq1YtTRGNxsXFpdIoT3GpiH+DivblqlQq8fHxMbcYQMX8vEuaivg3qIg6D7LcFe1vXVGpiHr9IIzRebMGUNva2tKqVSt27Nih26bRaNixYwdt27Y1o2QCgUAgEAgsBbMvk40bN46BAwcSEhJCmzZtWLBgAWlpaQwaNMjcogkEAoFAILAAzG4M9e3bl9u3bzNlyhRu3rxJ8+bN2bJlS6Gg6vKKnZ0dU6dOLbR8Z0mIv4FlIT5v8TcoS8Tfuuyw5L+1QqpIOZYCgUAgEAgEJYzZiy4KBAKBQCAQmBNhDAkEAoFAILBohDEkEAgEAoHAohHGkEAgEAgEAotGGEPFIDw8nNatW+Ps7IyXlxe9evUiJibG3GKZlVmzZqFQKBgzZoy5RRGUAkLnCyN0vvRZvHgxfn5+2NvbExoayuHDh80tUqVj2rRpKBQKvZ/GjRubW6wyRxhDxWDPnj2MHDmSQ4cOsW3bNnJycujatStpaWnmFs0sREZGsmzZMoKDg80tiqCUEDqvj9D50ue3335j3LhxTJ06lWPHjtGsWTO6detGQkKCuUWrdDRt2pQbN27ofvbts7zeaiK1vgS4ffs2Xl5e7NmzhyeeeMLc4pQpqamptGzZkq+
++ooZM2bQvHlzFixYYG6xBKWM0Hmh86VNaGgorVu35ssvvwTk7gS+vr688847fPDBB2aWrvIwbdo0NmzYwIkTJ8wtilkRnqESICkpCchrKGhJjBw5kh49etClSxdziyIoQ4TOC50vTbKzszl69Kje31ipVNKlSxcOHjxoRskqJ+fPn6dmzZrUrVuXAQMGEBcXZ26RyhyzV6Cu6Gg0GsaMGUNYIv1a0AAAErNJREFUWBiBgYHmFqdMWbVqFceOHSMyMtLcogjKEKHzQudLmzt37qBWqwt1IvD29ubs2bNmkqpyEhoayooVK2jUqBE3btxg+vTptG/fnqioKJydnc0tXpkhjKFHZOTIkURFRVncGmt8fDyjR49m27Zt2Nvbm1scQRkidF7ovKDy0L17d93/g4ODCQ0NpU6dOqxevZrBgwebUbKyRRhDj8CoUaP466+/2Lt3Lz4+PuYWp0w5evQoCQkJtGzZUrdNrVazd+9evvzyS7KysrCysjKjhILSQOi80PmywMPDAysrK27duqW3/datW1SvXt1MUlkGVatWpWHDhly4cMHcopQpImaoGEiSxKhRo1i/fj07d+7E39/f3CKVOZ07d+bUqVOcOHFC9xMSEsKAAQM4ceKEuClUMoTOC50vS2xtbWnVqhU7duzQbdNoNOzYsYO2bduaUbLKT2pqKrGxsdSoUcPcopQpwjNUDEaOHMnKlSv5448/cHZ25ubNmwC4urri4OBgZunKBmdn50LxIk5OTlSrVs3i4kgsAaHzQufLmnHjxjFw4EBCQkJo06YNCxYsIC0tjUGDBplbtErF+PHjee6556hTpw7Xr19n6tSpWFlZ0a9fP3OLVqYIY6gYLFmyBICOHTvqbV++fDlvvPFG2QskEJQyQucFZU3fvn25ffs2U6ZM4ebNmzRv3pwtW7YUCqoWPBpXr16lX79+3L17F09PTx5//HEOHTqEp6enuUUrU0SdIYFAIBAIBBaNiBkSCAQCgUBg0QhjSCAQCAQCgUUjjCGBQCAQCAQWjTCGBAKBQCAQWDTCGBIIBAKBQGDRCGNIIBAIBAKBRSOMIYFAIBAIBBaNMIYEAoFAIBBYNMIYMjNvvPEGvXr1KrHxFAoFGzZsKHL/5cuXUSgUnDhx4oHjdOzYkTFjxph8/ezsbOrXr8+BAwdMPteUa/j5+XHkyJFSu4ag9BA6bzpC5ysWu3fvRqFQcP/+/SKPeZjeliXTpk2jefPmxTr3tddeY+bMmSUrUAFeeeUV5s6dW6rXEMZQJePGjRt0797d6OONmbSmsHTpUvz9/WnXrl2JjGcIW1tbxo8fz4QJE0rtGoKKg9B5QWmxYsUKqlatam4xSpSSNML+++8/Nm3axLvvvlsi4xXF5MmT+fTTT0lKSiq1awhjqJJRvXp17OzszHJtSZL48ssvGTx4cKlfa8CAAezbt4/Tp0+X+rUE5Ruh8wKBefjiiy946aWXqFKlSqleJzAwkHr16vHzzz+X2jUs2hhas2YNQUFBODg4UK1aNbp06UJaWppu/7fffktAQAD29vY0btyYr776SrdP63pftWoV7dq1w97ensDAQPbs2aM7Rq1WM3jwYPz9/XFwcKBRo0YsXLjQaPkkScLT05M1a9botjVv3pwaNWroXu/btw87OzvS09OBwlb/4cOHadGiBfb29oSEhHD8+HG99/Dkk08C4ObmhkKh0Gu6qdFo+N///oe7uzvVq1dn2rRpD5T36NGjxMbG0qNHD73t2kaA7u7uODk5ERISQkREBJDnnv3++++pXbs2VapU4e2330atVvPZZ59RvXp1vLy8+PTTT/XGdHNzIywsjFWrVj38DynQIXRe6Lyl0LFjR0aNGsWoUaNwdXXFw8ODjz76iPztOLOyshg/fjy1atXCycmJ0NBQdu/eDcgexEGDBpGUlIRCoUChUOj04aeffiIkJARnZ2eqV69O//79SUhIeCR54+Pjefnll6latSru7u707NmTy5cv6/Z
rl5c///xzatSoQbVq1Rg5ciQ5OTm6Y27cuEGPHj1wcHDA39+flStX4ufnx4IFCwDw8/MDoHfv3igUCt1rLT/99BN+fn64urryyiuvkJKSUqS8arWaNWvW8Nxzz+ltz8rKYsKECfj6+mJnZ0f9+vX57rvvgDyv7NatW2nRogUODg506tSJhIQENm/eTEBAAC4uLvTv3183v7U899xzpav7koVy/fp1ydraWpo3b5506dIl6eTJk9LixYullJQUSZIk6eeff5Zq1KghrV27Vrp48aK0du1ayd3dXVqxYoUkSZJ06dIlCZB8fHykNWvWSNHR0dJbb70lOTs7S3fu3JEkSZKys7OlKVOmSJGRkdLFixeln3/+WXJ0dJR+++03nRwDBw6UevbsWaScffr0kUaOHClJkiTdu3dPsrW1lVxdXaUzZ85IkiRJM2bMkMLCwnTHA9L69eslSZKklJQUydPTU+rfv78UFRUl/fnnn1LdunUlQDp+/LikUqmktWvXSoAUExMj3bhxQ7p//74kSZLUoUMHycXFRZo2bZp07tw56YcffpAUCoX0zz//FCnrvHnzpMaNG+ttS0lJkerWrSu1b99e+vfff6Xz589Lv/32m3TgwAFJkiRp6tSpUpUqVaQXX3xROn36tLRx40bJ1tZW6tatm/TOO+9IZ8+elb7//nsJkA4dOqQ39oQJE6QOHToUKY9AH6HzQuctiQ4dOkhVqlSRRo8eLZ09e1ani19//bXumLfeektq166dtHfvXunChQvSnDlzJDs7O+ncuXNSVlaWtGDBAsnFxUW6ceOGdOPGDd1c+e6776RNmzZJsbGx0sGDB6W2bdtK3bt31427a9cuCZASExOLlC+/3mZnZ0sBAQHSm2++KZ08eVKKjo6W+vfvLzVq1EjKysqSJEmeNy4uLtLw4cOlM2fOSH/++Weh99OlSxepefPm0qFDh6SjR49KHTp0kBwcHKT58+dLkiRJCQkJEiAtX75cunHjhpSQkCBJUp5O9unTRzp16pS0d+9eqXr16tKkSZOKlP/YsWMSIN28eVNv+8svvyz5+vpK69atk2JjY6Xt27dLq1at0vu7PPbYY9K+ffukY8eOSfXr15c6dOggde3aVTp27Ji0d+9eqVq1atKsWbP0xt28ebNka2srZWZmFinTo2CxxtDRo0clQLp8+bLB/fXq1ZNWrlypt+2TTz6R2rZtK0lS3o0h/weWk5Mj+fj4SLNnzy7yuiNHjpReeOEF3euH3RgWLVokNW3aVJIkSdqwYYMUGhoq9ezZU1qyZIkkSbLy51fY/BNs2bJlUrVq1aSMjAzd/iVLluhuDJJU9KTt0KGD9Pjjj+tta926tTRhwoQiZR09erTUqVMnvW3Lli2TnJ2dpbt37xo8Z+rUqZKjo6OUnJys29atWzfJz89PUqvVum2NGjWSwsPD9c5duHCh5OfnV6Q8An2Ezh+XJEnovKXQoUMHKSAgQNJoNLptEyZMkAICAiRJkqQrV65IVlZW0rVr1/TO69y5szRx4kRJkiRp+fLlkqur60OvFRkZKQE6Y8lUY+inn36SGjVqpCdrVlaW5ODgIG3dulWSJHne1KlTR1KpVLpjXnrpJalv376SJEnSmTNnJECKjIzU7T9//rwE6IyhgtfVYkgn33//fSk0NLRI+devXy9ZWVnpyRwTEyMB0rZt2wyeo/27bN++XbctPDxcAqTY2FjdtmHDhkndunXTO/e///574PfXo2Kxy2TNmjWjc+fOBAUF8dJLL/HNN9+QmJgIQFpaGrGxsQwePJgqVarofmbMmEFsbKzeOG3bttX939rampCQEM6cOaPbtnjxYlq1aoWnpydVqlTh66+/Ji4uzmg5O3ToQHR0NLdv32bPnj107NiRjh07snv3bnJycjhw4AAdO3Y0eO6ZM2cIDg7G3t7eoLwPIzg4WO91jRo1HugKzsjI0LsWwIkTJ2jRogXu7u5Fnufn54ezs7Putbe3N02aNEG
pVOptK3htBweHQq5UQdEInX84QucrF4899hgKhUL3um3btpw/fx61Ws2pU6dQq9U0bNhQT+f37NlTSOcLcvToUZ577jlq166Ns7MzHTp0ADBJz/Pz33//ceHCBZydnXVyuLu7k5mZqSdL06ZNsbKy0r3Or58xMTFYW1vTsmVL3f769evj5uZmlAwFddIY3bezs9P7+544cQIrKyvd36Mo8s8zb29vHB0dqVu3rt42Q7oPlJr+W5fKqBUAKysrtm3bxoEDB/jnn3/44osv+PDDD4mIiMDR0RGAb775htDQ0ELnGcuqVasYP348c+fOpW3btjg7OzNnzhxd7IAxBAUF4e7uzp49e9izZw+ffvop1atXZ/bs2URGRpKTk1NqWSw2NjZ6rxUKBRqNpsjjPTw8OHXqlN42rQKbeh1jrn3v3j08PT0fOr5ARuj8wxE6bzmkpqZiZWXF0aNHC+n4gwKC09LS6NatG926deOXX37B09OTuLg4unXrRnZ2drFladWqFb/88kuhffk/b1P10xSKo/vp6elkZ2dja2sLGKf7Ba9liu4Dpab/FusZAvkPHhYWxvTp0zl+/Di2trasX78eb29vatasycWLF6lfv77ej7+/v94Yhw4d0v1fpVJx9OhRAgICANi/fz/t2rXj7bffpkWLFtSvX/+hTxyGZGzfvj1//PEHp0+f5vHHHyc4OJisrCyWLVtGSEgITk5OBs8NCAjg5MmTZGZmGpQX0CmxWq02SS5DtGjRgrNnz+oFKAYHB3PixAmdIpckUVFRtGjRosTHrcwInRc6b0kUNMIPHTpEgwYNsLKyokWLFqjVahISEgrpfPXq1QFZVwrqydmzZ7l79y6zZs2iffv2NG7c+JGDp1u2bMn58+fx8vIqJIurq6tRYzRq1AiVSqWXMHDhwgWd91eLjY1Niei+ti5RdHS0bltQUBAajUYvqaKkiIqKwsfHBw8PjxIfGyzYGIqIiGDmzJkcOXKEuLg41q1bx+3bt3Vf6tOnTyc8PJxFixZx7tw5Tp06xfLly5k3b57eOIsXL2b9+vWcPXuWkSNHkpiYyJtvvglAgwYNOHLkCFu3buXcuXN89NFHREZGmixrx44d+fXXX2nevDlVqlRBqVTyxBNP8MsvvzzQHdm/f38UCgVDhgwhOjqaTZs28fnnn+sdU6dOHRQKBX/99Re3b98mNTXVZPm0PPnkk6Smpuql/vbr14/q1avTq1cv9u/fz8WLF1m7di0HDx4s9nW0/Pvvv3Tt2vWRx7EUhM7LCJ23HOLi4hg3bhwxMTH8+uuvfPHFF4wePRqAhg0bMmDAAF5//XXWrVvHpUuXOHz4MOHh4fz999+AvHSUmprKjh07uHPnDunp6dSuXRtbW1u++OILLl68yMaNG/nkk08eSc4BAwbg4eFBz549+ffff7l06RK7d+/m3Xff5erVq0aN0bhxY7p06cLQoUM5fPgwx48fZ+jQoTg4OOgtZfn5+bFjxw5u3rxZyFAyBU9PT1q2bMm+ffv0xh44cCBvvvkmGzZs0L2P1atXF/s6Wkpb9y3WGHJxcWHv3r0888wzNGzYkMmTJzN37lxd8ba33nqLb7/9luXLlxMUFESHDh1YsWJFoafkWbNmMWvWLJo1a8a+ffvYuHGjznIdNmwYffr0oW/fvoSGhnL37l3efvttk2Xt0KEDarVaL06iY8eOhbYVpEqVKvz555+cOnWKFi1a8OGHHzJ79my9Y2rVqsX06dP54IMP8Pb2ZtSoUSbLp6VatWr07t1bz9Vra2vLP//8g5eXF8888wxBQUHMmjXLpKUXQxw8eJCkpCRefPHFRxrHkhA6LyN03nJ4/fXXycjIoE2bNowcOZLRo0czdOhQ3f7ly5fz+uuv895779GoUSN69epFZGQktWvXBqBdu3YMHz6cvn374unpyWeffYanpycrVqzg999/p0mTJsyaNauQwW0qjo6O7N27l9q1a9OnTx8CAgIYPHgwmZmZuLi
4GD3Ojz/+iLe3N0888QS9e/dmyJAhODs768W1zZ07l23btuHr6/vIXsa33nqr0NLekiVLePHFF3n77bdp3LgxQ4YM0SvfURwyMzPZsGEDQ4YMeaRxHoRCyu/fFRjN5cuX8ff35/jx48UuY14ZOXnyJE899RSxsbGlWoirb9++NGvWjEmTJpXaNQT6CJ03jND58knHjh1p3ry5rsaOJXL16lV8fX3Zvn07nTt3LvHxMzIyaNSoEb/99ptJiQqmsmTJEtavX88///xTatewWM+QoHQIDg5m9uzZXLp0qdSukZ2dTVBQEGPHji21awgExiJ0XlBe2LlzJxs3buTSpUscOHCAV155BT8/P5544olSuZ6DgwM//vgjd+7cKZXxtdjY2PDFF1+U6jUsNptMUHrkr+hbGtja2jJ58uRSvYZAYApC5wXlgZycHCZNmsTFixdxdnamXbt2/PLLL4WytUqSBy1blxRvvfVWqV9DLJMJBAKBQCCwaMQymUAgEAgEAotGGEMCgUAgEAgsGmEMCQQCgUAgsGiEMSQQCAQCgcCiEcaQQCAQCAQCi0YYQwKBQCAQCCwaYQwJBAKBQCCwaIQxJBAIBAKBwKL5P3zNmSO6hndVAAAAAElFTkSuQmCC","text/plain":["<Figure size 640x480 with 6 Axes>"]},"metadata":{},"output_type":"display_data"}],"source":["import numpy as np\n","import matplotlib.pyplot as plt\n","\n","from sklearn.datasets import load_iris\n","from sklearn.tree import DecisionTreeClassifier\n","from sklearn.inspection import DecisionBoundaryDisplay\n","\n","\n","# Parameters\n","n_classes = 3\n","plot_colors = \"ryb\"\n","plot_step = 0.02\n","\n","\n","for pairidx, pair in enumerate([[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]):\n"," # We only take the two corresponding features\n"," X = iris.data[:, pair]\n"," y = iris.target\n","\n"," # Train\n"," clf = DecisionTreeClassifier().fit(X, y)\n","\n"," # Plot the decision boundary\n"," ax = plt.subplot(2, 3, pairidx + 1)\n"," plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)\n"," DecisionBoundaryDisplay.from_estimator(\n"," clf,\n"," X,\n"," cmap=plt.cm.RdYlBu,\n"," response_method=\"predict\",\n"," ax=ax,\n"," xlabel=iris.feature_names[pair[0]],\n"," ylabel=iris.feature_names[pair[1]],\n"," )\n","\n"," # Plot the training points\n"," for i, color in zip(range(n_classes), plot_colors):\n"," idx = np.where(y == i)\n"," plt.scatter(\n"," X[idx, 0],\n"," X[idx, 1],\n"," c=color,\n"," label=iris.target_names[i],\n"," cmap=plt.cm.RdYlBu,\n"," edgecolor=\"black\",\n"," s=15,\n"," )\n","\n","plt.suptitle(\"Decision surface 
of decision trees trained on pairs of features\")\n","plt.legend(loc=\"lower right\", borderpad=0, handletextpad=0)\n","_ = plt.axis(\"tight\")"]},{"cell_type":"markdown","metadata":{},"source":["Display the structure of a single decision tree trained on all the features\n","together.\n","\n"]},{"cell_type":"code","execution_count":47,"metadata":{"collapsed":false,"jupyter":{"outputs_hidden":false},"trusted":true},"outputs":[{"data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAgMAAAGbCAYAAABZBpPkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACtD0lEQVR4nOzdd3RUxdvA8e9ustn0XoAUCAFJ6E1AehGQ3hEBaVIFBOmgqEgTpCMYmjSR3hFBfCnSa+gJBAiEFAjpvWz2vn/kl5UlHZJsQuZzjkf2lpnn3tzdfXbuzB2ZJEkSgiAIgiCUWHJdByAIgiAIgm6JZEAQBEEQSjiRDAiCIAhCCSeSAUEQBEEo4UQyIAiCIAglnEgGBEEQBKGEE8mAIAiCIJRwIhkQBEEQhBJOJAOCIAiCUMKJZKCYevr0KTKZjE2bNuVpv+bNm9O8efMCiUnInEwm44cfftBJ3cX1712uXDkGDRqkeX369GlkMhmnT59+6zJlMhljxox59+By8Gbs2dm0aRMymYynT5/mS90///wz5cuXR09Pj5o1a+ZLmULJIJKBt5T+Jk7/z9DQkDJlytC2bVtWrFhBTEyMrkMsUu7fv88PP/yQbx96+eno0aM6+7IW8teFCxf44YcfiIyM1HUohe7vv/9mypQpNGrUiI0bNzJv3rwCqUe8X95P+roOoLj78ccfcXV1JSUlhRcvXnD69GnGjx/PkiVLOHToENWrVy+QesuWLUtCQgIKhSJP+/39998FEk9O7t+/z6xZs2jevDnlypXTSQxZOXr0KKtWrSqwD7iEhAT09cVbrTBcuHCBWbNmMWjQICwtLXUSw4MHD5DLc/c76/PPP6dPnz4olcp3rvfkyZPI5XI2bNiAgYHBO5eXlYJ+vwi6IT6h3lG7du2oW7eu5vX06dM5efIkHTt2pHPnznh7e2NkZJTv9aa3RuRVQX5I5BdJkkhMTCyQ8/auVCoVarU6T+fxbf5OQvGVmy/2uLg4TExM0NPTQ09PL1/qDQkJwcjIqFi8xzOTfk4E3RC3CQpAy5YtmTlzJs+ePeP333/XWufj40PPnj2xtrbG0NCQunXrcujQoQxlREZG8vXXX1OuXDmUSiVOTk4MGDCA0NBQIPM+Ay9evGDw4ME4OTmhVCopXbo0Xbp00Wqaz+weckhICF988QUODg4YGhpSo0YNNm/erLVNen2LFi1i7dq1uLm5oVQq+fDDD7l69Wq252PTpk306tULgBYtWmhuraTf/y1XrhwdO3bk+PHj1K1bFyMjI9asWaM5D+PHj8fZ2RmlUkmFChVYsGABarVaqw61Ws2yZcuoUqUKhoaGODg4MGLECCIiIrKNbdCgQaxatQpA67bPm8e8bNkyzTHfv3+f5ORkvvvuO+rUqYOFhQUmJiY0adKEU6dOZajjzT4DP/zwAzKZjEePHml+wVpYWDB48GDi4+Mz7P/7779Tp04djIyMsLa2pk+fPjx//jzDdul/FyMjI+rVq8fZs2ezPfbXqVQqZs+erTnGcuXKMWPGDJKSkrS2S/9bnTt3j
nr16mFoaEj58uXZsmVLrupZtGgRDRs2xMbGBiMjI+rUqcOePXtyHWd2fvjhByZPngyAq6ur5m/55q2pAwcOULVqVZRKJVWqVOHYsWMZygoMDGTIkCE4ODhotvvtt99yFcebfQbSbymeOXOGL7/8Ent7e5ycnLTWvR7jtWvXaNu2Lba2thgZGeHq6sqQIUOyrVMmk7Fx40bi4uI0x/36Z0NurqGzZ8/Sq1cvXFxcUCqVODs78/XXX5OQkKDZJrv3S1b9OjL7rBo0aBCmpqY8fvyY9u3bY2ZmRr9+/YDcv5ff5jwJWRMtAwXk888/Z8aMGfz9998MGzYMgHv37tGoUSMcHR2ZNm0aJiYm7Nq1i65du7J37166desGQGxsLE2aNMHb25shQ4ZQu3ZtQkNDOXToEAEBAdja2mZaZ48ePbh37x5jx46lXLlyhISEcOLECfz9/bNsmk9ISKB58+Y8evSIMWPG4Orqyu7duxk0aBCRkZGMGzdOa/s//viDmJgYRowYgUwmY+HChXTv3p0nT55kecuiadOmfPXVV6xYsYIZM2bg4eEBoPk/pDWtfvbZZ4wYMYJhw4ZRqVIl4uPjadasGYGBgYwYMQIXFxcuXLjA9OnTCQ4OZtmyZZr9R4wYwaZNmxg8eDBfffUVfn5+/PLLL3h5eXH+/PksYxsxYgRBQUGcOHGCrVu3ZrrNxo0bSUxMZPjw4SiVSqytrYmOjmb9+vV89tlnDBs2jJiYGDZs2EDbtm25cuVKrjpv9e7dG1dXV+bPn8+NGzdYv3499vb2LFiwQLPN3LlzmTlzJr1792bo0KG8evWKlStX0rRpU7y8vDRN4Rs2bGDEiBE0bNiQ8ePH8+TJEzp37oy1tTXOzs45xjJ06FA2b95Mz549mThxIpcvX2b+/Pl4e3uzf/9+rW0fPXpEz549+eKLLxg4cCC//fYbgwYNok6dOlSpUiXbepYvX07nzp3p168fycnJ7Nixg169enHkyBE6dOiQY5zZ6d69Ow8fPmT79u0sXbpU8z6xs7PTbHPu3Dn27dvHl19+iZmZGStWrKBHjx74+/tjY2MDwMuXL2nQoIGmw6GdnR1//fUXX3zxBdHR0YwfP/6t4vvyyy+xs7Pju+++Iy4uLtNtQkJCaNOmDXZ2dkybNg1LS0uePn3Kvn37si1769atrF27litXrrB+/XoAGjZsCOT+Gtq9ezfx8fGMGjUKGxsbrly5wsqVKwkICGD37t1A7t4vuaVSqWjbti2NGzdm0aJFGBsba+rI6b38tudJyIYkvJWNGzdKgHT16tUst7GwsJBq1aqled2qVSupWrVqUmJiomaZWq2WGjZsKFWsWFGz7LvvvpMAad++fRnKVKvVkiRJkp+fnwRIGzdulCRJkiIiIiRA+vnnn7ONu1mzZlKzZs00r5ctWyYB0u+//65ZlpycLH300UeSqampFB0drVWfjY2NFB4ertn24MGDEiAdPnw423p3794tAdKpU6cyrCtbtqwESMeOHdNaPnv2bMnExER6+PCh1vJp06ZJenp6kr+/vyRJknT27FkJkLZt26a13bFjxzJd/qbRo0dLmb0V0o/Z3NxcCgkJ0VqnUqmkpKQkrWURERGSg4ODNGTIEK3lgPT9999rXn///fcSkGG7bt26STY2NprXT58+lfT09KS5c+dqbXfnzh1JX19fszw5OVmyt7eXatasqRXT2rVrJUDr752ZmzdvSoA0dOhQreWTJk2SAOnkyZOaZel/q3///VezLCQkRFIqldLEiROzrUeSJCk+Pl7rdXJyslS1alWpZcuWWsvLli0rDRw4UPP61KlTWV4/r/v5558lQPLz88uwDpAMDAykR48eaZbdunVLAqSVK1dqln3xxRdS6dKlpdDQUK39+/TpI1lYWGQ4hje9GXv6Z0Xjxo0llUqltW36uvR49+/fn+PnSlYGDhwomZiYaC3L7TUkSRn/NpIkSfPnz5dkMpn07NkzzbKs3i9Z/Y3e/
KxKjxWQpk2bprVtbt/L73KehMyJ2wQFyNTUVDOqIDw8nJMnT9K7d29iYmIIDQ0lNDSUsLAw2rZti6+vL4GBgQDs3buXGjVqaFoKXpfeJPem9HuFp0+fzrFp/HVHjx6lVKlSfPbZZ5plCoWCr776itjYWM6cOaO1/aeffoqVlZXmdZMmTQB48uRJruvMjKurK23bttVatnv3bpo0aYKVlZXmfIWGhvLxxx+TmprKv//+q9nOwsKC1q1ba21Xp04dTE1NM226z4sePXpo/boE0NPT09ybVavVhIeHo1KpqFu3Ljdu3MhVuSNHjtR63aRJE8LCwoiOjgZg3759qNVqevfurXVcpUqVomLFiprjunbtGiEhIYwcOVLrfvGgQYOwsLDIMY6jR48CMGHCBK3lEydOBODPP//UWl65cmXN3x3SfnlXqlQpV9fA6/1AIiIiiIqKokmTJrk+Z+/q448/xs3NTfO6evXqmJuba2KXJIm9e/fSqVMnJEnSOu9t27YlKirqrWMdNmxYjv0D0n+lHzlyhJSUlLeq53W5vYZA+28TFxdHaGgoDRs2RJIkvLy83jmWzIwaNUrrdW7fy/l9ngRxm6BAxcbGYm9vD6Q1rUqSxMyZM5k5c2am24eEhODo6Mjjx4/p0aNHnupSKpUsWLCAiRMn4uDgQIMGDejYsSMDBgygVKlSWe737NkzKlasmKH3c3oT/rNnz7SWu7i4aL1OTwzykoBkxtXVNcMyX19fbt++neGLOF1ISIhmu6ioKM25zmq7/IwNYPPmzSxevBgfHx+tD6Sstn9TdufS3NwcX19fJEmiYsWKme6ffusj/W/05nYKhYLy5cvnGMezZ8+Qy+VUqFBBa3mpUqWwtLTM8RpIjz0318CRI0eYM2cON2/e1OqPkFWSm99yiv3Vq1dERkaydu1a1q5dm2kZb3s95ea6aNasGT169GDWrFksXbqU5s2b07VrV/r27ftWIw5yew0B+Pv7891333Ho0KEMf8uoqKg8150TfX19Td+J1+PNzXs5v8+TIJKBAhMQEEBUVJTmAza9w9ukSZMy/AJO9+aHcV6NHz+eTp06ceDAAY4fP87MmTOZP38+J0+epFatWu9UdrqsftlIkvRO5WY2ckCtVtO6dWumTJmS6T4ffPCBZjt7e3u2bduW6XZZJRPvEtvvv//OoEGD6Nq1K5MnT8be3h49PT3mz5/P48ePc1VuTudSrVYjk8n466+/Mt3W1NQ0D0eRs9x+Ib/tNXD27Fk6d+5M06ZNWb16NaVLl0ahULBx40b++OOPPMf7NnJzzgH69+/PwIEDM932bYcL52Z0jEwmY8+ePVy6dInDhw9z/PhxhgwZwuLFi7l06VKe/+a5vYZSU1Np3bo14eHhTJ06FXd3d0xMTAgMDGTQoEEZOuxmFXtmUlNTM12uVCoz/AjJ7Xs5v8+TIJKBApPeuSb9iz/9F5pCoeDjjz/Odl83Nzfu3r37VvW6ubkxceJEJk6ciK+vLzVr1mTx4sUZRjWkK1u2LLdv30atVmu9MX18fDTr88Pb/PJzc3MjNjY2V+frn3/+oVGjRm81HPFtYtuzZw/ly5dn3759Wvt///33eS4rK25ubkiShKurqybxyUz638jX15eWLVtqlqekpODn50eNGjWyrads2bKo1Wp8fX21OnW+fPmSyMjIfLsG9u7di6GhIcePH9f69bZx48Z8KR/evYXBzs4OMzMzUlNTc7zuClKDBg1o0KABc+fO5Y8//qBfv37s2LGDoUOH5qmc3F5Dd+7c4eHDh2zevJkBAwZolp84cSLDtlmd4/SWrTcf+PRmy1JO8eblvZxf50kQQwsLxMmTJ5k9ezaurq6a4TL29vY0b96cNWvWEBwcnGGfV69eaf7do0cPbt26laEXN2T96ys+Pp7ExEStZW5ubpiZmWUYHva69u3b8+LFC3bu3KlZplKpWLlyJaampjRr1iz7g82l9PHDeXkyXO/evbl48SLHjx/Ps
C4yMhKVSqXZLjU1ldmzZ2fYTqVS5Vjn28SW/ivr9b/H5cuXuXjxYq7LyEn37t3R09Nj1qxZGf7ukiQRFhYGQN26dbGzs8PT05Pk5GTNNps2bcrVMbVv3x5Aa3QGwJIlSwDeuZd/Oj09PWQymdYvxadPn3LgwIF8KR/e7m/5Oj09PXr06MHevXszTchff58WhIiIiAx/6/SRKdm9j7OS22sos+tZkiSWL1+eocysznHZsmXR09PT9OVJt3r16lzHm9v3cn6fJ0G0DLyzv/76Cx8fH1QqFS9fvuTkyZOcOHGCsmXLcujQIa0HzqxatYrGjRtTrVo1hg0bRvny5Xn58iUXL14kICCAW7duATB58mT27NlDr169GDJkCHXq1CE8PJxDhw7h6emZ6S+9hw8f0qpVK3r37k3lypXR19dn//79vHz5kj59+mQZ//Dhw1mzZg2DBg3i+vXrlCtXjj179nD+/HmWLVuGmZlZvpynmjVroqenx4IFC4iKikKpVNKyZcss7w2mn4dDhw7RsWNHzdC1uLg47ty5w549e3j69Cm2trY0a9aMESNGMH/+fG7evEmbNm1QKBT4+vqye/duli9fTs+ePbOsp06dOgB89dVXtG3bFj09vWzPGUDHjh3Zt28f3bp1o0OHDvj5+eHp6UnlypWJjY19u5P0Bjc3N+bMmcP06dN5+vQpXbt2xczMDD8/P/bv38/w4cOZNGkSCoWCOXPmMGLECFq2bMmnn36Kn58fGzduzFWfgRo1ajBw4EDWrl1LZGQkzZo148qVK2zevJmuXbvSokWLfDmeDh06sGTJEj755BP69u1LSEgIq1atokKFCty+fTtf6kj/W37zzTf06dMHhUJBp06d8vQwm59++olTp05Rv359hg0bRuXKlQkPD+fGjRv8888/hIeH50usmdm8eTOrV6+mW7duuLm5ERMTw7p16zA3N9ckbXmR22vI3d0dNzc3Jk2aRGBgIObm5uzduzfTfiBZvV8sLCzo1asXK1euRCaT4ebmxpEjR/LUxyK37+X8Pk8CYmjh20ofEpT+n4GBgVSqVCmpdevW0vLlyzVD8t70+PFjacCAAVKpUqUkhUIhOTo6Sh07dpT27NmjtV1YWJg0ZswYydHRUTIwMJCcnJykgQMHaoY7vTlcJzQ0VBo9erTk7u4umZiYSBYWFlL9+vWlXbt2aZX75tBCSZKkly9fSoMHD5ZsbW0lAwMDqVq1alrDgF6vL7Ohi7wxdC4r69atk8qXLy/p6elpDUEqW7as1KFDh0z3iYmJkaZPny5VqFBBMjAwkGxtbaWGDRtKixYtkpKTk7W2Xbt2rVSnTh3JyMhIMjMzk6pVqyZNmTJFCgoKyjYulUoljR07VrKzs5NkMplm2FR2x6xWq6V58+ZJZcuWlZRKpVSrVi3pyJEj0sCBA6WyZctme37Shxa+evVKa7s3h5ml27t3r9S4cWPJxMREMjExkdzd3aXRo0dLDx480Npu9erVkqurq6RUKqW6detK//77b6Z/78ykpKRIs2bNklxdXSWFQiE5OztL06dP1xoGK0lZ/61yW8+GDRukihUrSkqlUnJ3d5c2btyoOR9v1vM2QwslKW1IqqOjoySXy7XOJyCNHj06w/Zv1iVJae+J0aNHS87OzpJCoZBKlSoltWrVSlq7dm2O9Wc1tDCzYXBv/s1v3LghffbZZ5KLi4ukVCole3t7qWPHjtK1a9dyrDezoYXpcnMN3b9/X/r4448lU1NTydbWVho2bJhm6OXrnwdZvV8kSZJevXol9ejRQzI2NpasrKykESNGSHfv3s10aGFWsUpSzu/ldzlPQuZkkvSOPb8EQRAEQSjWRJ8BQRAEQSjhRDIgCIIgCCWcSAYEQRAEoYQTyYAgCIIglHAiGRAEQRCEEk4kA4IgCIJQwolkQBAEQRBKOJEMCIIgCEIJJ5IBQRAEQSjhRDIgCIIgCCWcSAYEQRAEoYQTyYAgCIIglHAiGRAEQRCEE
k4kA4IgCIJQwolkQBAEQRBKOJEMCIIgCEIJJ5IBQRAEQSjhRDIgCIIgCCWcSAYEQRAEoYTT13UAglCS+Pv7Exoaquswig1bW1tcXFx0HYYgvPdEMiAIhcTf3x8PDw/i4+N1HUqxYWxsjLe3t0gIBKGAiWRAEApJaGgo8fHx/P7773h4eOg6nCLP29ub/v37ExoaKpIBQShgIhkQhELm4eFB7dq1dR2GIAiChuhAKAiCIAglnEgGBEEQBKGEE8mAIBRTp0+fpn379vj7+7N+/XqGDx9Oz549uXnzJomJiQwaNIhffvkl2zISExNzrOfBgwcMGTKEwYMHs2DBggzrg4ODKV++PHfv3gXAzc2NkSNHsnbt2rc7MEEQCp3oMyAIxcT+/ft5/vw5lStX5sqVKzRs2JD27dvj4uLC0KFDGTp0KF5eXhw+fJiaNWsyaNAgzRf064KDg/n999+5c+cOY8eO5cMPP8y23kqVKvHbb78B0L179wzrFy5cSK9evTSvTU1NSUhIwNnZ+R2PWBCEwiKSAUEoJrp168aYMWM4e/YsO3bs4OzZs1rrVSoVK1as4Mcff8yyjA4dOlCxYkWGDRvG5MmTAQgPD8+wz5gxY6hQoYLWsh07dtCmTRutZRs3bqRnz56cOHFCs8zLywtJkujQoQPt2rV7q2MVBKFwidsEglBMSJJEZGQkcrkclUqltS4lJYUvv/yS8ePHZ/uLfPr06QCsXr2aY8eOkZqaCqQlEq//J0mS1n47duzg2bNnjBw5Umv5lStX2L17N8eOHWPNmjUAyOVy9PT0MDQ0RK1Wv/NxC4JQ8ETLgCAUE8uXL+ezzz6jVKlSzJw5k/bt22vWTZ06lYcPH/Lrr7/SqlUrrWb71zVu3JjGjRuTkJDAgQMHuHLlCh999FG2fQu8vLyYNGkSHTt2ZMKECSxZsoQJEyYwffp0fv31VwB++OEHevbsyYMHDzT9Cpo3b45cLn5vCEJxIJPe/AkgCEKBuHHjBnXq1OH69ev58pyB06dPc/fuXcaMGfNW64u6/D5fgiBkTaTtglBMWVpacuvWLfz9/TOsS0xM5MCBAzg6OuogMkEQihtxm0AQiqmaNWuybt06Jk+ezM8//6y1ztDQEHd3d6pXr56nMqdNm0Z8fDzGxsb89NNPmuVqtZqZM2cSHR1N3bp1GThwIOfOnWPHjh3o6ekxbdo0Hj16xLZt21CpVNy/f58LFy7ky3EKglDwRMuAIBQj3t7e9OnTh++++45PPvkEAD8/PwCqVavG4sWL6du3LwkJCbx48YKEhIRcl+3v709KSgorVqwgNTWV58+fa9YdPHiQgIAAFAoFTk5OACxbtgwTExNMTEywtramSZMmeHp60rFjRwYOHJiPRy0IQkETLQOCUIysX7+eBQsW4OjoSNu2bbXWOTk5MXHiRFauXMnNmzcz7JvTEMLAwEDNSAQXFxcCAgI0rx88eEDDhg0ZMWIEPXv2pFWrVty6dYsdO3Zw/Phxtm3bxpAhQwD4448/2LBhQ34fuiAIBUi0DAhCMSSTyZDJZFrLTExMAFAoFCQlJWW6X3ZDCB0dHQkICADg+fPnmhYASEs0rKysANDT0wPSJlzS19fHysqKmJgYIK11wcLCAjMzs3w6UkEQCoNoGRCEYmTo0KFMmzaNDz74QPPln1vW1tbZDiF0cXFBoVAwYcIElEolzs7OLFmyhBYtWtC9e3fGjh3L2bNnadq0KQD9+/dn1KhRxMXFsXjxYgA2bNjA4MGD3/4ABUHQCTG0UBAKSX4MlQsPD2fZsmWEhYXRqlWrTB8P/L4QQwsFofCIlgFBKEasra2zfdywIAjC2xB9BgShhNi0aRNHjhzJ1zJHjBhBtWrVNK9/+OEHPv30U0aOHElQUBCSJDFixAhGjx6tuZUgCELRI1oGBKEI++OPPzh9+jRmZmbMmzeP3bt34+XlRUxMDKtWrWLu3LlEREQQERFB9erVCQsLIyAggK1bt9KxY0dat
GjBo0ePGDVqlKbMZ8+esXjxYiRJws3NjbZt2zJr1ixcXFwYMGAAVatWzXV8a9asoWfPnprX+vr6GBgYoFAosLS05Ny5c1StWpWxY8fy+eefk5ycjIGBQb6eI0EQ3p1IBgShCHv8+DHVq1enS5cuKJVKIG20QGBgIF5eXgB8+umnVKlShcGDB7Nv3z6GDx9OREQEqampjBs3jpiYGKZNm8ZHH30EpE1SZGRkhJGREXfu3KFWrVpYWVnRt29frUQgt7MZvm7GjBnI5XIOHTrE+vXrsbOz0wxPtLe3JywsjNKlS+frORIE4d2JZEAQirCZM2dy69YtJk+ezOzZs9m1axeHDh1i1qxZxMfHA2Bubo5SqcTc3BwAAwMDkpKSUKvVpKamkpKSolWmWq3m888/13o6Yfny5dmwYQO3bt3SemDQm7Mj5tTfOH1iInt7e+7evUutWrW4desWAK9evcLGxuYtz4QgCAVJJAOCUIStXbsWX19f5HI5NjY2lC5dmoULF3LlyhWaNWuW7b4GBgbMmTMHX19fZsyYwY0bN4C0X/czZsygdOnSmJmZ0bRpUw4fPkx4eDitW7fW7J/TUESAb775Bi8vL0aOHMny5ctZvHgxz58/JzQ0lBUrVlCqVCn++OMPxo0bR40aNcQtAkEoosTQQkEoJIU9VK5nz57s2bOnwOspKGJooSAUHjGaQBDeU8U5ERAEoXCJZEAQirDXe+rnl4YNG3Lo0CEA3NzcGDlyJGvXrgXg1KlTDBw4kH79+hEUFJTp/oMGDWLo0KGMHDmSpKQk4uLiGDhwIMOGDWPbtm2Z7nP69GmaNGnCyJEjOX36NACLFy9mzJgxjBgxAkmSuHTpEjVr1iQ2Njbfj1kQhOyJZEAQdGTkyJGEhYWhVqvp06cPQUFBfPPNN4wcOZIDBw5obZueFHh6enL69Gm8vLwYN24co0eP5vfff89TvWXKlKFz584AmJqakpCQoOnx7+npycaNG5k+fXqWkw0ZGRkhk8mwtLREoVCwb98+evbsybp16zRJxptkMhmmpqYkJibi5OREcnIyN27c4JdffqFatWqcO3eOBg0aULNmzTwdiyAI+UN0IBQEHenduze7du2iYsWKtGzZEn19fZKSknBwcGDbtm107do1y32XLFmCm5sbAF5eXvTv31+z7scffyQ8PFzz+pNPPtFMd/wmLy8vJEmiQ4cOtGvXDkmSkMvllC1bVjNp0ZtWrVqFXC5nxYoVHDlyhICAAM2Dh9InMXpTkyZNaNasGS9fvmTChAksWrQIOzs7gGzrEgShcIhkQBB0pHnz5qxdu5bbt28zb948fvvtNzp37kz9+vXp0qWL1rbpQ/bi4uIASE5OZty4cZqZBF+XmpqqNSRQrVZnGUN6uYaGhqjVauRyOWq1Gn9/f61ZCzPbx97entjYWJycnAgICKBmzZpZ1pW+j5WVFUlJSdjY2BAaGgqkzXT4+jBHQRAKn0gGBEFH0n+BBwUFYWVlRcOGDfH09OT8+fMZhuA5OjqyaNEizp07R506dZg6dSpjx47FwcGBcuXKMXbsWM22s2bNylX9Dx48YMGCBUBaYiKXyxk+fDhDhw4lJSVFs27evHnMmDFDs9/EiRNJSEggIiKC9evXA2nDFf/88086deoE/NcfIP1BSfv27eP48eNERkYyZswYDAwMqF27NuPGjSMpKYkvv/zyLc+iIAj5QQwtFIRCUlSGyuVlyKGPjw8XLlxgyJAheapjypQpLFy4MM+xDRo0iF9++QVTU9Mic74EoSQQHQgFoYSxsrLKsqPfm9zd3fOcCABvlQhcunSJxMTELPsdCIJQcMRtAkEoZN7e3jqtP33SovQnEhYVBgYGTJkyRXN+dH2eBKEkEcmAIBQSW1tbjI2NtXr+C9kzNjbG1tZW12EIwntP9BkQhELk7++v6UVfUHx9fenXrx8jRozgiy++yNey161bx7p16/jjjz+ynb0wv9ja2uLi4lLg9QhCSSeSA
UF4j6jVapo0aUJ4eDg3b97U9ObPL0lJSdSoUQMbGxvOnj2rGTIoCELxJt7JgvAe+e2337hw4QKenp75nggAKJVKPD09uXDhAr/99lu+ly8Igm6IlgFBeE+EhITg7u5Oly5d2LhxY4HWNXDgQA4fPoyPjw/29vYFWpcgCAVPJAOC8J4YMGAAf/75Jw8ePCjwTnevXr2iUqVKdOrUic2bNxdoXYIgFDxxm0AQ3gOnTp1i69at/Pzzz4XS+97Ozo6FCxeyZcsWTp06VeD1CYJQsETLgCAUc0lJSVSvXh17e3vOnDlTaJ361Go1TZs2JTQ0lFu3bhVIHwVBEAqHaBkQhGJu4cKFPHnyBE9Pz0Lt3S+Xy/H09OTx48dv9cRBQRCKDtEyIAjFmK+vL9WqVePrr79m/vz5Oolh2rRpLFu2jLt37xbKswcEQch/IhkQhGJKkiTatm3Lo0ePuHv3LsbGxjqJIz4+nipVqlCxYkWOHz+OTCbTSRyCILw9cZtAEIqpHTt2cOLECX755RedJQKQ9sjgX375hRMnTrBz506dxSEIwtsTLQOCUAxFRkbi7u5OkyZN2L17t67DAdKmRj537hw+Pj5YWlrqOhxBEPJAtAwIQjE0Y8YM4uPjWbZsma5D0Vi+fDlxcXHMmDFD16EIgpBHIhkQhGLm8uXLeHp6MmfOHBwdHXUdjoajoyNz5szB09OTy5cv6zocQRDyQNwmEIRiRKVSUbduXfT19bl8+TJ6enq6DkmLSqWifv36qNVqrl69ir6+mCVdEIoD0TIgCMXIihUruHPnDp6enkUuEQDQ19fH09OTW7dusXLlSl2HIwhCLomWAUEoJp4/f46HhwdDhgxhxYoVug4nW2PHjmXjxo14e3vj7Oys63AEQciBSAYEoZjo1q0bly9fxtvbGwsLC12Hk62oqCg8PDxo0KAB+/bt03U4giDkQNwmEIRi4NChQxw4cIDly5cX+UQAwMLCgmXLlrF//34OHz7M48ePSUlJ0XVYgiBkQbQMCEIRFxcXR+XKlalSpQp//vlnsXnCnyRJtG/fnvv37xMbG8vixYsZNGiQrsMSBCEToquvIBRxP/zwAyEhIZw6darYJAIA3t7eALx48QKFQoGfn5+OIxIEISsiGRCEIuz27dssXbqU2bNnU758eV2HkyfW1tYEBweTkpJCcnKyJjkQBKHoEbcJBKGIUqvVNGrUiOjoaLy8vDAwMNB1SHmmUqlYsmQJ06dPx93dnXv37uk6JEEQMiGSAUEoYqKioli4cCHOzs6MGjWKM2fO0LRpU12H9U6ePn2KQqEoUk9MFAThPyIZEIQi5sCBA3Tr1g1zc3M6d+7Mb7/9hkKh0HVYgiC8x8TQQkEoYl6+fAlASkoKe/bsYdu2bTqOSBCE953oQCgIRcylS5cASE5OZuLEifTu3bvQ6vb39yc0NLTQ6itubG1tcXFx0XUYgpDvRDIgCEVMmTJlcHZ25siRI1SvXr3Q6vX398fDw4P4+PhCq7O4MTY2xtvbWyQEwntH9BkQBAGAGzduUKdOHX7//Xc8PDx0HU6R4+3tTf/+/bl+/Tq1a9fWdTiCkK9Ey4AgCFo8PDzEl50glDAiGRBKBHEvXJu49y0IwutEMiC89/z9/fFwr0R8QqKuQykyjI0M8fZ5UCAJwenTp1m4cCGenp48e/aMP/74g8DAQIYMGULXrl2ZNm0aKpWKRYsWZVlGYmIihoaGOdb1zTffsHv3bm7cuIGpqalm+f379zXTPJ84cYLHjx8zaNAg9PX10dfXZ/ny5SiVync/WEF4T4hkQHjvhYaGEp+QyMoeFahoa6TrcHTONzSBsXsfERoami/JwP79+3n+/DmVK1fmypUrNGzYkPbt2+Pi4oKLiwtNmjQhIiKCb7/9lq5duzJy5Eh++eWXDOVERESwY8cOLl26RJ8+fWjXrl2Odc+dO5fAwMAMyytXroynpyc3b97E2toaACMjI1QqFZaWluK5DYLwBpEMC
CVGRVsjqpUxzXlDIU+6devGmDFjOHv2LDt27ODs2bNa6zdt2sSWLVuYOXNmlmUMGzYMlUrFyJEjGTVqlGb5+PHjtbbr27cv9erVy3Vs69ev5+uvvwZg1apVyOVyVqxYwZEjR+jcuXOuyxGE95146JAgvIPZx59munzL1Rc8Dc/bbYl5J54x86gf804801oen5zKuH2PmHzwMftuv3rbUAuMJElERkYil8tRqVQZ1g8aNIjjx4+zfPnyLMsYM2YM9vb2/Pbbb+zdu5ekpCQgbW6D1//Ly+Cn+Ph4AgMDcXNzA0AuT/u4s7e3JzY2Ni+HKAjvPdEyIAi55PsqniWnAyhvY8jNwFi2fV4Z/8i0L61Wq27Sq6Y9d4JjWdTFjVexKSSmqHNddmBkEimpErPbuzL7+FMCo5JwtEi7p33UO5wOVaxpU8makbse0r26XYEc39tavnw5n332GaVKlWLmzJm0b99es27fvn2cOnWK+Ph4+vfvn2UZNWrUoEaNGqhUKv766y9OnjxJu3btMr2d8LolS5Zw8eJFxo8fz48//sj58+dRKpV07tyZnTt30qtXL822EydOJCEhgYiICNavX//uBy4I7xGRDAhCLv1xPYRvWpellJkBfbfe11pX2lzJyEZl+O1SMPdeZHxoT0R8CkvPBGgtG1yvFK42aX0YgmOSKfO/L39HCyXB0cmaZCA4Khl3e0sA9IpgW97rTfl16tTh9OnTmtfdu3ene/fuuS5LX1+fTp065Xr7CRMmMGHCBM3r17/8Bw8erLXt4sWLc12uIJQ0RfCjRRCKNpkMZDKZ1jIjg7S3kr6ejGRV5i0CqWpJ67/XG7xLmxkQHJ3WyhAUnUxp8/+mKy5tYUBwdDIA6mLwiDBLS0tu3bqFv79/pus3bdqkaboXBKFoEC0DgpBLfevYM+/EM8rbGGGsyFsebWWsYG6H8lmud7RUopDL+OHYU5R6MhwtlKy5EEQjVwvae1jzzZ9+/N/DCFpXsnrXwyhwNWvWZN26dZmumzx5Mj///HOG5Z6enrRu3TpPScK0adOIj4/H2NiYn376SbP89OnTzJw5kypVqtCnTx+aN2+e52MQhJJGJAOCkEu2JgpcbQwJj0+hV820+/brPq2k9f8BH5YCoKGrRZ7Ln966rNbrEQ3LaP69tFuFt4pZl7y9vZk1axYffPABV65c4dixY/j5+QFQrVo1Bg0axPXr19mwYQMvXrwgISEh12X7+/uTkpLCihUrmDx5Ms+fP8fZ2RlIa7UxNTUlMTERJyenAjk2QXjfiGRAEHLJyljB5JbiqX25tX79ehYsWICjoyNt27bVWufk5MTEiRNZuXIlN2/ezLBveHg4P/74o9ayMWPGUKFCWlIUGBio+fJ3cXEhICBA87pJkyY0a9aMly9fMmHCBDEFtCDkgugzIAgFZKdXCCceRORrmQ2X3WDq4Sf8fu0lAOf9ohi37xFj9vjy4n/9CooamUyWoY+FiYkJAAqFQjOM8E3ZDSt0dHQkICCtQ+bz58+1WgDShxBaWVllWbYgCNpEy4AgvGb/7VdcfBqNiYEe0z524ci9MO4GxxGXnMrcDq6s+DeQqAQVkYkqKjuYEBGfQlB0Mit7VGTANm8alrPgaXgiAz500JQZEJnEmgtBSBKUtTakeQULlpwOwNFCSc8adrg7GOc6PmMDPRJT1JSxSOtguPXqS1b3rMjDVwlsvxHC182LTrP40KFDmTZtGh988IHmyz+3rK2tsx1W6OLigkKhYMKECSiVSpydnVmyZAktWrTAz8+P48ePExkZyZgxY971MAShRBDJgCC85ml4Eh4OxrRxt0apn/YLU6EnIzg6mbvBcQB0qmpLJXsjJhx4zPo+lZhy6DGRCSrUahjaoDSxyanMO/GMOs5mAGy+8gJDfTmGCjk+L+OoWtoYC0N9ulaz1UoEchp+CPD3yOpIwIBt3rSsaIUEyOUynCyVmtEIRYWDgwMVK1YkNDSUgQMHArBnzx6t/
48cORLgrTr5zZ8/X+t1+hDDWrVq5Wk4oyAIIhkQBC1fN3fi3os45vz9jMktnTl8L4xNfd1Zcuo5Cf97iJCZUg8DPTmmSj0gLVlIVqlRSxKpkoQqVXv8n1qS6FHDjsql/vt17GJlyI4bIdx/GUfvmvaa5alvjB18cyShXJ7W3K7Ul6NWS8hloFZLBEYlUdq8aE28Y21tneG+vyAIRZNIBgThNb9fe4lfWAJyGVgZ6WNvqmD1uUC8AmNpUM48230VenKWnwnALzyRsU0cufO/loTB9Uvz0z/+2JspMDXQo345c048iCAyQUVTt/9GHeQ0/PBRaAKrz6VNyvNROQvkchn96jgw6dBjUlIlvnljNEJxsWnTJmxtbenYsWO+lTlixAguXLjAnTt3ALh7966mJWH69OlUrVo1y6GJglASiWRAEF7Tv66D1uuFndPGvX/Z2BHQHjK47H/D/dK/wA30ZUxp9d9og9dbAn7pWVGr3EZvMfSwgq0RS7pqDzFsXN6CxuXzXta7+uOPPzh9+jRmZmbMmzeP3bt34+XlRUxMDKtWrWLu3LlEREQQERFB9erVCQsLIyAggK1bt9KxY0datGjBo0ePtCYlevbsGYsXL0aSJNzc3Gjbti2zZs3CxcWFAQMGULVq1VzHt2bNGnr27Kl5vXz5clatWoVMJmPKlCl88803WQ5NFISSSCQDgpBP0p81UBI8fvyY6tWr06VLF5TKtNsTCoWCwMBAvLy8APj000+pUqUKgwcPZt++fQwfPpyIiAhSU1MZN24cMTExTJs2jY8++giA1atXY2RkhJGREXfu3KFWrVpYWVnRt29frUQgp2GHmYmKisLS0hKAmJiYbIcmCkJJJJIBQRDybObMmdy6dYvJkycze/Zsdu3axaFDh5g1axbx8WlzM5ibm6NUKjE3T7u9YmBgQFJSEmq1mtTUVFJSUrTKVKvVfP7551SvXl2zrHz58mzYsIFbt25pOiECGWZHzGk2QwsLC6KiopDJZJiZmWUYmti1a9e3PheC8D4QyYAgZGLYzgf5/ku/8/o7jGnsSBt3axouu0ETN0uqlTahf10HzvtFscvrFalqiW/blKXUa3MTpHubbeQymHbkCZ/VdsjXRxmvXbsWX19f5HI5NjY2lC5dmoULF3LlyhWaNWuW7b4GBgbMmTMHX19fZsyYwY0bN4C0X/czZsygdOnSmJmZ0bRpUw4fPkx4eDitW7fW7J/TsEOAb775Bi8vL0aOHMny5csZN24cY8eOBWDKlCmZDk0UhJJMJANCiTP18BOmtnLG0lCf0Xt9+b5tOTZdeUFEgooWFSz5xMNas216UrDl6gsq2BphbqjPLq8QVGqJOs5m9KiR++mEHcwMaOOeVvbbPC/gbbdp626dYbt3NXz4cK3Xa9asAdK+aEF7qOCmTZsANF/gSqWS2bNna9a/3hLw5tMCW7Ro8VbxzZ07l7lz52peV61alS1btmht8+bQREEoyUQyIJQ4narYcPhuGK42hjRytUBfLiM5VY2diYJ9t19pJQNvWnMhiHLWhgDcDY7TSgaWnn5ORMJ/zdctKljSomLmv8bf5nkB+bWNrqU/Y0AQhKJDJANCidOwnDnbrr/E+2U80z52YceNENpUsqaWkylDtvtobSv/32N045PTnjGQkirxRYPSWBplfOukqrWfE5DddMNv87yA/NpGEAThTSIZEEocuVyGk4WSFzHJWBrpU9fFjK1XX3LVPwaFnvZ0HaXMDPA8H8QV/2iqlzFhdOMyzDzqh62JAmdLJUMalNZsO6ll7u475/Z5ASv+DeCrpv/dCnjbbQpSz5498/2XfsOGDZk2bRqdO3fO1bMA3NzcaN26NbVr12b48OGcOnWKTZs2oVKp+PnnnylTpkyGfQYNGoS+vj76+vosX74clUrFl19+iYGBAc2bN6dfv354enryzz//iJYMoUQQyYBQIn3T5r8vyrrOZtT936OD06V3HpzVrhwAIxv994Wysof2M
wPyKjfPC3j0KgF7U4N33uZdjBw5krlz52qG9y1ZsoRVq1YRFhbGJ598otUDPz0p8PT0xN3dHQsLC80X8kcffUT//v1zXW+ZMmXo3LlzttMUv87U1JSEhATNOk9PT7Zv3879+/fZsGEDM2fOzLCPkZERKpUKS0tLFAoFu3btomfPnnTq1IlPP/2Ufv36MXLkSP7555+8nzhBKIZEMiAIhcTSUJ+/fcI1nQizU8HOiAp2Ru+8TUhMMjcCYulVM/cdHdP17t2bXbt2UbFiRVq2bIm+vj5JSUk4ODiwbdu2bIfjLVmyBDe3tAc2eXl5aSUDP/74I+Hh4ZrXn3zyCZ988kmGMnL7LAAvLy8kSaJDhw60a9cOSZKQy+WULVtWM3zwTatWrUIul7NixQqOHDlCQEAA1apVA0BPTy/nkyMI7xmRDAglhm9ogk7rT5/J8E5QbKHW27+OvVa9uT0PzZs3Z+3atdy+fZt58+bx22+/0blzZ+rXr0+XLl20tk2fNjguLu0RzMnJyYwbNw4rq4wdKFNTU7WeE6BWqzOtP7fPAkiv29DQELVajVwuR61W4+/vrzW1cWb72NvbExsbi5OTEwEBAdSsWTPLeAThfSaSAeG9Z2tri7GRIWP3PtJ1KEWGsZEhtra22W6T/us6KCgIKysrGjZsiKenJ+fPn8fAQPv2hKOjI4sWLeLcuXPUqVOHqVOnMnbsWBwcHChXrpxmjD/ArFmzchVjZs8COHHiBJaWlnz44YcAPHjwgAULFgBpyYtcLmf48OEMHTqUlJQUzbp58+YxY8YMTdkTJ04kISGBiIgI1q9fD6Q95+DPP/+kU6dOuYpPEN4nMimnR3cJwnvA39+f0NBQXYdRZNja2uLi4qK17MaNG9SpU4fr169Tu3ZtHUWWfafEefPmMXr0aCwsct8vwsfHhwsXLjBkyJB3iqWonB9BKAiiZUAoEVxcXDJ8+QnawsLCdB0CAFZWVhw6dIjOnTtnWPf6r/vccnd3x93dPc/7eXp6UqVKlTzvJwjFkWgZEIQS7uHDhyxevJiNGzeSkpLC77//joeHh67DKnK8vb3p378/H374IbNnz6ZNmzbI/vccCkEo7kQyIAgl1OXLl1m4cCH79+/H3t6eQYMGsXLlSs1EQ0JGhoaGVKxYkTt37lC9enWmTJlC7969USgUug5NEN6JSAYEoQSRJIm//vqLhQsXcubMGT744AMmT55M//79MTQ0FH0rcmBra4uzszOnT59m4cKFHDt2DBcXFyZMmMAXX3yBqamprkMUhLcikgFBKAGSk5PZsWMHP//8M3fv3qVBgwZMnTqVzp07a4bZCXl369YtFi1axPbt2zE3N2fMmDGMGTMGe3t7XYcmCHkikgFBeI/FxMSwbt06li5dSkBAAB07dmTq1Kk0atRI3O/OR8+ePWPZsmWsW7eO1NRUBg8ezMSJEzUPXhKEok4kA4LwHnrx4gUrVqxg9erVxMfH069fPyZNmiR6xxew8PBwVq9ezYoVKwgLC6NHjx5MmTKFunXr6jo0QciWSAYE4T3y8OFDFi1axObNm1EqlYwYMYJx48Zl+SQ+oWAkJCSwefNmFi1axOPHj2nRogVTpkyhbdu2okVGKJJEMiAI74HLly+zYMECDhw4gIODA+PHj2fEiBFYWlrqOrQSLTU1lf3797NgwQKuXbsmRiAIRZboOSQIxZRarebPP/+kWbNmNGjQgPv377Nu3TqePn3K1KlTRSJQBOjp6dGzZ0+uXLnCqVOncHR0pH///lSoUIHly5cTG1u481QIQlZEMiAIxUxycjKbN2+mevXqdOzYkeTkZPbv38/9+/f54osvUCqVug5ReINMJqN58+YcPXqUW7du0axZMyZNmoSLiwszZ84kJCRE1yEKJZy4TSAIxUR0dLRmZEBgYCCdOnViypQpYmRAMeXv78/SpUs1IxAGDRrExIkTqVChgq5DE0ogkQwIQhEXHBzMihUr+PXXX4mPj6d///5Mm
jSJypUr6zo0IR+Eh4fz66+/smLFCl69eqUZgZA+M6MgFAaRDAhCEfXgwQMWLVrEli1bUCqVjBw5knHjxuHo6Kjr0IQCkJCQwJYtW1i0aBGPHj2iefPmTJ06VYxAEAqF6DMgCEXMpUuX6N69Ox4eHhw5coQff/yR58+fs3DhQpEIvMeMjIwYMWIEPj4+7Nmzh7i4ONq1a0eNGjX4/fffSUlJ0XWIwntMJAOCUASo1WqOHDlC06ZN+eijjzKMDLCwsNB1iEIh0dPTo0ePHly+fJnTp0/j7OzM559/jpubG8uWLRMjEIQCIZIBQdCh5ORkNm3aRLVq1ejUqRMqlYoDBw6IkQECMpmMZs2a8eeff3L79m1atGjB5MmTcXFx4dtvv+Xly5e6DlF4j4g+A4KgA9HR0axdu5Zly5YRGBhI586dNSMDBCEr/v7+LFu2jLVr16JSqTQjECpWrKjr0IRiTiQDglCIgoODWb58Ob/++isJCQl8/vnnTJo0CQ8PD12HJhQjERER/PrrryxfvpxXr17RvXt3pkyZQr169XQdmlBMiWRAEArBgwcP+Pnnn9m6dasYGSDkm8TERM0IBF9fX5o1a8bUqVP55JNPxAgEIU9EnwFBKEAXL16kW7dueHh4cPToUWbPni1GBgj5xtDQkOHDh+Pt7c3evXtJSEigffv2VK9ena1bt4oRCEKuiWRAEPKZWq3m8OHDNGnShIYNG+Lj48P69evx8/NjypQpYmSAkO/09PTo3r07ly5d4syZM5QtW5YBAwbg5ubG0qVLiYmJ0XWIQhEnkgFByCdJSUls3LiRqlWr0rlzZ9RqNQcPHuTevXsMGTJEjAwQCpxMJqNp06YcOXKEO3fuaKZOdnFx4ZtvvhEjEIQsiT4DgvCOoqKiNCMDgoKCxMgAoUh5/vy5ZgRCSkoKAwcOZOLEiXzwwQe6Dk0oQkQyIAhvKSgoiOXLl+Pp6SlGBghFXkREBJ6enixfvpyQkBC6devGlClTqF+/vq5DE4oAcZtAEHLh1q1bJCcnA+Dj48PQoUNxdXXF09OTkSNH8vTpUzZs2CASAaHIsrKyYvr06Tx9+pQ1a9Zw584dGjRooJlaOf134c2bN0XHwxJIJAOCkIODBw9Ss2ZNVq1aRdeuXTUjA+bMmYO/vz8LFiygTJkyug5TEHLF0NCQYcOGaY1A6NChA9WrV2fTpk00atSIL7/8EtFoXLKI2wSCkI379+9Tt25djI2NCQsLw93dnSlTptC3b1/RIVB4L0iSxNmzZ1m4cCF//vknVlZWREREsHTpUsaPH6/r8IRCIpIBAX9/f0JDQ3UdRpFga2uLi4uL5rWVlRWRkZEYGhri6OjI8uXL6dChgw4jFISC8fLlS7p06cLDhw+JiIgA4MaNG9SqVUuzjfis+M+bnxXFnb6uAxB0y9/fn0oe7iTGJ+g6lCLB0NiIB94+mjf5sGHDCAoKwtbWlqSkJEqVKqXjCAWhYBgaGtKoUSPq1atHcnIyr169onz58pr1/v7+uFfyICExXodRFh1Ghsb4PPB+bxIC0TJQwt24cYM6depQa1U/TCs66DocnYr1fYnX6G1cv36d2rVr6zocQShS0j8relb6BXvjCroOR6dC4h+x58GY9+qzQrQMCACYVnTAsrqTrsMQBKGIszeuQBnT6roOQ8hnYjSBIAiCIJRwIhkQBEEQhBJO3CYQ3kno+Uc8XnWSagt78erMAyK9/EmJiKfi160xrWDP7Sl7sKzhhOsXTbIsIzUxBT1DRa7quz1lN3rGSqr80Pm/GC484tmWi+ibKnHqUQebj9w4+8lSLKo7YeRkRcWvPn7n4xQE4d08ibzAuYDVdK7wE48izxAQc5OElEiau4zD1rgCh3yn4WhWnQZlhmRZRoo6EYXcMFf1HfSdioGeMe3Kf69ZFhhzm9PPl2GgZ4KbZRNqO/TmV6/2lDGthqWhE82cx77zcRZXIhkQ8iT46G0SgiIx+6AUkV7+WNUth30rD
4ydrCjbrwFl+zUg6k4AL/++h0VVR5w//ZAYn+AM5SS+jCZgzzVivF/g+kVjLGvl3CM36PAtLGs6E/MwRDumI7epPLMjSgdzrg/fgs1HbugZG6BOTsXQQcwQKAi6cD/0L6KSArEz/oCAmJu4mNflA+uWWBo6UbdUP+qW6kdQ7B18wk/QwrQqtRx6ExLvk6GcmOSX3AzZy8s4HxqUGYKTWc0c67776giOZjV4Fe+rtfx5zHUaOY7A2bwOex+Mo7ZDbwz0jEmVkjEzKNkdqEUyIORJ6fbVuTN9L+GX/Kiz5nPCLj3RWq9WpeK3/iyVpnySZRmX+63DpLwtZft9RIXRLQFIjojj4ZK/tbZzHdIYE1c7AJJexRB1N5Cy/RpkSAZchzbBd/k/KCyMSE1Ie4zqR3tGIZPLuT58Cw5tKmNgZfLOxy4IQu5Vtm3H4UczeBp9hU/df+Vp1GWt9amSiotBG2hVdnKWZWy5+zk2Rq7ULdWPJk5fAhCfEsEp/6Va2zUoMxgbI1cAYpNfERx3l7ql+mVIBj6wbsVun9HIZHJaukwAYHC1XchlcnZ4j8DdujXGCqt3PvbiSPQZEPJEkiRSohOQyWWoVWqtdeqUVO5M24vr8KYYOWb9hqrwVSsAnm46T8hJb6TUtHIklVr7v9cGvYZdeExyaAwPl/xN2IVHxD7+LyEwLW9H9YW9qPDVxxhYGQMgk6dd2gpLI9RJqnw5dkEQck+SJBJV0ciQoZa034Op6hQOP5pOwzLDsFQ6ZllGU+cxAFwJ3sTD8FOopVQA1JJK6z+J/z4s/KIuEpsSyin/JfhFXSA0/rFm3fkATz51/5XhNQ5y9cU2AOSytM8KI31LVOqk/Dn4Yki0DAh54rfuXxy71kZpb8aDBX9h3+q/iXm85xwh7vErnm2+gG3jipTpXDPTMmzql8emfnlSE5J5cewuEV7+WNctR7X5PbKst0yXmpTpUpN4/3D8fjuHqZs9974/SIWxrUh4Ho7/9suoYhKpOKENyZHx3Pv2AHJDfQwsjTEsJW4VCEJhuxi0nur2XTFV2PPPs4V8YNVKs+7407mEJjzhSvAW3CwbU9WuU6ZllLOoTzmL+qSkJnA/7BgBMV64mNelU4V5WdZbza4z1ew6E5H4nEtBv2Fr7MbRJz/QzGkslW3bc9xvDgb6pjiZ1iAhJZI/n3yHvlyJkb4l5sqS+1AxkQwIeVJ+eDPNvy1rOBN6/pHmdZVZXfJUlp6RAY7d8vbADmMXa03nwfT6lLamGfoc1Pqlb57KFQQhfzV0HKb5t6NZdZ5EXtC8bl/+hzyVpdAzooZ9tzztY2XorOk8mF6fm0Fj3Cwba23Xs9KKPJX7vhK3CYR3orAwIvp+EPEBERnWpSam8OLYHQxLi1/mglDSGembExx3n8jEgAzrUtSJeIcdw9ygtA4iE0C0DAjvyKKqIzUWf8r9WYeo/H1nrXV6hgpMKzhgXjlv0/t6zzlCakIyekYGeHzbUbM87lkYvstOoIpOpO6GQQCEXX5C0AEvZHpyKoxthTollbvf7sfA0hgTNzsqjm2VRS2CIBSm0qZV6VZxEcee/Mgn5b/TWqeQG2JnXAEHE48s9s7ccb+5pKgTUMiNaOv6jWZ5cmo8hx5NR0+uwNWiITXtuwNw/cV2vF7uZmiNfTyNusztkP1EJwdT26EPlW3bvftBFmOiZUDIs5iHL7k+Ygs+C/7i0mdrAIj3DwfgdPOFPP71NDdGbSU1IZmkkGhND//ciA+IQJ2SStW53ZFS1SQE/tfiYFLWhppL+2ht77f2X/SMDdAzNkBhaUyMdzBlOlan5rI+RN8JzIejFQThbYXE+7LTeyT/PF3I5rtpt+4iEp8DsPJ6S84FeLLL50tSUhOISQ5BpU7MddmRiQGoJRUd3eYgSalEJv33fr8XepSqth3oVnERPmHHAQhPeEZ8SjgmChsgrT9C54o/0eOD5TyO/De/DrnYEi0DQp75b7uEx7cdMSxtweU+a7XWG
Za2xG1Uc/zWnyXqXlCGfXMaQpj4IgojR0sAjBytSAiOynZkQvT9IGqv+ZxXpx4QuPc6pT6pyrWhm/DffgWnnnXf8UgFQXgX11/8QRvXbzFXlmLzHe1+PObK0jR2GsnFwA0Ex93LsG9OQwijk19goUxrdbRQOhKdFKwZmRCdHEwpE3cA5DI91JKa84GetCs/i90+ozXl3Xi5E6+Xe2jhMj7fjrm4Ei0DwluTyWQg016mb2yQtk6hl+WQvuyGEBqWsiAhKAqAhKBIjHLob2Ba0QG5vh4KSyNUcUk833GFSpM/oeHeLwn55/7bH5wgCPlGRsbPCgN52jBgPbkClTo50/2yG0JoblCKqKS0HxxRyUGYK0u/tq40UUlpDzuTJDURic+ISwnnuN8cXsTd50H4/wFQ2+FTBlX9g4tB6/PtWIsr0TIg5JlLvwZ4z/0T0/J26P3vyz+3DKxMsh1CaOxkhVwh5973B5Eb6GHkaMVjz9PYNqqAkaMVPj8dJepuIL4r/qHiVx/j1KMOt6fsITU+mco/dCbpVQwPFx0ncP8NjJyt3/VQBUF4B3VK9eXvp3OxNSqv+fLPLWOFVbZDCC0NndCTKTj65Af0ZQZYKh05H7AGV8uGVLFtz+HHM3gY/n9UsmmDjZErfTzSbmlGJwVTyboV90KP4hd5nmR1AjXssv5MKilkkvT67zKhpEmfo7zJ3xNyPYVxckQcT9b+S0pEPLZNKlK6w/sxnWnk7QDOtlnyXs1RLgj5Jf2z4stax3I9hXF8SgQXAteRoIqgvGUTqti2L+AoC0dQ7G1We33yXn1WiJYBIc8MrExwn1qye94KgpAzY4UVH5ebouswhFwQfQYEnXi+4wov/87Yaehd/F/9udyesptnWy9qloWc8uH/GszN13oEQSg8N17uxCfsRL6Xe9B3Kn89mQWkjXo4/Gg6Rx5/S0jcw3yvqzgQLQNCrgXsu07Yhcfomyhxn9GB4MM3iboTiCouiWrze+C77AQpUQmkRCZgXrk0yRFxJARHUfuXflzuvx7bRhWI8wul3KCGmjLjn4fzxPM0kgQm5Wywa+7Ow8XHMXK0wqlXXcw9cv8QEn0TA1ITUjAsYwlASlQCYRceYVEl62efC4KQ/26F7MMv6iJKPVNal5vG3dAjBMfeJSk1lk5u8zj9fDmJqigSVFGUMvEgPiWCqORgelVaydZ7A3C1aEh4wlPqlRmgKTMiMYDzgZ4gSVgblaOCVXNOPVuMhaEjtex74fC/0QO58eashucCfsXUwI5UdTKmBnb5fTqKBZEMCLkW/zQMc4/SlPqkKnrKtEtHrtAjMTiKqDtpTxUr06UmZpVKcXP8Dj78bTC3Ju0iOTIeKVWN67AmqGKT8J57BOs65YC0yYrkhgr0DBVEewdjXtURhYUxjt1rayUCOQ1JBGj6z0SQ4HL/dTi08sB32QkqfNWK25N2F/CZEQThdeGJzyhl4oG7TVv05UoA5DJ9opNeEBx3F0ibQ8DeuBL7Hn5N38obOOA7mYSUSNRSKh85DiVZFcvxp/NwMa8DpE1WpJAboi835EWcN6VNqmKkb0kNu25aicDbzGoYFHubodX3EZUUzIXAdSXy1oZIBoRc+2BCG6LuBXL/x8NUmtqOoEO3qLflCx4sOq55sJC+qSFyA330TQ2BtGRBnawCSUJKlZBSUrULVUtpLQCvPaXQpKwN/n9cJvpeEM6ffqhZLr0xS+KbXV/TZyqUKxWo4pKIexrKwyV/E30/iIA918RzBwShkLRw+Zrg2HscfzKbVuWmcPfVIfpX2czJZ4tJTk0AQKlnhp7cAKWeGQB6MgUqKRlJUiNJqaS+MdOhJKmp6dCTUiaVNcusjFy4/mI7wXH3qe3QW7P8zVkSs5rV8EXcfULjH2NtWBaFnjFG+hYkp8bl+/koDkQyIOTas60XiXvyCplchoGVMUoHMx79cpJIL39sPnLLdl+5Qg/fZSeIe/KKiuM+Jup2WktCuSGN8Zl/FKW9O
fqmSmwalOflifskR8Rj16ySZv+chiTGPgrh0S8nAbBt6Ia+iZIPNw4B4FpQlEgEBKEQXQ3+nbCEJ8hkcoz1rTA1cODs81UExNyknMVH2e6rJ1dw2n85YQlPaObyFUGxdwCoX2YIJ57Ox8zAHqWeKeUsGuATfoKElEgqWDbV7J/TkMTMZjX8yHEYB32nkCql0Nx5XP6chGJGDC0s4d5maOHbuPbFJs18AkWVGFooCFl7m6GFb2P7/WF8VnldgZWfH97HoYViNIFQKIp6IiAIQtFQ1BOB95VIBgRBEAShhBPJgJBn177YlO9lnuu4ghfH03oZe885wt1v9uE950i2+9yespt7PxwC4Mm6f7n59Q5uTdhJYkh0ptt7fbWdWxN2cnvKblKTVMT4vuRCt1VEewfn78EIggCkNfnntzU3O+H9v5kIj/vN5cjjbznul/WzRA74TmHl9ZY5lvv6cwf2PhjP/ocTOeg7FZU6iVfxvqy/3YOXcT75cxBFkEgGBC23p+wmOTwOSa3m+ogtJL6Iwnv+UW5P2U3wX3e0tk1PCp5uvkDo+UdE3Qng7rf7uTNtLwF7ruWpXkMHc0q1rZrtFMavCzp8C8uazprXYRceUXNpH1z6N8B/2+VM99EzVIAMFOZGyBVyzCo6YNMw+46PgiBk7qDvVOJTwlFLanZ6jyQ66QUnnv7EQd+p3A/9S2vb9KTgSvAWnkReICj2Dn8+nsnhR9O5GbI3T/WaG5TCw6ZttlMYv65rxYXYGmX/Pk9/7kA6hdwQZDIM9c2RyxTYGVfENYeOj8WdGE0gaCnTuSZBh25iUt4O28YVkenJUSepUNqaEbj3BqXbVcty38drzmBSNm2u8Kg7gVo9+B8uPk5yZLzmtX0Ld+xbemQoIzdTGCe9iiHqbiBl+zUg5mEIAC79P+L21D0YWBmTHJ750KBqP3VHJpfzZP2/vDxxn1Jtq+bupAiCkEFVu07ceXUYGyNXyls2Ri7TR6VOwtTAlluv9lPZNutHlp8PXIu1YVkAgmPvUtP+v5FCJ58tIUEVqXld0aoFH1i3yFBGdlMY50Vmzx3oWGEecpmci4HreRB+Ag+btnkut7gRyYCgxaahG8+2XiT6fjDuM9rzfPsVSrWtgmXtslwdtEF7Y3nanKSp8WnTj0rJqbgOa4qBZcbZyaRUSes5AZI680Esb05hXKpdxi/ssAuPSQ6N0TxDIPZxCA6tPHBo5cGrsw+JyaLZP/05BEpbM1RxSTmcCUEQsuNq0ZBrwb/zMs6b1uWmcf3lDtxt2uJsVott9wdrbSuTpb33klPTfhCkqpNpWGYoRgrLDOVKpGo9J0BCnWEbyDiFsYfyk7c6jsyeO2BrnNaSYKKwLTHPHRDJgKBFJpdj5GRF4stoDCyNsfqwHM82XyD8qh9yhfblYljagserTxF2xQ+L6k64jWnJ3W/2obQ1w9jZGtehTTTbVpqSuzdqZlMYvzrzAIW5EZa1XIC0pxyW6VKTeP9w/H47h6mbPc93XyPi2lPUySqqzukGgO/yf6g47mNN2fe+P0hqYgopUQnUWNw70/oFQcgduUyOpaET0ckvMVJY4mJelyvBW/GPvoqeTKG1rblBKc4F/Mqz6CuUMa1OU+fRHHn8LaYGdlgqnfjI8QvNtq3KTs5V/ZlNYfwo4gyG+hY4mdXUbHfi6U8Exd3loO9UOrj9yNOoS1rbZPbcgb+ezCJFnUiCKpJuFRe/87kqDkQyIGRQeWYnzb+t65bDum45rfXpwwSr/tgVALcv/2vCq72q/zvX7/FNR63XkTefU25wowzbGbtYU+WHzgA496qLc6//bkvE+L5EaWemtX2VWV3eOTZBEP7T1vVbzb9dzOviYq79cK/0YYLt3dI65jVmlGZdL/df3rn+Nq4ztF4HxNyiQZlBWstal5tG63LTst0GwMrQmXblvwfQ/L8kEcmAAECs70ud1i+p1TxecybTJxnatXAn7mlYnss0r+pI5P+edJiZeP9wI
m8HYFHdGXVKqs7PgSAUByHxj3Rav5pUzgeuybRD3wfWzQlPfJrt/rnZ5k0Ric8Jir2No2l1UqVknZ+DgiCeQFjC+fv7U8nDncT4BF2HUiQYGhvxwNsHFxcXXYciCEWKv78/7pU8SEiMz3njEsDI0BifB97vzWeFSAYE/P39CQ0NzXab4OBgJk2ahJ+fHz/88ANt2rQppOjenre3N5MmTSIpKYmff/6ZWrVq5biPra3te/PmFoT8lpvPirz6+++/mT59OkuXLqVp06Y575BLCQkJ9OzZE1dXV1auXIlMJsu3suH9+6wQyYCQo3///ZeePXtibGzMgQMHqFmzpq5DyrWQkBB69+7N+fPnWblyJSNHjtR1SIIg/E9UVBTu7u589NFH7Nu3L9/LP3LkCJ06dWLnzp307i06DWdHPHRIyJIkSaxatYpWrVpRtWpVrl27VqwSAQB7e3tOnDjByJEjGTVqFCNGjCA5OVnXYQmCAHzzzTfExsayYsWKAim/Y8eOdOvWjXHjxhEVFVUgdbwvRDIgZCopKYnhw4czZswYRo8ezfHjx7G1tdV1WG9FoVCwcuVKNmzYwKZNm2jRogUvXrzQdViCUKJdvXqV1atXM3v2bJycCm7G1BUrVhAbG8u3336b88YlmLhNIGQQHBxMjx49uH79OmvWrGHQoEG6DinfXLp0ie7duyOXy9m/fz8ffvihrkMShBJHpVJRr149JEni6tWr6OsX7MC2ZcuWMWHCBC5fvize81kQyYCg5fLly3Tr1g2ZTMb+/fupV6+erkPKd0FBQfTo0QMvLy/WrFnDwIEDdR2SIJQoy5cv5+uvv+bSpUuF8hmjUqn48MMPkclkXLlypcCTj+JI3CYQNDZu3EjTpk0pV64c165dey8TAYAyZcpw+vRp+vXrx6BBgxg/fjwqlSrnHQVBeGcBAQF8++23jBo1qtA+Y/T19VmzZg03b97kl1/e/WFH7yPRMiCQkpLCxIkTWblyJcOGDWPlypUolUpdh1XgJEli9erVjB8/nqZNm7Jz585i2y9CEIqLnj17cv78eXx8fLCwsCjUukePHs2WLVvw9vYu0H4KxZFIBkq4V69e0bt3b86dO8eKFSsYOXJkvo/HLerOnDlDz549MTU15cCBA9SoUSPnnQRByLM///yTjh07sn37dvr06VPo9acPZWzUqBF79uwp9PqLMpEMlGBeXl5069aN+Ph49uzZk68P/Chunj17Rrdu3Xjw4AEbN24UY5IFIZ/FxcVRpUoVKlWqxLFjx3T2o2PHjh189tlnHDlyhA4dOugkhqJI9BkoobZv306jRo2wtbXl+vXrJToRAChbtiznzp2jS5cufPrpp0yfPp3U1FRdhyUI743Zs2fz4sULVq9erdPWx08//ZQ2bdowevRo4uJKxvTEuSGSgRImNTWVqVOn0rdvX3r06MHZs2dxdnbWdVhFgrGxMdu2bePnn39m4cKFdOrUicjISF2HJQjF3t27d1m8eDHffvstbm4ZJyMrTDKZjNWrV/PixQtmz56t01iKEnGboASJiIjgs88+48SJE/z88898/fXXJa5/QG4dP36cPn36YGdnx8GDB/Hw8NB1SIJQLKnVapo2bUpYWBg3b94sMp2T58yZw6xZs/Dy8qJq1aq6DkfnRDJQQty7d4+uXbsSFhbGrl27+Pjjj3UdUpH36NEjunbtir+/P7///judO3fWdUiCUOysX7+eYcOGcerUKZo3b67rcDSSkpKoWbMmNjY2/Pvvv8jlJbuhvGQffQlx4MABGjRogKGhIdeuXROJQC5VqFCBixcv8vHHH9OlSxdmz56NWq3WdViCUGyEhIQwZcoUBg4cWKQSAQClUsmvv/7K+fPn+e2333Qdjs6JZOA9plar+eGHH+jWrRtt27bl4sWLlC9fXtdhFStmZmbs2bOHWbNm8d1339GrVy9iYmJ0HZYgFAuTJ09GJpPx888/6zqUTDVv3pyBAwcyZcoUQkJCdB2OT
onbBO+pmJgYBgwYwMGDB5k9ezYzZswQ/QPe0cGDB+nfvz9ly5bl4MGDOu8IJQhF2alTp2jZsiXr16/niy++0HU4WXr16hXu7u507NiRzZs36zocnRHJwHvo0aNHdOnShefPn7Nt2zY6deqk65DeG/fv36dr166Ehoayc+dOWrdureuQBKHISUpKokaNGtja2haL+/EbNmxg6NChnDx5khYtWug6HJ0o2n8hIc+OHTvGhx9+iEql4sqVKyIRyGeVK1fmypUr1K9fn08++YRFixYh8mlBSJP+Xli4cCGPHz/G09OzyCcCAIMHD6ZRo0aMGjWKpKQkXYejE0X/ryTkiiRJLFiwgPbt29OoUSOuXLmCu7u7rsN6L1laWnLkyBGmTJnC5MmT6d+/P/Hx8boOSxB0btiwYQwaNIi5c+cyceLEYjNkTy6Xs2bNGh4/fszChQt1HY5OiGSgGEtNTaVv3778+++/9O3bl2nTpjFjxgwOHjxY6BOAlDR6enrMnz+fnTt3cuDAARo3bsydO3fo0KEDQUFBug5PEHTi1q1b/PPPP1hbW2NlZVWsWs2qVKnCpEmTmDt3Lr6+vroOp9CJPgPF2P79++nevTsVK1YkMDCQzZs307NnT12HVeLcunWLLl26EBcXR2JiIiNGjGDRokW6DksQCp2trS1hYWHI5XLq1q3LhQsX0NPT03VYuRYfH0+VKlWoWLEix48fL1GdrkXLQDE2c+ZM5HI5YWFhdO3aVUy6oSM1atRg4MCBmJqaEhcXx6pVq8RjjIUSKTw8HD09PRYsWMC5c+eKVSIAaY8kX7VqFSdOnGDHjh26DqdQiWSgmDp37hz37t1DrVaTkJBAWFgYKSkpug6rxAoPDycmJgZJkkhMTOS7777TdUiCUOh++uknvLy8mDRpEgqFQtfhvJX27dvTo0cPvv76a548ecL//d//6TqkQiFuExRTSUlJzJkzhy5dulCzZk309fV1HVKJJ0kSjx8/Zvv27fTt21c8h0AQiqnAwEDc3d2pU6cOly5dIj4+vliMingXIhkQBEEQhNcMGzaMS5cucffuXQBCQ0OxsbHRcVQF6738Oenv709oaKiuwygSbG1tcXFx0XUY76WSfJ2J66pwiGtMN9fYZ599xvHjxzWvAwMDRTJQ3Pj7+1PJw53E+ARdh1IkGBob8cDbR3xw5zN/f3883D2ITyiZzxcwNjLG28dbXFcFSFxjurvGWrZsyf379xk1ahTbtm0jOjq60GMobO9dMhAaGkpifAKVfumJcQV7XYejU/GPQngwZg+hoaHiQzufhYaGEp8QzzdD11G2dCVdh1OongU/YO76YeK6KmDiGtPtNWZqasrWrVvZvHnze99fAN7DZCCdcQV7TKuX0XUYwnuubOlKfFC2pq7DEN5j4hrTrZKQCIAYWigIgiAIJd572zLwNiIvPCFg9Tkq/NSZpMBIQvbfJjk4Goc+tbFtVxm/uceRVGrKf98uyzLUiSnIDXMeX/v0pxOEHrlLreNfomei1FoXsPosSUFRGLpY4Ti8EQ/G70WmJ0emL8ftxw7IleLPVlx5+Zxlx7FlTPh8GS/DnvPP5d2ERgbRrvHnNKnVkTV7vidVreLL3nOzLCMpJRGlwjDHutbv+5HT1/ezduZZjA1NNcuDQ5/x7aq+VC7/IXUrt6BZnS5MWdYdBxsXjJQm2dYtFH2vX2N6evp47v4WuVyPdo36U8u9ab5eYyPnNKdi2ZqUsnGmX/uJmuVPg3zYdGg+5ibW1PZoRvO6XQv0GivJHS0z8zadL0v0t0roX/dJCozC+AM7Ym4GYF7XBeuWH2DoZImhkyUW9cuREpnAswX/YNuuMqUH1CPot0sZykmJTODVwdvE3HiOXZfqWLf8IMe6y01rTdKLjJ1SYu8EEX3VHyM3WxT2ZgBpyYUqFX1zQ2QK0ZhTnJy9cZiQ8ADKlnHHx+86VdzqU796GxxsnHGwcab6Bw2JiYtgw4HZNKnVkc7Nh7D/5
NoM5cTERXDyyl7uP7lKy3o9qF+tTY51D+3+Ha8iM58nwUhpQlJyPA7WTgAYGhgjSWqszUt2P5viKLtrbMvhBfRtN4FyZTyYu34otdyb5us1Zqg0QaVKxsailNbyy3dO0L3lCKp/0JAZKz+led2uBXaN+fv7417Jg4TEktnRMjNGhsb4PMhb58sSnQzYtqvMoxmHib7yFPdfPyXq8lOt9S933uDlHi9cxmc9v7XvpP1IKjWlBtSjzMD6muWPv/tTazv7bjUwq+WUY0zxj0MxqmiH64w2PBi7G5u2HlSY1xGZXE7g+ouEn3iATVuPvB2ooDNNandi2baJ3Pa9yHcjNnL74QWt9X+d38bfF7czoOOULMv4efNYUlNVdG4+hC4thmqWr9wxVWu7j+v1wqN83RxjKmXjwi/T/iYxKZ4f1w5m3tid/DByC3K5nFU7p/P4+V3cnIvHbHNC9tfYq4gg7Kwdc7zv/bbX2JKJh5HL5czyHETDGu0wN7UGoM1Hfdh8eD7nbx0lOjYcoMCusdDQUBIS4+lRYSW2RhXzpcziLDTBl72Pxua582WJTgYkSUIVnQgyGZJKnWG9w6e1seteA5/hO7BsVD7TMkoPbsCr/bd5ueM6ycHRWH9cCblSP0N5uX22k7K0OUmBUQDIjRRIySpkRmm3HRS2JqTGJeflEAUdkySJ2Pgo5DI5qamqDOvbNepH6/q9+d5zALXcm2ZaRreWw/m/y7v569zvvIoI4qPqn2CgUGYoTyJ311j65CuGSmPNsvQvC0szOxKS4nJVjlA0ZHeN2VmV4VVEECaG5tmW8bbXWPp1Y2piSbIqSbPcytyO8f2WkKpO5bvV/bS2LahrzNaoImVMq+V7uSVFiU4GgtZfxL5rdRT2pjxb+A9Wrf5r3g89eo/I836oE5Kx61EjyzJMq5TGtEppJFUq4Sd9iTz/BOuWH1BhXqds6w5Yc56Ya895/N1Ryk5uRfRVf+QGeli3cefVgTs8+eEoBnZm6FsY8WTWX6gTU1BFJlBxcbd8O36h4O35ZzWt6vfE2tyB3w7M0Wp6/ff6Ibwe/EtScgKtG3yaZRkVnKtRwbkaqlQVV+6cwMvnDPWrtWF8v8XZ1r3r71+4//gKv+yYxpCu33D30SUU+krMjC05fnE7ScnxtKrfC4D5G0agNDAmVa3is0/G58uxC4Uju2usfZMBrN37PXpyfTo0GZhlGW9zjcXERbByx1QMFIaYGVtha1maXX//Qi33JpgaW7Lt6GISk+L4tO04oHheY8efzqZtuZkZll99sQU3y6ZYG5bLdVknns0jRZ2AQm5E67IzNMuTU+M54jcdPZkBruYfUd2ue36EnmclOhlwHNZQ82+z6o5EXniieW3bvgq27avkuiyZvh42bdxzvb3TiEY4jWikeW3X6b8mswrztROJ7DosCkVbr9ajNf+uVK4WXj5nNa+b1ulM0zqdc12Wvp4+DWvm/lro3WYMvduM0bxuXve/RLL6Bw21tp3+xZpclysULdldY7aWpZnxRcb+AVnJyzVmZmKVoezXr7dJA1ZorSvq19ireF9OByzBxrA8gbE3+bzyNiKT/AFYdbMVNe17ERx7hy5ui4hNeUWKOjHXZUcmBZIqpdDedTbHn84mKikQC6UjAN7hR6li3YFK1m3Y9XCkSAaKAn1zI+LuB5MYEImhk2WG9S933sCwnHXhBya8N0yNLXj8/C4vw57jYOOcYf2x89twtHPVQWTC+0JcY2/nesgftC77DWYGpdh6v6/WOnNlaRqVGcml4N94EX8vw77xKRGcCViqtaxeqcHYGKWd55jkYCyUac+9sVA6Ep0crEkGopKDsbdM+yEpR3dTPouu6a8xrVqaiou6YehkyZMfj2VYX3ZSK2QyGQlPw/JUrt/c4zz+9gh+c49rLU+NT+bBuD34TtpPyL6bmuUvtl/nVvf1AMR4BeA9YgdPZv2V9wMSipyKLtWZPHAl+05m/ivJ2sKBulVa5qnMNXu+Z
8Ufk1mz5/sM6xKS4hg+uxkXbv13/Ww7uoTvfv1c8/pJwD26fl2e+MTYPNUrFE3iGns3MmSafjXpDORGAOjJ9FGpM++3pZZStf7jtf4VZgaliU4KBiA6OQhzg9KadRYGpYlOTlsnkbHvWmERyQAQ7xuC98idPF34D3f7bgYg8XkEANdbriTA8xw+X+4iNSGF5JAY1IkZO4JlJTEgEkmlxm1OR6RUiaTASM260KP3sO1QlYqLuhF23AeAhGfhpITHo7AxAcCslhOu37bNpyMVdOFZ0ANmrRnEbwfmMHlpWlN9cOgzAAZ/34Cdx1cye+0QkpITCI96SVJK7psfX4Y9JzU1ha/6/oxaSiUkPEBr/fa/ltLitdsD9x5fxsbCQfNapUrhz7ObqV+19bscoqBj4hp7d3Xs+3Li2TxOByxFITfOeYfXGCus6FB+rtZ/Nkb/dTq3VDoilyk49vQHZOhhoXTkQtAaguPu4mHdnvthRzn8ZBqVrHR3jsRtAuDFH9dx/bYNylLm3PlfMpBOWdocp5GNCdxwkbh7wRn2TYmIx3/pKa1lZQY3wMg1bYar5BfRKMtYpJXlaEFScDRKR8u0dcHRmLinjc+V6cmR1GoCPc9TflY7fEbvzu/DFHTkz7ObGdHzR2wtyzBlqXYHUDurMnzadiz7/s+TR89vZ9g3OjaczUcWaC3r1mI4Tg5uAIRGpg0dA3CwduJVRCD2/3t2wLV7Jylbxp3klLRe3knJCfzf5T181fdnLtxOa/na+fcKurcayZYjC/P3oIVCJa6xd2eisMXG0JX4lHBq2qV1rP200jqt/39YagAArhYNMy8kG63LTtd63bDMCM2/u1VY+ubmhU4kA6+TyZC9sUhubJD2f4Ue6uTMWwQyDEt8bRihQSlzkoLShgomB0Wh/OS/ZwQYlDYnKTgK06qlkdQSic8iSAmPw2/OceLuvyD8/x5g3apkTVDyPpPJZPBG86OhMq0FSE9PQXJK5s2PGYck/nd92VqmDR0DCAkPpHGtjpp1Nx+cIyE5jmdBPigNjDA2NCM2IYqVO6by+Pkd7j2+wiP/O4RHh+Djd53DZzbyadux+XCkgq6Ia+ztGSusaOkyWddh6IxIBoBSfevwdO7fGJW31Xz555bCyjjbYYSGTpbIFHo8+eEoMgN9lI6WBKw5j2VDV2zbV+HxjMOE/99DbNpUwsjVBo81fQBICo7GulUl4h+H4r/kJPEPQgj+/Sql+3/4TscqFL4OTQaydu/3ODtUwEiZt+ZHc1PrbId3Odg4o6+nYNXO6Sj0ldhbO2mGdw3t/h2Q9mAjC1NralZqTM1KjYG0h9FUcatHFbd6AMz/bSSdmg1+yyMUdE1cY4XPK2QnxgrrfG/aP/xkKgZyE9qW+47rL7cTGOtFdHIw7V1n52koY17JpNw+DaeYuHHjBnXq1KHWsS9zPWthSkQ8gesuoIpIwLJJ+TwNKSzKYm8H4fXJaq5fv07t2rV1Hc57Jf06Wzvz3xxnlIuODWfPP6uJjguntnvzPA0nLIoePrvJ8NlNxXVVwMQ1lrtrLP08jah2LMeHDt1+tZ+n0Rcx0DPhY5dp3As7QnDcXZJT4+jgOpd/A1eQoIoiURWJg0ll4lMiiE4OokfFlWzzHkA5i4aEJz7lQ4cBBMfdwVhhjYOxBxeC1iAhYW1YlgoWzTkdsAQLpSM17HriYJz7Ief3wo6QqIohNMGXtuW+0yz3DvsLNSqq2GT//BqAoNg7rLnzSZ7fn6JlgLRf9+WmfKzrMIT3lLmpNUO6fqvrMIT3mLjGcic86SkOxh64W7dBX542QZyeTEF0cjDBcXcBqGrbCXujShx4PIE+ldZz6PEUElSRqFHToPRQklNjOfFsHs5mdQC48mIz+nJDFHJDXsb5UNq4Kob6FlSz7aqVCOQ0/DA2+RXBcXepY9+P0ARfzTYnns0nKO4mXd0Ktl+BSAbywcudN9C3Nsamde4zwNzwnXoQPWMDyn/fjhd/XCPyg
h/qhBRcvm6BadXSORcgvHfSm2Mb1si/B1EdPbcVX/9bmBiZM7TbdznvILzX8vsaU6vVLP39a5JSElAaGDPx82X5Uu7baO70NS/i7vH3szm0dJ7MvbDD9HXfxKnnS0hRJwCg1DNDT26AUi9tpk89mQKVOhlJUiNJqaRKbzyiWVJTw64HpUwqa5ZZGbpwI2QHL+PuU9O+t2Z52pBDrb01/3oafZG4lDDOBCzlRfx9QhMeY2vkRuuy0wmI8eJGyA5aOE/I5zPynxKbDITsu0XURT/0TJWUm9aa0CN3ib0bTGpsEm7zOvF8+WlUUYmoohIw8ShFSkQ8ycFRVFrZi3sDtmLR0JWEp+GUGVBPU2ZiQASBnueRJDAqZ41V8wo8W3wKQ0cL7HvVwsTdIZuItL06chezGo7E+74CIOrSUz5Y2p3YW4FEXfQTyUAx8s/lXdx8cA5jpSlDu3/P6Wv7eeR/m/ikWMb3XczWP38mNj6SmPhIyjtVJTo2nFcRgXwzdB3TVvSiVqWmBIY8oUvzLzRlvgjzZ9fxlUhIlLFzpV6Vj9l0eD4O1s60afgZ5R0rZxPRfyKiX3Hq6l4qONfIMPOcUHwU5WtMLpczccByAOasG4parc5x4qSCcu3l74Ql+CFDjpG+FaYKe84FriYw1oty5g2y3VdPruBMwHLCE/1o4jiW4Lg7ANQvPZh//H/CTGGPgZ4p5czr8yDiBAmqSNws/ptvJH34YVaq2namqm1nIhKfc+XFRmyN3DgXuJqo5CASVVE0dRqXPychCyU2GUh8Fo6JRyls2rojV6adBpm+nKQX0cTdTRtCaNe5GsaV7Hn49T4qb+iL7+QDpEQmIKWqcRz6EarYZJ7OO455nbSZoYI3XUFuqEBuqE+c9wtMqpZG39IIu241tBKBHIcjvool7m4wpfrV1SQDdl2qcafnBqRUNR7rtJ+OJRRtQSF+lHeqQuOaHTBQ/K9pUk9BaEQQvv63AGjxYQ/KlXFnwcYvmT16G4u2fEVMXARqdSo9Ph5FQmIMa/f+QOX/dcY6eGo9SgMjDBSG+AXep6JLDcyMrWhVv5fWh3ROw8aCXvlhZmLFiJ6z8Nw9k8CQJzjaZz4pl1B0FeVrDOBpkA/b/1qKqbGFzhIBgLoO/bVed3ZLG+7Y2PFLQHvIYLcKywA0X+D6MgNaufw3u+jrLQE9K/6iVa6rRSPelpWhs6a/QHpchaHEJgMuX7cg9l4wT2Yfp9yUVrw6dJcqm/vzbPFJUhPSht/omSmRG+ihZ5b25pIp9JCSVUhqCSlVQlJpN/lIagmHnjUxqfzfLywjFytebL9O3P1gHHr/15kju+GIURf9SAmNxX/JKeLuvyD+cSjBW65S4+BwkoKjCfj1LG4/dsjvUyIUkAGdpvLo+R1+3f0tX3T9ltPX9jNv7E42HZpPYnJa06SxkRkKhRJjIzMA9PUUJKvSmibV6lRUbwz9Uktq2jToozUNbBm7cvx5dguPn9/hk0b9NMuzGzZmZ1UGcxMrIO0xtmLGwuKpKF9jAOXKuDP9izUs/f1rXoT5U8om91PrFhXpzxp4X5XYZCD496skPAlDJpehb2WMgYMpz1edJeZmABYflct2X7lCD//lp0l4EobLV82IvZM2BrfMkPo8nX8CA3sz9EyVWDQoR/gJH1IiE7BsWkGzf07DEe06V8OuczUSn0cQ9NsljN1ssfioHL6T9qOKSaJU35znrBeKjsNnNhIQ8hi5TI65iTU2FqXY/tcyfPyuU+ODxtnuq69nwNY/FxLw8jH920/i4f9+5XVrOZz1+2ZhbVEKY0NTanzQiAu3/iI6LoK6lf971GxOw8bsrZ0wM7Fi1c7pqFJTqOAspoAtjoryNRYaGcy2o4uRJDV6cn3srZzy56Dfwc4Hw/L9y339nc40dhyDu3WbLGcofF1utoHshxoq5MYceTKN2g6fvfMQxxKbDLw5Xr/iwq4AOI9uAoBlw/+aSist6wGg+QKXKfW1Rh+83hLgv
uq/ziIAlo3evsnV0NlKM2Oh08js39BC0fXm2Or0+6eftRsPQC33Jpp104d4Amg+XBUKJV90/W8K1dd/pX07bINWubXcm/I2Xi9fKJ6K8jVma1macX0X5Xm/t3X4yVRaOU/FUN+Svb6jaVvue6682ESCKoIKli3wsP5Es216UnD1xRZsjSpgqG+OV8gu1JIKZ7M61LDrket6zQwccLduk+0Mhelysw2kDTUsY1JTM7qgjsNn1HH4DO+wvwiOu0MVm064W+fP4+pLbDLwLiqv+0zXIQglxI+jtuo6BOE9975dY1VsOnE37DA2hq64WjRCLtMnVZ2MicKO26/2aSUDb7oQtEbzYJ/guLtaycDp50tJUEVoXlewbEFFqxYZyshuhsK8bFPYQw3FREWCIAjCe6OceUOeRV/ifvhRqth05NarPVSybkNTx7Ekp2r3iZHJ0r4Ck9XxAKRKKTQo/QUtnCfySTntGRrVaM9KmNUMg9nNUJiXbV4faugXfYHQhMdA2hwHrZyncSNkR15OS45KfMvA/WHb8/2X/s1Oa3Ae0xSbth74zT2OOiEFuZEC12+ybs55/ZkCz1edJdE/nPiHIdh3r0Hpz+tl2P7lbi9e/HENpy+bYNPaneSQGB5NPYRD3zr5/rwDIX989+vn+f4rbPT8j+nbbgKNarZnzZ7vSUqOR2lgzIieszJs6//Cl+1/LUWSJFxKf0Dfdl9nWqZarWbGyt7Uq9aa7i1HcPD0Bq57n37vfkG+j3R9jQEs3jKOu48vs3HWpUzXZ3Yd/t+VPfx5dgtLJh5653jlMjkWSidikl9gpG+Ji1ldrr7cin/MVfTkCq1tzQxKcT7IE//oK5QxqU7jMqM56jcTE4UtlkpnGpQeotm2pfOkXNX/+gyFejIlFkpHHkf+i6G+OY6mNXO9TWEPNXyvkwHfqQcpN7U1+paG+Hy5m/I/tCN402VSIuKxal4R23b/DQ1JTwqCt1zBqIIt+maGvNzlhaRSY17XGfseNXNdr0Epc2zaemhNX/zkx2MkBUZqZix83ZvPFEjvt+Azaid2nTPv0OXQqxaoX5sQyd4Mm9cmQRIK1+Kt4xnabSZmxlbMXjeE0Z/O58DJdUTFhVOv6sc0eW1yl/QP7IOnN+BS6gNMjc05dn4bqWoVlcvXo81HfXJdr41laRrVbK81zeyvu78lJDxAM7NcOpdSFZk6eDUAM1f1y6w4APafXEOD6m1RS2m/fLo0/4Lr3qfzcDaEglAcrjFI66/w3a+fZ1leZtdhq3o9OXP9YK5jykmbst9o/u1sVhdnM+1O1+mdB9uVS0toGpUZqVnXo+LKd67/zRkKA2NvUq/UoDxvA4U31PC9TgbsOlXl1eE7GLnaYNm4PDJ9OeokFQa2przaf0srGXhT4NrzGJa1BiD2brBWMvBsyUlUkQma11YtKmLd4oMMZWQ3fbFmm0yeKQCQ9CIauaECfQujtzl0oZC1qNuN01f34+jgRm33ZujJ9UlWJWFtbsc/l3ZpfVC/adffv1DGLu2RpI/8b2t9UG8+/BPRcf/dp6xX9eNM54XPbprZN/3flT3UrdIy03V+gd6o1amUd6qKX9D9nA9cKDTF6RrLjeyuw+LIUN8Sn/C/cbduk2FdU6evctw/N9u8KSY5hIDYG5opl9/Fe50MWDR0Jfj3a8R5v6TctNa83HEdm7bumNVy5v7gbVrbyuRp036mxqc9Y0CdnEqZoQ1RWGbyZZwqaT8nQJ35XE/ZTV+cLrNnChi72fJyx3UcPq31Noct6EDNSk04/O9GHgfcY1j37zh67nca1WyPh2tdvvlF+1eY/H/3KROT0u5TqlQp9Px4FGb/G+//OrU6VWsMt6TO/D5ldtPMvu7/ruzhZdjzLG8RXPc+ReArP+48vkx0bDgtP+yJpZlNDkcvFIbico3lRk7X4dt4vZOdLnzoMABImyioMNWx769V79ueh/c6GZDJ5Rg6WZL8MhqFpRHmdV0I3nqF6Kv+yBR6W
tsalDIn4NdzRF95hmn1MjiPbsrjb49gYGeK0skSxy8+0mxbdnKrXNWf2fTFEWceoW9hiFnNtIw6s2cKSJJE9BV/XMb/11PVf8VpXL5qrnkddsKHl7u9kBsq0DMx0BoKKRQ+uVxOKRsXQiODMTOxomqFehw6/Rt3H11Coa89LbatZWl2HF/BHd+LfFC2Jp+1G8/yPyZjbW5PKVsXurf6r8lycJdv3qwqU5lNM3vt3klMjS1wd02bUMXX/xa/7v6Wj6p/wqqd0xn96Xz+vriDGh80wsHGGYCeH6c1Q3r5nMUv6L5IBIqQ4nCNAazf9yOP/G+xeOt4xvZZwO2H53O8Dt+Fra0tRobG7H009p3KeZ8YGRpja2ubp33e62QAwPXb/zrtmdd1wbyu9pOv0jsPus1qn7Zg1H/j+d1/efemF9cZ2k1GMbcCKDMo4zOwX3+mgEwmo+ofAzXr4n1fYWBnprW9TWt30VGwiBnR80fNv6u41aeKW32t9ekdu8b0+QmAPm3/axb8dtj6d65/eI8ftF77PL1B1xbDNK8rutRgz88+Wts8DfKhVf2M13kt9yZaY9OFoqGoX2MAQ7t/x9Du32W5TWbX4btwcXHB54E3oaGh+VZmcWdra4uLS96e8vjeJwO6oLAwJOy4NzZtM94WeP3XfW4ZV7TDuKJdttskh8QQfeN5WsdCocQwM7bk/M2jNKrZPsO6/h1y7v385od7Zg6e3oBrGZF4llTveo3lZpv/u7IHO8u3n3zNxcUlz19+grb3NhmIfxSis7pLD0jL1mNvBxVuvf0+1KpXl+egpHgW/ECn9afPMvfw2c0Cq8Pjf8276XXo+phLGl2f78K4xpwdKuDsUEFcYzr03iUDtra2GBob8WDMHl2HUiQYGhvl+d6RkDNbW1uMjYyZu35Yzhu/h4yN8n5PUsgbcY2Ja6wwySRJyrwrfDHm7++fr/ePYmJi6N69O7Vq1WLhwoX5Vi7Ay5cv6dGjBx07dmTatGn5Wja83b0jIXfy8zo7evQoM2fOZOXKlTRs2DDnHfJg27ZtLFmyhM2bN1O1atWcd8gFcV0Vjvy6xiRJYtSoUQQFBbFr1y4MDQ3zIbo0KpWKzz//HLlczubNm9HXz5/fmOIaK2SSkKORI0dKZmZmUmBgYIGUv2zZMkkmk0mXL18ukPKFoi08PFyyt7eXevfuXSDlp6SkSLVq1ZJq1aolpaSkFEgdQtG2detWCZCOHTtWIOVfvnxZkslk0rJlywqkfKHgvZctA/np0qVLNGzYkOXLlzN2bMEMXVGpVNSvXx+1Ws3Vq1fzLbMWioeRI0eyfft2vL29KVOmTIHUceXKFRo0aMCSJUsYP358gdQhFE0RERFUqlSJli1bsmNH/j7P/nWjR49my5Yt+Pj44OiYcQY+oWgTyUA2VCoVdevWRV9fn8uXL6Onp5fzTm/p6tWr1K9fn8WLF/P11/n3IA6haLt48SINGzZk5cqVjBkzpkDrGjNmDJs3b8bb2xsnJ93PKS8UjhEjRrBjxw58fHwoXfrte+znJDIyEg8PDxo1asSePaLPVrGj24aJom3x4sWSXC6Xrl27Vij1jRkzRjIxMZH8/f0LpT5Bt5KTk6Vq1apJdevWlVQqVYHXFxkZKZUqVUrq3r17gdclFA3nz5+XAOmXX34plPq2b98uAdKRI0cKpT4h/4iWgSw8f/4cDw8PhgwZwooVKwqlzqioKDw8PGjQoAH79u0rlDoF3Vm0aBFTp07l6tWr1K5du1Dq3LlzJ3369OHw4cN07Pj2j5MVir6UlBTq1q2LgYEBly5dKtCWzXSSJNG2bVt8fX25d+8exsbGBV6nkE90nIwUWV27dpVKly4tRUVFFWq9O3fulADp0KFDhVqvULiePXsmGRsbS1999VWh1qtWq6W2bdtKZcuWlWJjYwu1bqFw/fzzz5JcLpeuX79eqPX6+vpKSqVSmjZtWqHWK7wbkQxk4uDBgxIg7dq1q9DrTv+wdnFxE
R/W77HOnTtLZcqUKfRkU5Ik6dGjR5KhoaE0derUQq9bKBxPnz6VjI2NpXHjxumk/h9//FHS19eX7ty5o5P6hbwTtwneEBsbS+XKlalatSp//vknMpms0GN48uQJVapU4auvvmLBggWFXr9QsA4ePEjXrl3Zs2cPPXr00EkMc+fO5YcffsDLyyvfnj0gFB1dunTh+vXreHt7Y2ZmlvMO+SwpKYkaNWpgZ2fHmTNnkMvlhR6DkEe6zkaKmkmTJkmGhobSkydPdBrH3LlzJX19fen27ds6jUPIXzExMZKzs7PUoUMHSa1W6yyOxMREyd3dXWrUqJGUmpqqsziE/Ld//34JkPbu3avTOE6dOiUB0vr163Uah5A7omXgNbdu3aJOnTrMnj2b6dOn6zSW5ORkatasiaWlJefOnROZ9Xti4sSJ/Prrr9y/f59y5crpNJYzZ87QvHlz1q1bx9ChQ3Uai5A/YmJiqFy5MjVq1ODw4cM6adl83cCBAzly5Ag+Pj7Y2WU/2ZqgY7rORoqK1NRUqUGDBlLlypWlpKQkXYcjSZIknTlzRgKktWvX6joUIR94eXlJenp60k8//aTrUDQGDhwoWVlZSS9fvtR1KEI+mDBhgmRkZCT5+fnpOhRJkiQpJCREsrKykgYOHKjrUIQciGTgfzw9PSVA+vfff3UdipZBgwaJD+v3gEqlkurXry9VqVJFSk5O1nU4GiEhIZK1tbU0YMAAXYcivKOimGxKkiStW7dOAqRTp07pOhQhG+I2AWmTBbm7u9O9e3c2bNig63C0hIaGUqlSJTp06MCWLVt0HY7wljw9PRk1ahRnz56lcePGug5Hy4YNGxg6dCgnT56kRYsWug5HeAupqak0bNiQuLg4vLy8UCgUug5JQ61W07RpU0JDQ7l16xZKpVLXIQmZ0XU2UhT069dPsrGxkV69eqXrUDK1YcMGCZD+7//+T9ehCG8hODhYsrCwkL744gtdh5Kp1NRUqXHjxlKlSpWkxMREXYcjvIXVq1dLgHTu3Dldh5KpO3fuSPr6+tLs2bN1HYqQhRKfDJw4cUICpI0bN+o6lCylpqZKTZo0kT744APxYV0M9e3bV7K1tZVCQ0N1HUqW7t69K+nr60s//vijrkMR8ig92Rw6dKiuQ8nW1KlTJaVSKfn6+uo6FCETJfo2QWJiItWrV6d06dKcPn1a5z1vs3Pv3j1q1qzJzJkz+e6773QdjpBLJ06coE2bNmzatImBAwfqOpxsTZ8+naVLl3L37l0qVKig63CEXOrbty8nTpzAx8cHGxsbXYeTpbi4OKpUqUKlSpU4duxYkf68LZF0nY3o0vfffy8pFArp/v37ug4lV6ZPny4plUrp4cOHug5FyIWEhASpQoUKUvPmzXX6TIHciouLk8qVKye1bt26WMQrSNLff/8tAdLmzZt1HUquHDlyRAKkHTt26DoU4Q0ltmXgwYMHVK9encmTJzNnzhxdh5Mr8fHxVK1aFTc3N/7++2+RWRdx33//PfPnz+f27du4u7vrOpxcOXr0KB06dGD79u3o6+vToUMHjIyMdB2WkInExESqVauGk5MTJ0+eLDafBz179uT8+fN4e3tjaWmp63CEdLrORnRBrVZLLVu2lMqXLy/Fx8frOpw8OXr0qARI27Zt03UoQjZ8fHwkAwMD6dtvv9V1KHnWs2dPyd7eXgKkP/74Q9fhCFmYOXOmpFAoJG9vb12HkifPnz+XTE1NpS+//FLXoQivKZGPtdu2bRsnT55k1apVxe5XT7t27ejVqxdff/01ERERug5HyIQkSYwaNQpnZ2dmzJih63Dy5OjRo1y7do3o6Gj09fV58eKFrkMSMuHj48NPP/3EtGnTik2rUzonJyfmzJnDr7/+ypUrV3QdjvA/Je42QUREBJUqVaJFixbs3LlT1+G8laCgINzd3enXrx+//vqrrsMR3rB161YGDBjA8ePHadOmja7DyZOIiAiGDRvG3r17Aejfvz9bt27Vc
VTC6yRJomXLljx//pw7d+4Uux80ACqVinr16iFJElevXkVfX1/XIZV4Ja5lYNq0aSQlJbF06VJdh/LWypQpw9y5c1mzZg2XLl3SdTjCa8LDw5k4cSJ9+vQpdokAgJWVFXv27OHgwYMYGRkRGRmp65CEN/z++++cPn2a1atXF8tEAEBfX581a9Zw69YtfvnlF12HI1CCWgYGDBhA48aNGTFiBCtXrmTMmDG6DumdpKamUr9+fVQqFe3bt6dGjRp8+umnug6rxLp//z4zZszAzs6OXbt24ePjQ+nSpXUd1juRJKnYdEorCRYuXIhCoWD+/Pm0atWK7du36zqkdzZmzBg2b97MggULCAwMZO7cuboOqcQqEW0zSUlJbN26lVOnTlGtWjWaN2+u65DemVwuZ9iwYYwaNYro6GieP38ukgEdOnPmDEeOHCE1NZWvv/4aU1NTXYf0zkQiULTs3r2bmJgYkpKS6Nevn67DyRc9e/Zk9+7dLF++nJSUFJEM6FCJuE0QEhICQEBAAA8ePODHH3/UcUTvLiwsjK+++goLCwuePn3Ks2fPdB1SiRYUFIQkSZibm7N06VIuXLig65CE94y/vz8PHjxAkiRGjhyJWq3WdUjv7KuvviIuLo6HDx8SFBSk63BKtBLRMvDw4UMg7df0xIkT+fbbb3Uc0buztbXlxo0bDBs2jIsXL3Lnzh1dh1SinTlzBrVajZmZGdu3b6dt27Y6jcff35/Q0FCdxqArtra2uLi46DqMfCVJEq9evQKgbdu2LFu2DLm8+P+W+7//+z+mTJnCpk2bSEpKIjY29r1oVSuOSkQyULlyZerVq8fKlSupV6+ersPJN1WqVOHcuXN89913+Pv76zqcEq1bt244ODiwefNmjI2NdRqLv78/7h4eJMTH6zQOXTEyNsbH2/u9SghkMhkff/wxvXr1YtiwYboOJ9/Y2dmxceNG2rdvz/Lly3X+3inJSkwHQkEoKW7cuEGdOnUYuu4bSlcqq+twClXwg2esHzaX69evU7t2bV2HIwjFRoloGRCEkqh0pbKUrfmBrsMQBKEYeOtkoCTfk8xMXu5TluRz9zb3c8X5en+au4sqcY2J92Ruva/vybdKBvz9/ank4U5ifEJ+x1NsGRob8cDbJ8eLxN/fH/dKHiQkltD7uYbG+DzI/f1ccb7ydr7els9ZL44t28HnyyZw7+RVnl73IS4img6TP6f0By5sHb+EcrUq0XJE9yzLSElMQmGozLGu6dX7UrllHVxqfECzwZ201kW+COOnNmMYu2MujpXLZ7ttfinpn2e5/exKJ96ThfOeLGxvlQyEhoaSGJ9AhZU9MKpom98xFTsJvqE8GruX0NDQHC+Q0NBQEhLj6VFhJbZGFQspwqIhNMGXvY/G5uo8afYR5ytP5yu3bhw+S3hACGXcy+J33Qe3+lWo3qY+Ns4ONB3YkaYDO+J/y5dbf13EpXpFGvb7hKD7fhnKiXwRxqWdJwi894SWI7rjWifn5+QrTY1ITkjG2tE+w7pjy7ZTt2vzXG2bX0ry51lePrvSifdkwbwnde2d+gwYVbTFtFqZ/IqlRLE1qkgZ02q6DqPYEOcrf9Xu1IRtE5fhe/E2IzZ+x8MLt7XWp6pU/OO5l67fDMmyjOU9p+Hg5kiTQR35ZFwfAGLDozmyYLPWdi2Gd8PBzUnz+rtz60CSWN5zGtXa1NcsP/f7X9Tp2oz7J6/luG1BEJ9neSPek++XIjdQ9ens45kuf7HlKolPw/NU1rN5J/CbeZRn805oLU+NT+bRuH08nnyQV/tuZ7F38XD86exMl199sYXwxKd5KuvEs3kc9ZvJiWfztJYnp8az79E4Dj6ezO1X+9421CJDnLO0cevxUbHI5HJSVala61QpKrZNWEbrL3ti7ZT1L/L2E/sCcHrdQe6euIw6Na2cVFWq1n+8MV5JLpcj19NDYWig9eAcv+veXN9/mnv/XOHMb4ez3bYoE59heSPej0WDTkcTxPu+I
mDJaQzL2xB7M5DK2z4nyT8SgJutVmHfqyaxd4JxW9SFlFexqBNTcl12UmAkUkoqrrPb83T2cZICo1A6WgAQftQb6w5VsG5TiYcjd2HXvXpBHF6+exXvy+mAJdgYlicw9iafV95GZFLa8wVW3WxFTfteBMfeoYvbImJTXpGiTsx12ZFJgaRKKbR3nc3xp7OJSgrEQukIgHf4UapYd6CSdRt2PRxJdbus7xsXNeKcZe6f1Xuo37MV5g7WHJjzm9av7r3freHFo+ec3nAQj2Z1qNuteaZlVPyoOhU/qk5yQhJeR87hdy3tdkO/xeOzrPeFrz9/LU17pn6lxjWRy+XsnL6K9hP78fnSCQAcnLeRul2bZbptUSM+w/JGvB+LLp0mAyF/XKfsN60xKGXG/b7a06QqS5tTZmQjgn+7RPy9jHOqp0TEE7D0jNayUoPrYeRqA0BycAzKMmlvHKWjBcnB0Zo3UnJwFJbu//vFo1f0PmCycj3kD1qX/QYzg1Jsvd9Xa525sjSNyozkUvBvvIi/l2Hf+JQIzgRoz9RYr9RgbIxcAYhJDsZCmdZEaqF0JDo5WPNGikoOxt4y7V6wHL18P66CJM5Z5lqP7qX5d7lalfA566V5/en80Xkqy8BISf1erXK1bamKLgxePVVr2Zv1dZkxWPPvN7ctasRnWN6I92PRVTSuIpksw6QociODtFX6eqiTVZnuJqWqtf57vTnSoLQZScHRACQHRWNQ2vy1dWlvLADUxe+ZSzIyni+D/2/v3uOiqvPHj79mhmEuMAyXkasgKAaiJqWZafVLM7u42cW2WtvtW9vFym+7200tu25lfWvz0kWj2t3KLO3uJTOttIvWek9EQEVglDsCIjAwwMzvj1lGBxEYbjPA+/l4+ICZ8zmf8zkfOWfec875fN5KRypTlcKHBpu1xfVs9kaXf6d2mME3gsq6AgAqrfkE+J7MuGf0jaDS6lhmp3dcqm1O+qx1eqM/R/ZlcexI0WnL6mvr2LP2ZwIjB3igZb2EnMPcIsej9/HolYHQGaPJnb8R3eAQlHq1W+uqg/QMfn7qGZdrogJRqJXkPL0ehUaFJspIfspWjBPiCL5qGNnzvqL8uwMEXZbQ2d3oMaNDZ7Axdz4husGole5N26lXBzF18JkzggVqolAq1KzPeRqVQoNRE8XW/BTijBMYFnwVX2XP40D5dyQEXdbZ3ehR0mftE3P2UP7ntUcA+OTxpfz+uXudy9RaDTf/3/1s/ucqig/nETo4qt31fvpUCtaaOnz1Gm54ZqbLsrpqCy9d9Vemzb2NUVeOB2DdguXk7s7k3mWOZGJH0w7zytUP8sLeD9H6e99UtXIOc48cj97Lo8GA2uSHNi6E+rIaBvw+GYCEt29y+Rl+63kAGMfHuV3/oEdd/9MjZ453/h6/8LqONNmj/NQmQrRx1NSXkTzAcZn3poS3XX6eF34rAHHG8S1X0orLBj3q8np85MmT93XxC5sX7xWkz1qXn5nLmhfeJSw+muyd6TzwxcuU5jq+QT017nbG33IFubsPcNsbszleVEZ9bV276z52pIjG+kZmvPwXPnl8KWVHi10eSPx64UeMuW6i83XWf9IwhoU4XzfUN/DTe18x4rLuHUXQGXIOc48cj97Ls8FAkJ6YRyZ5sgm9il4dxKSYRzzdjF5F+qx1P733FTf8fSaBkSYWXjfbZVlQ5AAuv/8mvnvzc47sPXTaum0NI6zILyU4ynFrIXhgGOV5Jc5gIO37HUQmDqK+znE52Gqp4z+ffseMl//C3vWO9M8bXl3Jpfdcz9qX3u/ane5Ccg5zjxyP3qtX5SYoXrkbdbC+yy+LHZ6zBqWfL7FPXk7ekp+pyy2n5mAJA64bSdifzuvSbXnK7uKV6NXBXXqJbHXWbI6c2Mms5O+6rE5v0dX9ZbPbWHt4Lg22WtQqHVcP/r8uqberKBQKmt3CReOnBUClVlFvbfkebvNhiafe8w6MNFGe70i7W5ZXzDm/u9C5L
PPnPVirLeRn5OKr06A16LEcr2LFnNc4kppF1rY0zKmHqCwuI3tnBj/8ew2X339T53fUi3TH+Sxr9mpO7DxC8nfuPQTaG3T1MVlqOcTPeUuwY8eki+eiqL7XZ+7o9mCg5Iu9VP6Sg8rPl5i5kzm2No3qfQU0VluJe34qea/+SMNxCw0VtfglhVFfXoM1v5Khr00n/dblGMfHUptTRtitJz+U645WkJ+yFex2tIOCMV4Sz9EFm9FEGRlwwyj0iWHtbt+xtWn4JUdiOeiYZzvqPscJ68B9nxAybUTXdoab9pZ8QU7lL/iq/JgcM5e0Y2spqN6HtbGaqXHP82Peq1gajlPbUEGYXxI19eVUWvOZPvQ1lqffSqxxPGW1OZwXdquzzoq6o2zNT8GOnWDtIOKNl7D56AKMmihGDbiBMH3bM8g1mTbkJVZmek86VW/uL6VCybQhLwHw2cH7sdltKBWef373ov+ZymdPvUVYfDQavc6tdf2DA1odRhgSHYZK7cPKR9/AR6MmeGAoG17/mMSLzuH6J+8EYMvyr/EPNpJwYTIJFyYDUJ5fwpCxwxkydjgA/7rnhW6bithd3n4+G/LSNDLvWtkdu94h3nxMmnTxXBu/AIAVmXd2y/73Jt0eDNTllKEfFkbwlESUGsfmFGoV1oJKqvc57k2arh6BLiGUrAe/JOGdm8mavZqGCgvYbETcOY7GKiu58zdiGB0NQOF721BqfVBq1VRnFKEfEYGPUYvp2pEuB06bQ3dKqqjeV0DoLaOdwQCAtbASpVaNj9G9k2NXK6vLIUw/jMTgKfgoHXO+qxRqKq0FFFTvA2CE6WpCdQl8mfUgNye8w+qs2VgaKrBhY1zEnVgbq9iYO59ow2gAthW+h49Si1qppag6gwj9CLQ+RkaarnU5iNoaxuONvL2/imsO8HP+G2hVAV4RCAAEDAgidMhAqo4d54IZlwM4H95r+nnJHdcAkHjROW7XP/3pu11eT/nfG11eT7jlytPWadpukz+/+ehpZTzFm89n3sjbj0mA1NJVDDFe3J3d0Ct0ezAw8IFLqE4rJPe5DUQ/Molja9JIfHcGRxZswmZxTMChMmhQ+qpQ+Tv+WBRqx1Acu82OvdGOvdmlSLvNzoDpo/BLCne+p40JonjFLqr3FxF6Y/LJso3NhpGcchmz8pcc6o9Vc3ThD9TsL8SSVYpuiIniFbtd6vCUSwY+QGF1Ghtyn2NS9COkHVvDjMR32XRkAfU2R1IVjcqASumLRuUPOA60BpsVu92G3d5Io911SJPdbmPUgOmE+yU53wvSxrCreAVF1ftJDj15snYM3XFZu3t2tIt4e3+F6s/i+vjFrD38KBV1RwnUDMTT/IMDuPbxM085LFx58/nMG3n7MZlauoqKuqP9/hYB9EAwUPTBDizZx0CpwCdIhzrUn7wlP1O1O4+AcbGtrqtUqzi6+Adqs8uIuv8iqlMdkXfE7edjfvFb1KEGVP6+BJwfS/nGTBoqLBgvHuJcv62hO6ZpIzBNG0HtkXIK/70N3RATdrudyu1mBv7t/3XJ/nfGjqIPOGbJRoESnU8Q/upQfs5bQl7VbmIDxrW6rkqp5oejiymrzeaiqPspqE4F4PyI2/nW/CIGdSi+Kn9iA84ns3wjloYKl+i4rWE8AN+aX6SgOo01h+dwZezfnZG/p3hzf1VaC/kp73XsdhtKhQ8Bvn1rDvymy/1NQwQ7y2az8cEDC6m3OIYl/mnRQ11Sb2d58/kMwPzit1SnFXB4zhpi/36l8+qFp3jzMVlQvY8NuX/nrKDJrM95mitin+6Sfe6tFHa73e3YcteuXYwePZqR62d2a2KPzLtWOofneLOq1HxSr0hh586dnHvuua2Wbeq7mSPXd2uSj5WZdzmH6niL/KpUUlKvaFc/NZH+cq+/4GSfPfHjWwxKPuuM5f7z8bdk/rwHjb+e65+6kx1fbMa89xB1VTXMeOVvfPXyMmoqqqipOMHAEYOpKqukPK+EO9+ex
6u/n0vCxedQfDiPS+64BvNvB/APNjJw+GC+ee1jsNsZEBfJ8MljWfPCuwRHhzH+D1OIShrsdj+8fedz3PHWY+2ajjh3zwGevfjuDv2N9cfzmTvnriZyTLp/TPYGXj2awNsOnN7E2w4ib9cf+6s4O5+BwweTPPVC1BrHbHkqtYry/FLMvx0E4LzpE4lMjOXf9/0fs5Y/y/t/+QfV5SewNdqYfO90ak9Y+Ozptxgy1nHJdtM7q/DVaVBrfcnbn03MqKHogwyc//tLXQKB9mQ3zM/I4euFH6E3+ntlXgJ3yfnMPf3xmPSk3n+ECSE65Oo5tzJ0/Nl88vhSig4dZccXm7nhmZnEjU7EanEkiNEZ9Kg1anQGx2xxKrUPDVYrdpsdW6ONxoZm93NtNs6/cTLXPHY7t70xm4QLk5n60B/ZvfZntixf71K2reyGkYmx3JHyKHabnWPm0+f2F0J0nR67MtAdl8hSp71D1P9eSPCURHLnb8RmqUepUzPosTOPQ3V3ToHjW7Ip+Xg39kYbgx6fAkoFh+euJewP5/bYNKDdcbnsndRpXBj1vyQGT2Fj7nzqbRbUSh2XDXqsxfLtKdN83oGD5Zv4/shLzDz76y5te1t6Q3+1NO9AT/fXD/9eQ3HWURRKJX7BARjDQ/h60Udk78zgrAtHtbquyteHr15aRlHWUa56+I+YfzsAwKS7r+PzZ97BGB6M1l/PWRNG8dvXW6kuryRp0hjn+m0NS6woKGXdK8ux2+wofVQEtZJK2RO84XzWnjkFmtdTvukgR176nrO/nnnGdbpDbzgmW5p3ILV0FbuKP+R/krxnuGZ36ZJg4PCcNUTPuRSfQC0HZ31G7FOXU/juNhrKLQROjCf4imHOsk0HUeH729HFm/AJ0FL88W7sDTYMo6MZML31k9CpfMMMBE9JbDXV56k6MqdA0bLtDF1yA5YDJRR/tIuBD1xC8OXtH4vfljWH53Bp9By0PoF8dnAWl8c+xbbCd7E0lBMfOJFhwVc4yzYdUNsL38eki0frE8Du4o+x2RuINoxm1IDp7d6uwTeMxOAprab9bNKeMnD6vANDgyayq/jDDvTKmfWV/mpp3oHu6K/WNB+7f+tix0N6V/7tD4DrUMKm4X1NH+BqjZprn7jDuTx6xMkH3e765+Mu9SZe7P6QxMAIEzP+8Ve31+sKveV81tacAi3VEzRxKMUf7nKvQ9rQV47JluYdGGm6hv3H1na0a3qVLgkGQq4ezrE1+9DGhWCcEIfCR4nN2oh6gB8ln+91OXiay0/ZijY2GIDqfQUuB8+RhZtpKLc4XwdOjCdo4tDT6mgt1aezTEfnFLCDQqlEMzDQmUGsKw0PuZp9x9YQoo0jzjgBpcKHRpsVP/UA9pZ87nIgNbc1P4VgbSzgeDL21ANp85GFWBrKna/jAycyNGhi8ypaTfvpTpme0pf6yxvnHWiv5nMB9CW94XzWHl1VT1v60jEJ/XfegS45AwWMj6Xy11zK1u0n5HfDKfn0N4KnJBB1/8U0VrtOY6pQOuY8tdU43rfXNxJxxziiH5pI7FPN/miap/c8Q6rO1lJ9Njl1ToHKrdlYshwBQZtzCigV2G02R3TeQr2dFRswntzKX9lfto7hIb/jt5JPSQiewsVR92NtrHYpq/jvB4bVVgNAo72ecRF3MDH6Ia6IfcqlrA3XVJ9nStvZWtpPd8r0lL7UX03zDtixUVF31I1eEN2pN5zP2qOr6mlLXzomm+YdaEqW1J90yZUBxzdnI9bCE/gE6jCMiaFo2XZObDejVKtcyvqGG8h/cwuV28z4nR1J5KwLyX5iHWqTH5roQCL+fHLsafTD7UsA0lKqz4ofs/AJ0OKf7Ij+2junwNFXf2TgX05GhWG3jCbr4dXY6xsZNK/rU18qFUqMmoGcsBai8wkkxjCG7UXLMJ/YjkrpmhLV4BvOlvw3MVduI9LvbC6MnMW67CfwU5sI1EQzLuLk5DGToh9u1/ZbSvuZVfEjWp8Ao
vyT210Gembegb7SX94078DSPz3Z5d/0X5g8iysfnEHyVRNaTWMMUHjQzNcLP8JutxNxVgxXPjDjtDI1x6tY+egblOYW8MhXiwDY9ul3/PT+Vzy0ekGXtr03nM/g9DkFKv+T61KmpXq6Q185Jvv7vANd9gDhoHlTnL8bxkRjGBPtsrzpYZvYZxzTj0beM8G5bOhr7b9PdMbtN0v1WbUnj/Dbxp5WThsdROyTjmlXFQoFScv/5FxmOVSCb6i/S3njhYMxXuj+2Gh3TBk0z/l7tGEM0YYxLsubHry5MvYZACZE3uNcNn3oa53efvO0n3lVexgbfpvbZSbHzGVyzNxOt6ctfaG/AnzDmRr3XKfb0pZlf3uF6564E32Qgbf//Cw3vTCL79/+kuqy44yYPJZzfneRs2xTULD5n6sIPysGfYA/W5avx9bQyOCxSVxw85RWtuQqMCKE5KsmtJnGGCB8aAy3L5kDwBu3PNFifXqjP7cvmcPSPz3pfG/sDZeyc9UPLZbvrN5wPouZO5mYuZNbLdO8nu7SF47JCL8RPDR6Z6fb0lv1rhuVzfgEainbkNHisoF/uRifAK1b9eniBxB6c+sTSViLT1C16yg+QXq36vY2Wp9AMso2tLjs4oF/QevT+iXF9pQ5WL4Jtap391OT3tpfY66byPYvNpPxwy4S/9+5KH1UNNRZCRgQzK8ff9vquhte/xh9oD/+JiPmZimM17z4HivmvOb8t2/jf1qso6U0xmey7dPvGD5pzBmX93WdPZ+1p0z5poOo9OpWy/QWPXFMppauwuDB26I9qVNXBk59EM8TmjJ/VaXm9+h2Q/842mW7HemHUsvBLm2Tu5qygOVXpXbbNvzUJsaF3+ncRmf2WfqrYxIuSubHf6/haFoW1z95Fz9/sI7kqyYQN2YYr988z6Ws4r8T+9TVOOYYaKhvYPK9N+AXZDitXsccAyfnfbed4f53a2mMT7Xt0+84dqSoxVsEPaU/nM/UJj/C7xzXqXNXk/5wTIZoBxOiHdylx6S36lAwYDKZ0Op1HLr/s65uT6+l1eswmUxtljOZTOi0ej47dH8PtMr76LT6dvVTE+kv9/qrOaVSSUhMOBUFpfgFGYgfO4LN/1rNoV/34ePr+g0xMMLEN6+u4OAvqQxKPosr//YHPnxkMQGhwZhiwrn0nuudZa+Zd3u7tt9SGuO073egN/oTN9oxRNf820E+eXwpZ19xASsffYObXpjFLys2cNaEUYREn8zat+xvr2D+7RAfz1vCjc/f1+E+aa6/n8/ae+5qIsdk545Jb9Wh3AQAZrOZ0lLPRtLexGQyERMT066y/bnv3OmnJtJf7vVXe3MTdKfWHkr86h8fMPGua9Eb/VtcDvDZ029x3RN3oFSpzlimpe10JDcByN+YHJPt15H+6g06fJsgJiamT3ZIT5C+c4/0V++jDzSwZ90Wkq+acNqyqQ//sc31pz99d5tltn36HYERAzrUvubkb8w90l99j1cnKhJCdFxBZq7Htn3JHdcAjm/q3SUsPpqw+GiXbXhyn4XozTp8m0AI4Z3MZjOJw4ZhqanxdFM8QqfXk5GeLt9chXCDBANC9EFddU932bJlLF68mGXLljFs2Jmn4e2IkpISpk+fzpQpU3j88cfbXqGd+uo9XSG6kwQDQogWmc1mkpKSuOOOO1i8eHG3bGPJkiXMmjWLLVu2MH78+G7ZhhCibRIMCCFadO2117J9+3bS09MJCOieee0bGxu54IILsFgs7Nq1C7W6b0yII0Rv06tnIBRCdI9Vq1axatUqFi9e3G2BAIBKpSIlJYX9+/ezaNGibtuOEKJ1cmVACOGiqqqKpKQkRo4cydq1a1EoFN2+zQcffNAZFAwaNKjbtyeEcCXBgBDCxcMPP8ySJUtIS0sjLi6uR7Z54sQJkpKSSE5OZvXq1T0SgAghTpLbBEIIp99++41Fixbx5JNP9lggAGAwGHj11VdZu3YtX375ZY9tVwjhIFcGhBAA2
Gw2JkyYQGVlJbt378bX17dHt2+325k2bRq7d+8mPT0dg+H0BElCiO4hVwaEEAC8/fbb/Prrr7z55ps9HggAKBQKXn/9dcrLy3nqqad6fPtC9GdyZUAIQVFREYmJiVx//fX885//9GhbXnrpJR599FF27NjBOeec49G2CNFfSDAghOCWW27hm2++ITMzk5CQEI+2pb6+nnPPPRedTscvv/yCqo3MhUKIzpPbBEL0c99++y0ffvgh//jHPzweCACo1WpSUlLYvn07KSkpnm6OEP2CXBkQoh+rra3l7LPPJjIykk2bNnnVkL67776blStXkpmZSXh4uKebI0SfJlcGhOjHXnzxRXJycli6dKlXBQLgaJtGo+GBBx7wdFOE6PMkGBCin8rMzOSFF15g9uzZXZ6RsCsEBwfzyiuvsGLFCjZs2ODp5gjRp8ltAiH6IbvdzuTJk8nJyWHfvn3odDpPN6lFdrudSy+9FLPZTGpqqte2U4jeTq4MCNEPLV++nO+//54lS5Z49QesQqFg6dKlHDlyhPnz53u6OUL0WXJlQIh+pqysjMTERCZNmsSKFSs83Zx2eeqpp3jhhRfYu3cviYmJnm6OEH2OBANC9DMzZ85kxYoVZGRkEBER4enmtEttbS0jR44kKirK60Y9CNEXyG0CIfqRrVu38tZbbzF//vxeEwgAaLValixZwg8//MCyZcs83Rwh+hy5MiBEP9E0s59Wq+XXX3/tlTP7zZgxg40bN5KRkeEVEyQJ0VfIlQEh+olFixaxf/9+UlJSemUgALBgwQLq6+uZO3eup5siRJ8iVwaE6OOys7NRKpUkJSVx1113sWjRIk83qVOWLl3Kfffdx08//cTAgQMZNGiQPEMgRCdJMCBEH5aenk5SUhITJ07kwIEDpKenYzAYPN2sTrHZbIwfP54TJ05w+PBhVqxYwTXXXOPpZgnRq8ltAiH6sOzsbAA2bdrE8OHD2bRpk4db1Hl79uzBZDKRkZGBzWbj8OHDnm6SEL2eBANC9GE5OTkAqFQqdu/e3SceugsMDCQjIwMAq9VKZmamh1skRO8nwYAQfdiOHTsAuPHGG8nIyGDChAkeblHnDR48mNTUVGbPng04rhQIITpHnhkQog87ceIEaWlpjBs3ztNN6Rb79u0jKCiIqKgoTzdFiF5NggEhhBCin5PbBEIIIUQ/5+PpBgjRW5nNZkpLSz3dDI8wmUzExMS4vZ70mft9JkRPkGBAiA4wm80kDEuktsbi6aZ4hFavIzM9w60PN7PZTGLCMCy1Nd3YMu+l0+rJyEyXgEB4JQkGhOiA0tJSamssxL82Hd1Qk6eb06MsB0s5dP9nlJaWuvXBVlpaiqW2hunxr2HSDe3GFnqfUstBPjt0v9t9JkRPkWBAiE7QDTXhPzLS083oVUy6oUT6j/R0M4QQp5AHCIXwEjnPftPi+4Xvb6c2p8ytunLnbyT7iXXkzt/o8n5jjZVDf/2crEdWUfL53g631Rt8k/Nsi+9vL3yfstoct+ramDufddlPsDF3vsv71sYaPj/0V1ZlPcLeks872lQhvJ4EA0J4QM3BEg7c+wnml79n/y3LAKgzVwCw59I3yH9zCwdmfUqjpZ76kipstfXtrrsurwJ7fSNxz16FvdFGXd5x57KydekETx3OkJevoXxDRpfuU3cqqTnIJwfu5XvzyyzbfwsAFXVmAN7Ycylb8t/k0wOzqG+0UFVfQr2ttt11V9Tl0Wiv56q4Z7HZGzlel+dcll62juHBU7lmyMtklG/o2p0SwovIbQIhPKD4w50MmncZvuEG9s9Y5rJMExFA5D0TKPjXr9SkFZ62bn15DUcX/uDyXvjtY9HFOaYathacQBNpdNQVZcRaUIkmyvjfZccJTAx1rKTqPd8FdhZ/yGWD5mHwDWfZ/hkuywI0EUyIvIdfC/5FYU3aaevW1Jfzw9GFLu+NDb+dEF0cACesBRg1jls9Rk0UldYCjBrHJEbHrQWEBiYCoKR3p
n0Woj16z9lAiL5IoTgt/a5S5+tY5KPCZm1ocTV7o83lH6dMHeYbYaCuoBIAa34lvhEBpyxzBAcA2HrffGMKTu8vX6UOAJXChwabtcX1bPZGl3+ndpjBN4LKugIAKq35BPhGOJcZfSOotDqW2bF15a4I4VXkyoAQHhA6YzS58zeiGxyCUq92a111kJ7Bz08943JNVCAKtZKcp9ej0KjQRBnJT9mKcUIcwVcNI3veV5R/d4CgyxI6uxs9ZnToDDbmzidENxi1Uu/Wunp1EFMHP3/G5YGaKJQKNetznkal0GDURLE1P4U44wSGBV/FV9nzOFD+HQlBl3V2N4TwWhIMCOEBapMf2rgQ6stqGPD7ZAAS3r7J5Wf4recBYBwf53b9gx51/eCKnDne+Xv8wus60mSP8lObCNHGUVNfRvKA3wNwU8LbLj/PC78VgDjj+JYracVlgx51eT0+cqbz9+viFzYvLkSfI8GAEB6gDtIT88gkTzej19Crg5gU84inmyFEnyXBgBC9SPHK3aiD9V12id9yqJS8JT+D3Y4u3kTUrIu6pF5vsbt4JXp1cJde4l+dNZsjJ3YyK/m7LqtTCE+TYECIblbyxV4qf8lB5edLzNzJHFubRvW+AhqrrcQ9P5W8V3+k4biFhopa/JLCqC+vwZpfydDXppN+63KM42OpzSkj7L+3DQDqjlaQn7IV7Ha0g4IxXhLP0QWb0UQZGXDDKPSJYe1qmy7eRPyCawHIvHNFd+y+2/aWfEFO5S/4qvyYHDOXtGNrKajeh7Wxmqlxz/Nj3qtYGo5T21BBmF8SNfXlVFrzmT70NZan30qscTxltTmcF3ars86KuqNszU/Bjp1g7SDijZew+egCjJooRg24gTB9YrvbN23IS6zMvKs7dl0Ij5FgQIhuVpdThn5YGMFTElFqHIecQq3CWlBJ9T7Hk+qmq0egSwgl68EvSXjnZrJmr6ahwgI2GxF3jqOxykru/I0YRkcDUPjeNpRaH5RaNdUZRehHROBj1GK6dqRLINDWMMQmpatSMV48pDu7od3K6nII0w8jMXgKPkoNACqFmkprAQXV+wAYYbqaUF0CX2Y9yM0J77A6azaWhgps2BgXcSfWxio25s4n2jAagG2F7+Gj1KJWaimqziBCPwKtj5GRpmtdAoG2hiEK0VdJMCBENxv4wCVUpxWS+9wGoh+ZxLE1aSS+O4MjCzZhszgmE1IZNCh9Vaj8HR9+CrVjWKHdZsfeaMfe0OhSp91mZ8D0UfglhTvf08YEUbxiF9X7iwi9Mflk2cZmQ+KajSgsXZVK3dEKr7lFcMnAByisTmND7nNMin6EtGNrmJH4LpuOLKDe5kgMpVEZUCl90aj8AUew0GCzYrfbsNsbabS7Dsm0222MGjCdcL8k53tB2hh2Fa+gqHo/yaE3Ot93DD10Wbt7dlQILyLBgBDdrOiDHViyj4FSgU+QDnWoP3lLfqZqdx4B42JbXVepVnF08Q/UZpcRdf9FVKc6riRE3H4+5he/RR1qQOXvS8D5sZRvzKShwuLyDb+tYYjV+wrI/fsGgiafRc7T64l9+oou2efO2FH0Accs2ShQovMJwl8dys95S8ir2k1swLhW11Up1fxwdDFltdlcFHU/BdWpAJwfcTvfml/EoA7FV+VPbMD5ZJZvxNJQwRDjxc712xqGCPCt+UUKqtNYc3gOV8b+3Xn1QojeTGG32yXsFcJNu3btYvTo0YxcP7NbExVl3rXSOdTQW1Sl5pN6RQo7d+7k3HPPbfd6TX02c+T6bktUtDLzLudQQ2+SX5VKSuoVbveZED1FZiAUwot5WyDg7bwxEBCiN5BgQAghhOjnJBgQoodk3rWyy+tMnfYOZf/NPnimtMVNLIdKOfTglxx64Avy3vipxTJ2u52s2as5/Nha8t/cAjgeMNx/03td3va2dMfwvXdSp5FR5sg+eKa0xadqT5nVWbN5Y8+lztcHyzeRsvfKrmu0ED1AggEhusDhOWuoL
6vBbrNx4N5PsBZWYn7xWw7PWUPZ+nSXsk1BQeH72zm+NZvqfQVkP7mOw4+tpeSz39zarm+YgeApia2mLW7SNKdA/MLrqNqd10JtcGKbGX1iKIPn/47qtEJs1gZM14xEFaB1q11tWXN4DjX1ZdjsNj45cC+V1kK+Nb/ImsNzSC9b71K2KSjYXvg+2ce3UlC9j3XZT7L28GP8VvKZW9s1+IaRGDyl1bTFTdpTBhzzDph0g52vhwZNJFAz0K12CeFpMppAiC4QcvVwjq3ZhzYuBOOEOBQ+SmzWRtQD/Cj5fC/BVww747r5KVvRxgYDjqf7B0wf5Vx2ZOFmGsotzteBE+MJmjj0tDpaS1vcXGtzClgLKp31qE1+NJRb8A0ztLH37hsecjX7jq0hRBtHnHECSoUPjTYrfuoB7C35nGHBZx7VsDU/hWBtLAAF1fsYNWC6c9nmIwuxNJQ7X8cHTmRo0MTT6mgtbbE7ZYToKyQYEKILBIyPpWj5TmrSi4iZO5niFbsInpKA/zkDyfjzRy5lFUpHCl5bjSPdrr2+kYg7xuETqDu94qYUxU3OkHbYN8JA3bpT0hZf3nLw0dacAr4RAVTvLwSg/lgNPkEttKkLxAaMZ2fRcopq0pkcM5ddxStICJ7CQP9z+Cjjzy5lFQrHBUyrrQaARns94yLuQOcTeFq9Nhpd5gk4U9phR9ridYAjbfEw38s7VEaIvkKCASG6gEKpRDPQiLXwBD6BOgxjYihatp0T280o1SqXsr7hBvLf3ELlNjN+Z0cSOetCsp9Yh9rkhyY6kIg/nxxLH/1w+5IZtZS2uOLHLHwCtPgnO77NtjSnQMlnvxEwbhCaqEAADGNjKP1iL9lPrsMvKQylb/ecIpQKJUbNQE5YC9H5BBJjGMP2omWYT2xHpXRN6WzwDWdL/puYK7cR6Xc2F0bOYl32E/ipTQRqohkXcTJ4mBT9cLu231La4qyKH9H6BBDln9zuMiDzDoi+QYIBIbrIoHlTnL8bxkRjGBPtsrxpmGDsM46HyyLvmeBcNvS16XRW87TFVXvyCL9trPO134gIRu98yKVMzYFiTNeeHPOvUCgY/OLVnW5Le0wZNM/5e7RhDNGGMS7Lm4YJXhn7DAATIu9xLps+9LVOb7952uK8qj2MDb/N7TKTY+YyOWZup9sjhCdJMCBEJ1gOlnp0+3abjfy3thJwQexpywInxlObW9bq+iG/G+68LXAmFZsPoVArqUrNBzq/z6WWg51avzNsdhtb898iNuCC05bFB06krDa31fXbU8Z8YgcNNiv5VanO9zy5z0K0h8xAKEQHmM1mEoYlUltjabtwH6TV68hMzyAmJqbd65jNZhIThmGprenGlnkvnVZPRma6W30mRE+RYECIDjKbzZSWevbKgKeYTKYOfahJn0kgILyTBANCCCFEPyeTDgkhhBD9nAQDQgghRD8nwYAQQgjRz0kwIIQQQvRzEgwIIYQQ/ZwEA0IIIUQ/J8GAEEII0c9JMCCEEEL0cxIMCCGEEP2cBANCCCFEPyfBgBBCCNHPSTAghBBC9HMSDAghhBD9nAQDQgghRD8nwYAQQgjRz0kwIIQQQvRzEgwIIYQQ/ZwEA0IIIUQ/J8GAEEII0c9JMCCEEEL0cxIMCCGEEP2cBANCCCFEPyfBgBBCCNHPSTAghBBC9HMSDAghhBD93P8HoZ5cpab9tMYAAAAASUVORK5CYII=","text/plain":["<Figure size 640x480 with 1 Axes>"]},"metadata":{},"output_type":"display_data"}],"source":["from sklearn.tree import plot_tree\n","\n","plt.figure()\n","clf = DecisionTreeClassifier().fit(iris.data, iris.target)\n","plot_tree(clf, 
filled=True)\n","plt.title(\"Decision tree trained on all the iris features\")\n","plt.show()"]},{"cell_type":"code","execution_count":48,"metadata":{"trusted":true},"outputs":[],"source":["from sklearn.tree import export_text"]},{"cell_type":"code","execution_count":49,"metadata":{"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":["|--- petal length (cm) <= 2.45\n","| |--- class: 0\n","|--- petal length (cm) > 2.45\n","| |--- petal width (cm) <= 1.75\n","| | |--- petal length (cm) <= 4.95\n","| | | |--- petal width (cm) <= 1.65\n","| | | | |--- class: 1\n","| | | |--- petal width (cm) > 1.65\n","| | | | |--- class: 2\n","| | |--- petal length (cm) > 4.95\n","| | | |--- petal width (cm) <= 1.55\n","| | | | |--- class: 2\n","| | | |--- petal width (cm) > 1.55\n","| | | | |--- petal length (cm) <= 5.45\n","| | | | | |--- class: 1\n","| | | | |--- petal length (cm) > 5.45\n","| | | | | |--- class: 2\n","| |--- petal width (cm) > 1.75\n","| | |--- petal length (cm) <= 4.85\n","| | | |--- sepal length (cm) <= 5.95\n","| | | | |--- class: 1\n","| | | |--- sepal length (cm) > 5.95\n","| | | | |--- class: 2\n","| | |--- petal length (cm) > 4.85\n","| | | |--- class: 2\n","\n"]}],"source":["r = export_text(clf, feature_names=iris['feature_names'])\n","print(r)"]},{"cell_type":"code","execution_count":50,"metadata":{"trusted":true},"outputs":[],"source":["from sklearn.tree import _tree\n","\n","def tree_to_code(tree, feature_names):\n"," tree_ = tree.tree_\n"," feature_name = [\n"," feature_names[i] if i != _tree.TREE_UNDEFINED else \"undefined!\"\n"," for i in tree_.feature\n"," ]\n"," feature_names = [f.replace(\" \", \"_\")[:-5] for f in feature_names]\n"," print(\"fn predict({} :u32) -> u32 {{\".format(\": u32, \".join(feature_names)))\n","\n"," def recurse(node, depth):\n"," indent = \" \" * depth\n"," if tree_.feature[node] != _tree.TREE_UNDEFINED:\n"," name = feature_name[node].replace(\" \", \"_\")[:-5]\n"," threshold = 
tree_.threshold[node]\n"," print(\"{}if {} <= {} {{\".format(indent, name, np.round(threshold,2)))\n"," recurse(tree_.children_left[node], depth + 1)\n"," print(\"{} }}\".format(indent))\n"," print(\"{}else if {{\".format(indent, name, np.round(threshold,2)))\n"," recurse(tree_.children_right[node], depth + 1)\n"," print(\"{} }}\".format(indent))\n"," else:\n"," print(\"{}return {}\".format(indent, np.argmax(tree_.value[node][0])))\n","\n"," recurse(0, 1)\n"," print(\"}\")"]},{"cell_type":"code","execution_count":51,"metadata":{"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":["fn predict(sepal_length: u32, sepal_width: u32, petal_length: u32, petal_width :u32) -> u32 {\n"," if petal_length <= 2.45 {\n"," return 0\n"," }\n"," else if {\n"," if petal_width <= 1.75 {\n"," if petal_length <= 4.95 {\n"," if petal_width <= 1.65 {\n"," return 1\n"," }\n"," else if {\n"," return 2\n"," }\n"," }\n"," else if {\n"," if petal_width <= 1.55 {\n"," return 2\n"," }\n"," else if {\n"," if petal_length <= 5.45 {\n"," return 1\n"," }\n"," else if {\n"," return 2\n"," }\n"," }\n"," }\n"," }\n"," else if {\n"," if petal_length <= 4.85 {\n"," if sepal_length <= 5.95 {\n"," return 1\n"," }\n"," else if {\n"," return 2\n"," }\n"," }\n"," else if {\n"," return 2\n"," }\n"," }\n"," }\n","}\n","None\n"]}],"source":["c = tree_to_code(clf, feature_names=iris['feature_names']);\n","print(c)"]}],"metadata":{"kernelspec":{"display_name":"Python 3 (ipykernel)","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.10.9"}},"nbformat":4,"nbformat_minor":4}
| https://github.com/only4sim/ZK-DTP |
backwards/backward.py | #
# A script for generating a backprop computational graph from the forward pass
#
import argparse
import ast
from typing import Literal, Union
import msgpack
import numpy as np
class CircuitConfig():
    """Tracks tensor-index allocation while building the backward graph.

    Indices are handed out sequentially starting from `starting_index`,
    which should be one past the largest forward-graph tensor index.
    """

    def __init__(self, starting_index):
        self.next_index = starting_index  # next free tensor index
        self.outp_to_grad = {}            # forward tensor idx -> gradient tensor idx
        self.label_tensor_idx = None      # index reserved for the label tensor
        self.weights_update = None        # index of the computed weight gradient

    def _alloc(self):
        # Hand out the current index and advance the counter.
        allocated = self.next_index
        self.next_index += 1
        return allocated

    def new_gradient_tensor(self, tensor_idx):
        """Allocate (at most once) a gradient tensor index for `tensor_idx`."""
        if tensor_idx in self.outp_to_grad:
            raise Exception("Tensor already allocated")
        self.outp_to_grad[tensor_idx] = self._alloc()
        return self.outp_to_grad[tensor_idx]

    def new_tensor(self):
        """Allocate a fresh intermediate tensor index."""
        return self._alloc()

    def new_label_tensor(self):
        """Allocate (at most once) the index for the one-hot label tensor."""
        if self.label_tensor_idx is not None:
            raise Exception("Label tensor already allocated")
        self.label_tensor_idx = self._alloc()
        return self.label_tensor_idx

    def gradient_tensor_idx(self, tensor_idx):
        """Look up the previously allocated gradient index for `tensor_idx`."""
        return self.outp_to_grad[tensor_idx]
# TODO: Put these in enums
NO_ACTIVATION = 0     # fused-activation code: no activation
SAME = 0              # padding code: SAME (tflite convention)
VALID = 1             # padding code: VALID (tflite convention)
CONV2D = 0            # conv kind: standard Conv2D
CONV2D_DEPTHWISE = 1  # conv kind: depthwise Conv2D
class Conv2D():
    """Emits the backward-pass layers for a Conv2D forward layer.

    `backward()` appends to `transcript`:
      * dW -- a convolution of the (batch/channel-permuted) inputs with the
        permuted output gradients; its index is stored in
        `config.weights_update`.
      * dX -- a "full" convolution of the padded output gradients with the
        180-degree-rotated, permuted weights; registered as the gradient of
        the layer's input tensor.
    The bias gradient is not emitted.
    """

    def __init__(self, layer):
        params = layer['params']
        # params[0] (conv kind: CONV2D vs CONV2D_DEPTHWISE) is ignored here --
        # presumably only standard Conv2D is supported; TODO confirm.
        self.padding = params[1]
        self.activation_type = params[2]
        self.stride_h = params[3]
        self.stride_w = params[4]

    def backward(self, layer, transcript, config):
        inputs_idx, inputs_shape = layer['inp_idxes'][0], layer['inp_shapes'][0]
        weights_idx, weights_shape = layer['inp_idxes'][1], layer['inp_shapes'][1]
        # Bias is read but unused: no bias gradient is produced.
        bias_idx, bias_shape = layer['inp_idxes'][2], layer['inp_shapes'][2]
        output_idx, output_shape = layer['out_idxes'][0], layer['out_shapes'][0]

        # --- dW: permute inputs and output gradients so the batch dimension
        # acts as the channel dimension (NHWC -> CHWN), then convolve. ---
        permuted_inputs_idx = config.new_tensor()
        permutation = [3, 1, 2, 0]
        permuted_inputs_shape = [inputs_shape[p] for p in permutation]
        inputs_permute_layer = {
            'layer_type': 'Permute',
            'params': permutation,
            'inp_idxes': [inputs_idx],
            'out_idxes': [permuted_inputs_idx],
            'inp_shapes': [inputs_shape],
            'out_shapes': [permuted_inputs_shape],
            'mask': [],
        }
        transcript.append(inputs_permute_layer)

        permuted_outputs_idx = config.new_tensor()
        permuted_outputs_shape = [output_shape[p] for p in permutation]
        outputs_permute_layer = {
            'layer_type': 'Permute',
            'params': permutation,
            'inp_idxes': [config.gradient_tensor_idx(output_idx)],
            'out_idxes': [permuted_outputs_idx],
            'inp_shapes': [output_shape],
            'out_shapes': [permuted_outputs_shape],
            'mask': [],
        }
        transcript.append(outputs_permute_layer)

        dw_idx, dw_shape = config.new_tensor(), weights_shape
        dw_conv = {
            'layer_type': 'Conv2D',
            'params': [CONV2D, VALID, NO_ACTIVATION, self.stride_h, self.stride_w],
            'inp_idxes': [permuted_inputs_idx, permuted_outputs_idx],
            'out_idxes': [dw_idx],
            'inp_shapes': [permuted_inputs_shape, permuted_outputs_shape],
            'out_shapes': [dw_shape],
            'mask': [],
        }
        transcript.append(dw_conv)
        config.weights_update = dw_idx

        # --- dX: full convolution of padded gradients with rotated weights. ---
        permutation = [3, 1, 2, 0]
        permutation_weights_idx = config.new_tensor()
        permutation_weights_shape = [weights_shape[p] for p in permutation]
        permute_weights = {
            'layer_type': 'Permute',
            'params': permutation,
            'inp_idxes': [weights_idx],
            'out_idxes': [permutation_weights_idx],
            'inp_shapes': [weights_shape],
            'out_shapes': [permutation_weights_shape],
            'mask': [],
        }
        transcript.append(permute_weights)

        # Rotate the kernel 180 degrees over the two spatial axes.
        rotated_weights_idx, rotated_weights_shape = config.new_tensor(), permutation_weights_shape
        rotate_layer = {
            'layer_type': 'Rotate',
            'params': [1, 2],
            'inp_idxes': [permutation_weights_idx],
            'out_idxes': [rotated_weights_idx],
            'inp_shapes': [permutation_weights_shape],
            'out_shapes': [rotated_weights_shape],
            'mask': [],
        }
        transcript.append(rotate_layer)

        # Pad the output gradients by (kernel - 1) on each spatial side.
        # BUG FIX: copy before mutating -- previously this aliased
        # `output_shape` (i.e. layer['out_shapes'][0]) and corrupted the
        # forward layer's recorded output shape in place.
        padded_gradients_idx = config.new_tensor()
        padded_gradients_shape = list(output_shape)
        padded_gradients_shape[1] += (rotated_weights_shape[1] - 1) * 2
        padded_gradients_shape[2] += (rotated_weights_shape[2] - 1) * 2
        pad_layer = {
            'layer_type': 'Pad',
            'params': [
                0, 0,
                rotated_weights_shape[1] - 1, rotated_weights_shape[1] - 1,
                rotated_weights_shape[2] - 1, rotated_weights_shape[2] - 1,
                0, 0
            ],
            'inp_idxes': [config.gradient_tensor_idx(output_idx)],
            'out_idxes': [padded_gradients_idx],
            'inp_shapes': [],
            'out_shapes': [],
            'mask': [],
        }
        transcript.append(pad_layer)

        dx_idx, dx_shape = config.new_gradient_tensor(inputs_idx), inputs_shape
        input_conv_layer = {
            'layer_type': 'Conv2D',
            'params': [CONV2D, VALID, NO_ACTIVATION, self.stride_h, self.stride_w],
            'inp_idxes': [padded_gradients_idx, rotated_weights_idx],
            'out_idxes': [dx_idx],
            'inp_shapes': [padded_gradients_shape, rotated_weights_shape],
            'out_shapes': [dx_shape],
            'mask': [],
        }
        transcript.append(input_conv_layer)

        # --- weight update (the Update layer is currently disabled). ---
        permutation = [3, 1, 2, 0]
        permuted_dw_idx = config.new_tensor()
        permuted_dw_shape = [dw_shape[p] for p in permutation]
        permute_dw = {
            'layer_type': 'Permute',
            'params': permutation,
            'inp_idxes': [dw_idx],
            'out_idxes': [permuted_dw_idx],
            'inp_shapes': [dw_shape],
            'out_shapes': [permuted_dw_shape],
            'mask': [],
        }
        transcript.append(permute_dw)

        updated_weights_idx, updated_weights_shape = config.new_tensor(), dw_shape
        # Call a layer to update the outputs of the convolution
        update_weights_layer = {
            'layer_type': 'Update',
            'params': [],
            'inp_idxes': [weights_idx, permuted_dw_idx],
            'out_idxes': [updated_weights_idx],
            'inp_shapes': [weights_shape, permuted_dw_shape],
            'out_shapes': [updated_weights_shape],
            'mask': [],
        }
        # transcript.append(update_weights_layer)
class Softmax():
    """Backward pass for a softmax assumed fused with cross-entropy loss."""

    def __init__(self, layer):
        return

    # TODO: Make this generalizable to all neural networks
    # (do not assume that softmax is the last layer, fused with CE-loss)
    def backward(self, layer, transcript, config):
        out_shape = layer['out_shapes'][0]
        # For softmax + CE loss the input gradient is simply y_hat - y.
        transcript.append({
            'layer_type': 'Sub',
            'params': [],
            # y_hat - y
            'inp_idxes': [layer['out_idxes'][0], config.label_tensor_idx],
            'out_idxes': [config.new_gradient_tensor(layer['inp_idxes'][0])],
            'inp_shapes': [out_shape, out_shape],
            'out_shapes': [out_shape],
            'mask': [],
        })
class AveragePool2D():
    """Backward pass for an average-pooling layer."""

    def __init__(self, layer):
        return

    def backward(self, layer, transcript, config):
        # TODO: This is very model specific, must rewrite to be accurate
        # Broadcast the incoming gradient back to the pre-pool input shape
        # (e.g. 1 x 1 x 1 x 1280 gradient over a 1 x 3 x 3 x 1280 input),
        # then divide by the pool area.
        inp_shape = layer['inp_shapes'][0]
        broadcast_idx = config.new_tensor()
        transcript.append({
            'layer_type': 'Broadcast',
            'params': [],
            'inp_idxes': [config.gradient_tensor_idx(layer['out_idxes'][0])],
            'out_idxes': [broadcast_idx],
            'inp_shapes': [layer['out_shapes'][0]],
            'out_shapes': [inp_shape],
            'mask': [],
        })
        pool_area = inp_shape[1] * inp_shape[2]
        transcript.append({
            'layer_type': 'Div',
            'params': [pool_area],
            'inp_idxes': [broadcast_idx],
            'out_idxes': [config.new_gradient_tensor(layer['inp_idxes'][0])],
            'inp_shapes': [inp_shape],
            'out_shapes': [inp_shape],
            'mask': [],
        })
class Reshape():
    """Backward pass for a reshape: reshape the gradient back."""

    def __init__(self, layer):
        return

    def backward(self, layer, transcript, config):
        # The gradient of a reshape is the incoming gradient reshaped to the
        # layer's original input shape.
        grad_in = config.gradient_tensor_idx(layer['out_idxes'][0])
        grad_out = config.new_gradient_tensor(layer['inp_idxes'][0])
        transcript.append({
            'layer_type': 'Reshape',
            'params': [],
            'inp_idxes': [grad_in],
            'out_idxes': [grad_out],
            'inp_shapes': [layer['out_shapes'][0]],
            'out_shapes': [layer['inp_shapes'][0]],
            'mask': [],
        })
def produce_graph():
    """Reads the forward-model msgpack, appends backward-pass layers, and
    writes the combined training graph to disk. Returns the model dict."""
    # Read msgpack file
    with open("examples/v2_1.0_224_truncated/model.msgpack", "rb") as data_file:
        byte_data = data_file.read()
    model = msgpack.unpackb(byte_data)
    # TODO: I'm unsure whether the circuit output is always the last indexed tensor
    # NOTE(review): np.max over ragged per-layer index lists compares the
    # lists themselves (object array), then [0] takes the first element of the
    # "max" list -- confirm this actually yields the largest tensor index.
    softmax_output_index = int(np.max(
        [[out for out in layer['out_idxes']] for layer in model['layers']] +
        [[inp for inp in layer['inp_idxes']] for layer in model['layers']]
    )[0])
    # Gradient/label tensors are allocated after every forward tensor index.
    circuit_config = CircuitConfig(softmax_output_index + 1)
    circuit_config.new_label_tensor()
    transcript = []
    # Walk the forward layers in reverse order, emitting backward layers.
    for layer in reversed(model['layers']):
        fetched_layer = None
        match layer['layer_type']:
            case "Conv2D":
                fetched_layer = Conv2D(layer)
            case "AveragePool2D":
                fetched_layer = AveragePool2D(layer)
            case "Softmax":
                fetched_layer = Softmax(layer)
            case _:
                # Any other layer type is treated as a pure reshape --
                # presumably safe for this truncated model; TODO confirm.
                fetched_layer = Reshape(layer)
        print(layer['layer_type'])
        fetched_layer.backward(layer, transcript, circuit_config)
        print('----------------')
    model['layers'] += transcript
    model['inp_idxes'].append(circuit_config.label_tensor_idx)
    # NOTE(review): hard-coded output tensor index, specific to this example
    # model -- verify before reusing on another graph.
    model['out_idxes'] = [31]
    packed = msgpack.packb(model, use_bin_type=True)
    with open("./examples/train_graph/train.msgpack", 'wb') as f:
        f.write(packed)
    print(model.keys())
    return model
# Build the training graph, then print a summary of the result.
model = produce_graph()
print(model.keys())
# Blank out tensor data locally so the following prints stay readable
# (the file on disk was already written with tensors intact).
model['tensors'] = ""
print(model['inp_idxes'], model['out_idxes'])
| https://github.com/ddkang/zkml |
examples/mnist/data_to_npy.py | import tensorflow as tf
import numpy as np
import msgpack
from tensorflow import keras
# Export a single MNIST test sample as a flattened, normalized .npy file.
mnist = tf.keras.datasets.mnist
(images_train, labels_train), (images_test, labels_test) = mnist.load_data()
# Take the first test image/label pair.
x = images_test[0]
y = labels_test[0]
print(y)
# Flatten and scale pixel values to [0, 1] as float32.
x = x.flatten() / 255.
x = x.astype(np.float32)
print(x.dtype, x.shape)
# NOTE(review): file name presumably encodes the sample's label -- confirm
# against the printed label above.
np.save('5.npy', x)
| https://github.com/ddkang/zkml |
python/converter.py | import argparse
import ast
from typing import Literal, Union
import tensorflow as tf
import numpy as np
import tflite
import msgpack
def get_shape(interpreter: tf.lite.Interpreter, tensor_idx):
    """Return the shape of tensor `tensor_idx` as a list ([] for the -1 sentinel)."""
    if tensor_idx == -1:
        return []
    return list(interpreter.get_tensor(tensor_idx).shape)
def handle_numpy_or_literal(inp: Union[np.ndarray, Literal[0]]):
    """Wrap a bare int sentinel into a one-element ndarray; pass arrays through."""
    return np.array([inp]) if isinstance(inp, int) else inp
def get_inputs(op: tflite.Operator):
    """Input tensor indices of `op`, with -1 (absent) entries removed."""
    raw = handle_numpy_or_literal(op.InputsAsNumpy())
    return [idx for idx in raw.tolist() if idx != -1]
class Converter:
    """Converts a TFLite model into the zkml msgpack circuit description.

    Floating-point tensors are quantized to fixed point via `scale_factor`;
    each supported TFLite operator is translated into a layer record of the
    form {layer_type, params, inp_idxes, inp_shapes, out_idxes, out_shapes,
    mask}.
    """

    def __init__(
        self, model_path, scale_factor, k, num_cols, num_randoms, use_selectors, commit,
        expose_output
    ):
        # Circuit/proving parameters, emitted verbatim by to_dict().
        self.model_path = model_path
        self.scale_factor = scale_factor
        self.k = k
        self.num_cols = num_cols
        self.num_randoms = num_randoms
        self.use_selectors = use_selectors
        self.commit = commit
        self.expose_output = expose_output

        # Preserving all tensors lets us read intermediate tensor values and
        # shapes after invoke().
        self.interpreter = tf.lite.Interpreter(
            model_path=self.model_path,
            experimental_preserve_all_tensors=True
        )
        self.interpreter.allocate_tensors()

        # Also parse the raw flatbuffer for operator/option metadata that the
        # interpreter API does not expose.
        with open(self.model_path, 'rb') as f:
            buf = f.read()
        self.model = tflite.Model.GetRootAsModel(buf, 0)
        self.graph = self.model.Subgraphs(0)

    def valid_activations(self):
        # Fused activation functions the circuit can represent.
        return [
            tflite.ActivationFunctionType.NONE,
            tflite.ActivationFunctionType.RELU,
            tflite.ActivationFunctionType.RELU6,
        ]

    def _convert_add(self, op: tflite.Operator, generated_tensors: set):
        """Convert an ADD op, detecting the "-inf mask" attention pattern.

        Returns a (layer_type, params) pair: either a plain 'Add', or
        'MaskNegInf' when one operand is a constant tensor of only -inf/0.
        """
        # Get params
        op_opt = op.BuiltinOptions()
        if op_opt is None:
            raise RuntimeError('Add options is None')
        opt = tflite.AddOptions()
        opt.Init(op_opt.Bytes, op_opt.Pos)
        params = [opt.FusedActivationFunction()]

        # Get inputs
        inputs = get_inputs(op)
        print(generated_tensors)
        print('Add inputs: ', inputs)
        if len(inputs) != 2:
            raise RuntimeError('Add must have 2 inputs')

        # If both tensors are generated, do nothing
        print(inputs[0] in generated_tensors, inputs[1] in generated_tensors)
        if (inputs[0] in generated_tensors) and (inputs[1] in generated_tensors):
            return ('Add', params)
        nb_generated = (inputs[0] in generated_tensors) + (inputs[1] in generated_tensors)
        if nb_generated != 1:
            raise RuntimeError('Add must have 1 generated tensor')

        # Check if there are any negative infinities
        const_tensor = self.interpreter.get_tensor(inputs[0]) if inputs[0] not in generated_tensors else self.interpreter.get_tensor(inputs[1])
        if np.any(const_tensor == -np.inf):
            # Ensure that the constant tensor is all -inf and 0
            if not np.all(np.logical_or(np.isneginf(const_tensor), const_tensor == 0)):
                raise RuntimeError('Add constant tensor must be -inf and 0 only')
            # Params encode the mask: rank, shape, then flattened 0/1 mask.
            mask = (const_tensor == -np.inf).astype(np.int64)
            params = [len(mask.shape)] + list(mask.shape)
            params += mask.flatten().tolist()
            return ('MaskNegInf', params)
        else:
            return ('Add', params)

    def to_dict(self, start_layer, end_layer):
        """Translate ops with index in [start_layer, end_layer] into the
        circuit dict (layers + quantized tensors + proving parameters)."""
        interpreter = self.interpreter
        model = self.model
        graph = self.graph
        if graph is None:
            raise RuntimeError('Graph is None')

        input_details = interpreter.get_input_details()
        output_details = interpreter.get_output_details()
        # Run the interpreter on zero inputs so every intermediate tensor has
        # a concrete shape/value we can read below.
        for inp_detail in input_details:
            inp = np.zeros(inp_detail['shape'], dtype=inp_detail['dtype'])
            interpreter.set_tensor(inp_detail['index'], inp)
        # for i, inp in enumerate(inps):
        #   interpreter.set_tensor(input_details[i]['index'], inp)
        interpreter.invoke()

        # Get layers
        # "Generated" tensors are produced at runtime (model inputs and op
        # outputs) as opposed to constant weight tensors.
        generated_tensor_idxes = set()
        for inp in input_details:
            generated_tensor_idxes.add(inp['index'])

        layers = []
        keep_tensors = set()
        # Tensors whose quantized data must be overridden (see CUSTOM / DIV).
        adjusted_tensors = {}
        for op_idx in range(graph.OperatorsLength()):
            op = graph.Operators(op_idx)
            if op is None:
                raise RuntimeError('Operator is None')
            model_opcode = model.OperatorCodes(op.OpcodeIndex())
            if model_opcode is None:
                raise RuntimeError('Operator code is None')
            op_code = model_opcode.BuiltinCode()

            # Skip generated tensors
            for output in handle_numpy_or_literal(op.OutputsAsNumpy()):
                generated_tensor_idxes.add(output)

            # Honor the [start_layer, end_layer] slice of the model.
            if op_idx < start_layer:
                continue
            if op_idx > end_layer:
                break

            # Keep the input tensors
            for input in handle_numpy_or_literal(op.InputsAsNumpy()):
                keep_tensors.add(input)

            # AvgPool2D
            if op_code == tflite.BuiltinOperator.AVERAGE_POOL_2D:
                layer_type = 'AveragePool2D'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('AvgPool2D options is None')
                opt = tflite.Pool2DOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                params = [opt.FilterHeight(), opt.FilterWidth(), opt.StrideH(), opt.StrideW()]
            elif op_code == tflite.BuiltinOperator.MAX_POOL_2D:
                layer_type = 'MaxPool2D'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('MaxPool2D options is None')
                opt = tflite.Pool2DOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.Padding() == tflite.Padding.SAME:
                    raise NotImplementedError('SAME padding is not supported')
                if opt.FusedActivationFunction() != tflite.ActivationFunctionType.NONE:
                    raise NotImplementedError('Fused activation is not supported')
                params = [opt.FilterHeight(), opt.FilterWidth(), opt.StrideH(), opt.StrideW()]
            # FIXME: hack for Keras... not sure why this isn't being converted properly
            elif op_code == tflite.BuiltinOperator.CUSTOM:
                layer_type = 'Conv2D'
                activation = 0
                # Re-layout and re-quantize the kernel ourselves; the raw
                # custom-op tensor is not in the expected OHWI order.
                weights = self.interpreter.get_tensor(op.Inputs(1))
                weights = np.transpose(weights, (3, 0, 1, 2))
                weights = (weights * self.scale_factor).round().astype(np.int64)
                adjusted_tensors[op.Inputs(1)] = weights
                params = [0, 1, activation, 1, 1]
            # Conv2D
            elif op_code == tflite.BuiltinOperator.CONV_2D:
                layer_type = 'Conv2D'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('Conv2D options is None')
                opt = tflite.Conv2DOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.DilationHFactor() != 1 or opt.DilationWFactor() != 1:
                    raise NotImplementedError('Dilation is not supported')
                if opt.FusedActivationFunction() not in self.valid_activations():
                    raise NotImplementedError('Unsupported activation function at layer {op_idx}')
                # 0 is Conv2D
                params = \
                    [0] + \
                    [opt.Padding()] + \
                    [opt.FusedActivationFunction()] + \
                    [opt.StrideH(), opt.StrideW()]
            # DepthwiseConv2D
            elif op_code == tflite.BuiltinOperator.DEPTHWISE_CONV_2D:
                layer_type = 'Conv2D'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('DepthwiseConv2D options is None')
                opt = tflite.DepthwiseConv2DOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.DilationHFactor() != 1 or opt.DilationWFactor() != 1:
                    raise NotImplementedError('Dilation is not supported')
                if opt.FusedActivationFunction() not in self.valid_activations():
                    raise NotImplementedError('Unsupported activation function at layer {op_idx}')
                # 1 is DepthwiseConv2D
                params = \
                    [1] + \
                    [opt.Padding()] + \
                    [opt.FusedActivationFunction()] + \
                    [opt.StrideH(), opt.StrideW()]
            # Fully connected
            elif op_code == tflite.BuiltinOperator.FULLY_CONNECTED:
                layer_type = 'FullyConnected'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('Fully connected options is None')
                opt = tflite.FullyConnectedOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.FusedActivationFunction() not in self.valid_activations():
                    raise NotImplementedError(f'Unsupported activation function at layer {op_idx}')
                params = [opt.FusedActivationFunction()]
            elif op_code == tflite.BuiltinOperator.BATCH_MATMUL:
                layer_type = 'BatchMatMul'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('BatchMatMul options is None')
                opt = tflite.BatchMatMulOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.AdjX() is True: raise NotImplementedError('AdjX is not supported')
                params = [int(opt.AdjX()), int(opt.AdjY())]
            ## Arithmetic
            # Add
            elif op_code == tflite.BuiltinOperator.ADD:
                layer_type, params = self._convert_add(op, generated_tensor_idxes)
            # Mul
            elif op_code == tflite.BuiltinOperator.MUL:
                layer_type = 'Mul'
                params = []
            # Sub
            elif op_code == tflite.BuiltinOperator.SUB:
                sub_val = interpreter.get_tensor(op.Inputs(1))
                # TODO: this is a bit of a hack
                # A constant subtrahend of 10000 marks an attention mask.
                if np.any(np.isin(sub_val, 10000)):
                    layer_type = 'MaskNegInf'
                    mask = (sub_val == 10000).astype(np.int64)
                    params = [len(mask.shape)] + list(mask.shape)
                    params += mask.flatten().tolist()
                else:
                    layer_type = 'Sub'
                    params = []
            # Div
            elif op_code == tflite.BuiltinOperator.DIV:
                # Implement division as multiplication by the inverse
                layer_type = 'Mul'
                div_val = interpreter.get_tensor(op.Inputs(1))
                if type(div_val) != np.float32: raise NotImplementedError('Only support one divisor')
                adjusted_tensors[op.Inputs(1)] = np.array([(self.scale_factor / div_val).round().astype(np.int64)])
                params = []
            # Pad
            elif op_code == tflite.BuiltinOperator.PAD:
                layer_type = 'Pad'
                tensor_idx = op.Inputs(1)
                tensor = interpreter.get_tensor(tensor_idx).flatten().astype(np.int64)
                params = tensor.tolist()
            # Softmax
            elif op_code == tflite.BuiltinOperator.SOFTMAX:
                layer_type = 'Softmax'
                # TODO: conditionally determine whether or not to subtract the max
                # It should depend on the input to the softmax
                # NOTE(review): indexing layers[-1]/layers[-2] assumes at least
                # two preceding converted layers -- confirm for small slices.
                if layers[-1]['layer_type'] == 'MaskNegInf':
                    params = layers[-1]['params']
                elif layers[-2]['layer_type'] == 'MaskNegInf':
                    params = layers[-2]['params']
                    params = [params[0] - 1] + params[2:]
                else:
                    params = []
            # Mean
            elif op_code == tflite.BuiltinOperator.MEAN:
                layer_type = 'Mean'
                inp_shape = interpreter.get_tensor(op.Inputs(0)).shape
                mean_idxes = interpreter.get_tensor(op.Inputs(1)).flatten().astype(np.int64)
                if len(mean_idxes) + 2 != len(inp_shape):
                    raise NotImplementedError(f'Only mean over all but one axis is supported: {op_idx}')
                params = mean_idxes.tolist()
            elif op_code == tflite.BuiltinOperator.SQUARE:
                layer_type = 'Square'
                params = []
            # Squared difference
            elif op_code == tflite.BuiltinOperator.SQUARED_DIFFERENCE:
                layer_type = 'SquaredDifference'
                params = []
            # Pointwise
            elif op_code == tflite.BuiltinOperator.RSQRT:
                layer_type = 'Rsqrt'
                params = []
            elif op_code == tflite.BuiltinOperator.LOGISTIC:
                layer_type = 'Logistic'
                params = []
            elif op_code == tflite.BuiltinOperator.TANH:
                layer_type = 'Tanh'
                params = []
            elif op_code == tflite.BuiltinOperator.POW:
                layer_type = 'Pow'
                power = interpreter.get_tensor(op.Inputs(1)).flatten().astype(np.float32)
                if power != 3.: raise NotImplementedError(f'Only support power 3')
                power = power.round().astype(np.int64)
                if len(power) != 1: raise NotImplementedError(f'Only scalar power is supported: {op_idx}')
                params = power.tolist()
            # The following are no-ops in the sense that they don't change the tensor
            # However, we need to pass along the right tensors
            # The param says which input to pass along
            elif op_code == tflite.BuiltinOperator.SHAPE:
                layer_type = 'Noop'
                params = [0]
            elif op_code == tflite.BuiltinOperator.GATHER:
                layer_type = 'Noop'
                params = [0]
            elif op_code == tflite.BuiltinOperator.REDUCE_PROD:
                # TODO: not sure if this is in general a no-op
                layer_type = 'Noop'
                params = [0]
            elif op_code == tflite.BuiltinOperator.STRIDED_SLICE:
                # FIXME: this is not in general a no-op
                layer_type = 'Noop'
                params = [0]
            elif op_code == tflite.BuiltinOperator.BROADCAST_ARGS:
                layer_type = 'Noop'
                params = [0]
            elif op_code == tflite.BuiltinOperator.BROADCAST_TO:
                layer_type = 'Noop'
                params = [0]
            ## Shape
            elif op_code == tflite.BuiltinOperator.RESHAPE:
                layer_type = 'Reshape'
                params = []
            elif op_code == tflite.BuiltinOperator.TRANSPOSE:
                layer_type = 'Transpose'
                params = get_shape(interpreter, op.Inputs(0)) + interpreter.get_tensor(op.Inputs(1)).flatten().astype(np.int64).tolist()
            elif op_code == tflite.BuiltinOperator.CONCATENATION:
                # FIXME: This is not in general a no-op
                layer_type = 'Concatenation'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('Concatenation options is None')
                opt = tflite.ConcatenationOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                params = [opt.Axis()]
            elif op_code == tflite.BuiltinOperator.PACK:
                layer_type = 'Pack'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('Pack options is None')
                opt = tflite.PackOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                params = [opt.Axis()]
                if params[0] > 1: raise NotImplementedError(f'Only axis=0,1 supported at layer {op_idx}')
            elif op_code == tflite.BuiltinOperator.SPLIT:
                layer_type = 'Split'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('Split options is None')
                opt = tflite.SplitOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                axis = interpreter.get_tensor(op.Inputs(0)).flatten().astype(np.int64)[0]
                num_splits = opt.NumSplits()
                inp = interpreter.get_tensor(op.Inputs(1))
                if inp.shape[axis] % num_splits != 0:
                    raise NotImplementedError(f'Only equal splits supported at layer {op_idx}')
                params = [int(axis), num_splits]
            elif op_code == tflite.BuiltinOperator.SLICE:
                layer_type = 'Slice'
                begin = interpreter.get_tensor(op.Inputs(1)).flatten().astype(np.int64).tolist()
                size = interpreter.get_tensor(op.Inputs(2)).flatten().astype(np.int64).tolist()
                params = begin + size
            elif op_code == tflite.BuiltinOperator.RESIZE_NEAREST_NEIGHBOR:
                layer_type = 'ResizeNearestNeighbor'
                op_opt = op.BuiltinOptions()
                if op_opt is None:
                    raise RuntimeError('ResizeNearestNeighbor options is None')
                opt = tflite.ResizeNearestNeighborOptions()
                opt.Init(op_opt.Bytes, op_opt.Pos)
                if opt.AlignCorners():
                    raise NotImplementedError(f'Align corners not supported at layer {op_idx}')
                if not opt.HalfPixelCenters():
                    raise NotImplementedError(f'Half pixel centers not supported at layer {op_idx}')
                # Can take the out shape directly from the tensor
                params = [int(opt.AlignCorners()), int(opt.HalfPixelCenters())]
            # Not implemented
            else:
                # Resolve the symbolic name of the op code for the error message.
                op_name = None
                for attr in dir(tflite.BuiltinOperator):
                    if not attr.startswith('__'):
                        if getattr(tflite.BuiltinOperator, attr) == op_code:
                            op_name = attr
                raise NotImplementedError('Unsupported operator at layer {}: {}, {}'.format(op_idx, op_code, op_name))

            inp_idxes = get_inputs(op)
            # FIXME: hack for testing
            # NOTE(review): hard-coded op indices whose Rsqrt overflows --
            # model-specific; verify before reuse.
            rsqrt_overflows = [99, 158, 194, 253, 289, 348]
            if op_idx in rsqrt_overflows:
                if op_code == tflite.BuiltinOperator.RSQRT:
                    mask = [0, 1]
                else:
                    mask = []
            else:
                mask = []
            layers.append({
                'layer_type': layer_type,
                'inp_idxes': inp_idxes,
                'inp_shapes': [get_shape(interpreter, inp_idx) for inp_idx in inp_idxes],
                'out_idxes': [op.Outputs(i) for i in range(op.OutputsLength())],
                'out_shapes': [get_shape(interpreter, op.Outputs(i)) for i in range(op.OutputsLength())],
                'params': params,
                'mask': mask,
            })

        print(layers)
        print()

        # Get tensors
        print('keep tensors:', keep_tensors)
        tensors = []
        for tensor_idx in range(graph.TensorsLength()):
            if tensor_idx not in keep_tensors:
                continue
            tensor = graph.Tensors(tensor_idx)
            if tensor is None:
                raise NotImplementedError('Tensor is None')
            # Runtime-generated tensors carry no constant data to emit.
            if tensor_idx in generated_tensor_idxes:
                print(f'skipping generated tensor: {format(tensor_idx)}, {tensor.Name()}')
                continue
            shape = []
            for i in range(tensor.ShapeLength()):
                shape.append(int(tensor.Shape(i)))
            if shape == []:
                shape = [1]
            tensor_data = interpreter.get_tensor(tensor_idx)
            # Quantize to fixed point (float32) or widen (int32); int64
            # tensors are skipped entirely.
            if tensor.Type() == tflite.TensorType.FLOAT32:
                tensor_data = (tensor_data * self.scale_factor).round().astype(np.int64)
            elif tensor.Type() == tflite.TensorType.INT32:
                tensor_data = tensor_data.astype(np.int64)
            elif tensor.Type() == tflite.TensorType.INT64:
                continue
            else:
                raise NotImplementedError('Unsupported tensor type: {}'.format(tensor.Type()))
            # Apply any per-op data overrides recorded during layer conversion.
            if tensor_idx in adjusted_tensors:
                tensor_data = adjusted_tensors[tensor_idx]
                shape = tensor_data.shape
            tensors.append({
                'idx': tensor_idx,
                'shape': shape,
                'data': tensor_data.flatten().tolist(),
            })
            # print(tensor_idx, tensor.Type(), tensor.Name(), tensors[-1]['shape'])
            # print(np.abs(tensor_data).max())

        # Optional commitments: weights + inputs before, outputs after.
        commit_before = []
        commit_after = []
        if self.commit:
            input_tensors = [inp['index'] for inp in input_details]
            weight_tensors = [tensor['idx'] for tensor in tensors if tensor['idx'] not in input_tensors]
            commit_before = [weight_tensors, input_tensors]
            output_tensors = [out['index'] for out in output_details]
            commit_after = [output_tensors]

        # Expose only the final layer's outputs (if requested).
        out_idxes = layers[-1]['out_idxes'] if self.expose_output else []
        d = {
            'global_sf': self.scale_factor,
            'k': self.k,
            'num_cols': self.num_cols,
            'num_random': self.num_randoms,
            'inp_idxes': [inp['index'] for inp in input_details],
            # 'out_idxes': [out['index'] for out in output_details],
            'out_idxes': out_idxes,
            'layers': layers,
            'tensors': tensors,
            'use_selectors': self.use_selectors,
            'commit_before': commit_before,
            'commit_after': commit_after,
        }
        print()
        print(d['layers'][-1])
        # d['out_idxes'] = [14]
        print(d.keys())
        print(d['out_idxes'])
        return d

    def to_msgpack(self, start_layer, end_layer, use_selectors=True):
        """Pack the model twice: full (with tensor data) and as a
        tensor-free config. Returns (model_packed, config_packed).

        NOTE(review): the `use_selectors` argument here is unused; the
        constructor's value is what gets emitted.
        """
        d = self.to_dict(start_layer, end_layer)
        model_packed = msgpack.packb(d, use_bin_type=True)
        d['tensors'] = []
        config_packed = msgpack.packb(d, use_bin_type=True)
        return model_packed, config_packed
def main():
    """CLI entry point: convert a TFLite model into model + config msgpack files."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--model', type=str, required=True)
    parser.add_argument('--model_output', type=str, required=True)
    parser.add_argument('--config_output', type=str, required=True)
    parser.add_argument('--scale_factor', type=int, default=2**16)
    parser.add_argument('--k', type=int, default=19)
    # NOTE(review): --eta is parsed but never used below.
    parser.add_argument('--eta', type=float, default=0.001)
    parser.add_argument('--num_cols', type=int, default=6)
    parser.add_argument('--use_selectors', action=argparse.BooleanOptionalAction, required=False, default=True)
    parser.add_argument('--commit', action=argparse.BooleanOptionalAction, required=False, default=False)
    parser.add_argument('--expose_output', action=argparse.BooleanOptionalAction, required=False, default=True)
    parser.add_argument('--start_layer', type=int, default=0)
    parser.add_argument('--end_layer', type=int, default=10000)
    parser.add_argument('--num_randoms', type=int, default=20001)
    args = parser.parse_args()

    converter = Converter(
        args.model,
        args.scale_factor,
        args.k,
        args.num_cols,
        args.num_randoms,
        args.use_selectors,
        args.commit,
        args.expose_output,
    )
    model_packed, config_packed = converter.to_msgpack(
        start_layer=args.start_layer,
        end_layer=args.end_layer,
    )
    if model_packed is None:
        raise Exception('Failed to convert model')
    # The model file includes tensor data; the config file has it stripped.
    with open(args.model_output, 'wb') as f:
        f.write(model_packed)
    with open(args.config_output, 'wb') as f:
        f.write(config_packed)

if __name__ == '__main__':
    main()
| https://github.com/ddkang/zkml |
python/input_converter.py | import argparse
import ast
import numpy as np
import msgpack
def main():
    """Convert comma-separated .npy input files into the packed fixed-point
    tensor format consumed by the prover."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_config', type=str, required=True)
    parser.add_argument('--inputs', type=str, required=True)
    parser.add_argument('--output', type=str, required=True)
    args = parser.parse_args()

    input_files = args.inputs.split(',')

    with open(args.model_config, 'rb') as f:
        model_config = msgpack.unpackb(f.read())
    input_idxes = model_config['inp_idxes']
    scale_factor = model_config['global_sf']

    # Get the input shapes from the layers: scan every layer for inputs that
    # match a model input index and record their shapes.
    input_shapes = [[0] for _ in input_idxes]
    for layer in model_config['layers']:
        for layer_inp_idx, layer_shape in zip(layer['inp_idxes'], layer['inp_shapes']):
            for index, inp_idx in enumerate(input_idxes):
                if layer_inp_idx == inp_idx:
                    input_shapes[index] = layer_shape

    tensors = []
    for inp, shape, idx in zip(input_files, input_shapes, input_idxes):
        # Quantize to fixed point at the model's global scale factor.
        fixed_point = (np.load(inp).reshape(shape) * scale_factor).round().astype(np.int64)
        tensors.append({
            'idx': idx,
            'shape': shape,
            'data': fixed_point.flatten().tolist(),
        })

    with open(args.output, 'wb') as f:
        f.write(msgpack.packb(tensors, use_bin_type=True))

if __name__ == '__main__':
    main()
python/training_converter.py | # A converter for training data
# Performs the conversion npy -> msgpack
# TODO: Ensure that training works with models that take in multiple input shapes
#
# Shortcut:
# `python3 python/training_converter.py --input_shapes 7,7,320 --input_idxes 1,0 --output training_data/inputs.msgpack --labels_output training_data/labels.msgpack`
#
import argparse
import ast
import numpy as np
import msgpack
import os
NUM_LOADS = 1   # number of training samples to pack
SF = 1 << 17    # fixed-point scale factor

def main():
    """Packs up to NUM_LOADS training samples (input tensor + one-hot label)
    into a single msgpack file for the training circuit."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--input_shapes', type=str, required=True)
    parser.add_argument('--output', type=str, required=True)
    TRAINING_DIRECTORY = './testing/data/pre_last_conv/flowers/train'
    args = parser.parse_args()
    input_shapes = ast.literal_eval(args.input_shapes)

    loaded = 0
    tensors = []
    # NOTE(review): despite the name, this is the list of entries in the
    # training directory (not a count), and it is never used.
    num_classes = os.listdir(TRAINING_DIRECTORY)
    first_file = "0.npy"
    for file_name in os.listdir(TRAINING_DIRECTORY):
        if loaded == NUM_LOADS:
            break
        # NOTE(review): always loads `first_file` ("0.npy") and derives the
        # label from its name, ignoring `file_name` -- presumably a debugging
        # shortcut; confirm intent before relying on this.
        label = int(first_file[:-4])
        data_array = np.load(TRAINING_DIRECTORY + '/' + first_file)
        input_shape = input_shapes
        for idx in range(data_array.shape[0]):
            print(SF)
            print((np.vstack(data_array) * SF).round().astype(np.int64))
            # Input tensor at index 0, quantized to fixed point.
            tensors.append({
                'idx': 0,
                'shape': input_shape,
                'data': list(map(lambda x: int(x), list((data_array[idx] * SF).round().astype(np.int64).flatten()))),
            })
            # represent the label as a one hot encoding
            # (102 classes; the "hot" entry is SF, i.e. 1.0 in fixed point)
            one_hot = np.zeros(102)
            one_hot[label] = SF
            print("IMPORTANT LABEL", label)
            print("IMPORTANT LABEL", data_array[idx].flatten()[:500])
            # print(one_hot.shape())
            # Label tensor at index 11 -- presumably the label tensor index
            # allocated by the training-graph builder; TODO confirm.
            tensors.append({
                'idx': 11,
                'shape': (1, 102),
                'data': list(map(lambda x: int(x), one_hot)),
            })
            loaded += 1
            if loaded == NUM_LOADS:
                break

    packed_inputs = msgpack.packb(tensors, use_bin_type=True)
    # print(tensors)
    with open(args.output, 'wb') as f:
        f.write(packed_inputs)

if __name__ == '__main__':
    main()
| https://github.com/ddkang/zkml |
src/bin/test_circuit.rs | use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr};
use zkml::{
model::ModelCircuit,
utils::{
helpers::get_public_values,
loader::{load_model_msgpack, ModelMsgpack},
},
};
/// Runs the model circuit twice under `MockProver`: a first dry run with
/// empty instances to collect the public values, then a real run whose
/// verification must succeed.
fn main() {
  let mut cli_args = std::env::args();
  let config_fname = cli_args.nth(1).expect("config file path");
  let inp_fname = cli_args.next().expect("input file path");

  let config: ModelMsgpack = load_model_msgpack(&config_fname, &inp_fname);
  let circuit = ModelCircuit::<Fr>::generate_from_file(&config_fname, &inp_fname);
  let k = config.k.try_into().unwrap();

  // Dry run populates the public values.
  let _dry_run = MockProver::run(k, &circuit, vec![vec![]]).unwrap();
  let public_vals = get_public_values();

  // Real run with the collected public inputs must verify.
  let prover = MockProver::run(k, &circuit, vec![public_vals]).unwrap();
  assert_eq!(prover.verify(), Ok(()));
}
| https://github.com/ddkang/zkml |
src/bin/time_circuit.rs | use halo2_proofs::halo2curves::{bn256::Fr, pasta::Fp};
use zkml::{
model::ModelCircuit,
utils::{proving_ipa::time_circuit_ipa, proving_kzg::time_circuit_kzg},
};
/// Benchmarks proving the model circuit with either the KZG (bn256) or the
/// IPA (pasta) commitment scheme, selected by the third CLI argument.
fn main() {
  let config_fname = std::env::args().nth(1).expect("config file path");
  let inp_fname = std::env::args().nth(2).expect("input file path");
  let kzg_or_ipa = std::env::args().nth(3).expect("kzg or ipa");

  match kzg_or_ipa.as_str() {
    "kzg" => {
      let circuit = ModelCircuit::<Fr>::generate_from_file(&config_fname, &inp_fname);
      time_circuit_kzg(circuit);
    }
    "ipa" => {
      let circuit = ModelCircuit::<Fp>::generate_from_file(&config_fname, &inp_fname);
      time_circuit_ipa(circuit);
    }
    _ => panic!("Must specify kzg or ipa"),
  }
}
| https://github.com/ddkang/zkml |
src/bin/verify_circuit.rs | use halo2_proofs::halo2curves::bn256::Fr;
use zkml::{
model::ModelCircuit,
utils::{loader::load_config_msgpack, proving_kzg::verify_circuit_kzg},
};
/// Verifies a previously generated KZG proof against a serialized
/// verification key and public values.
fn main() {
  let config_fname = std::env::args().nth(1).expect("config file path");
  let vkey_fname = std::env::args().nth(2).expect("verification key file path");
  let proof_fname = std::env::args().nth(3).expect("proof file path");
  let public_vals_fname = std::env::args().nth(4).expect("public values file path");
  let kzg_or_ipa = std::env::args().nth(5).expect("kzg or ipa");

  match kzg_or_ipa.as_str() {
    "kzg" => {
      let config = load_config_msgpack(&config_fname);
      let circuit = ModelCircuit::<Fr>::generate_from_msgpack(config, false);
      println!("Loaded configuration");
      verify_circuit_kzg(circuit, &vkey_fname, &proof_fname, &public_vals_fname);
    }
    // Serialization of the verification key doesn't seem to be supported for IPA
    "ipa" => panic!("Not implemented"),
    _ => panic!("Must specify kzg or ipa"),
  }
}
| https://github.com/ddkang/zkml |
src/bin/verify_wav.rs | use std::fs::File;
use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr};
use zkml::{
model::ModelCircuit,
utils::{
helpers::get_public_values,
loader::{load_config_msgpack, ModelMsgpack, TensorMsgpack},
},
};
// Reads a 16-bit WAV file, wraps its samples as the single input tensor of a
// commitment-only model configuration, and prints the public values (the
// commitment) produced by a mock prover run.
fn main() {
let config_fname = std::env::args().nth(1).expect("config file path");
let wav_fname = std::env::args().nth(2).expect("wav file path");
let mut wav_file = File::open(wav_fname).unwrap();
let (_header, data) = wav::read(&mut wav_file).unwrap();
// Only 16-bit PCM samples are supported.
let data = match data {
wav::BitDepth::Sixteen(data) => data,
_ => panic!("Unsupported bit depth"),
};
let data: Vec<i64> = data.iter().map(|x| *x as i64).collect();
let base_config = load_config_msgpack(&config_fname);
// Override the base config: one input tensor holding the samples, no layers
// or outputs, and commit to tensor 0 after assignment.
let config = ModelMsgpack {
tensors: vec![TensorMsgpack {
idx: 0,
shape: vec![1, data.len().try_into().unwrap()],
data: data,
}],
inp_idxes: vec![0],
out_idxes: vec![],
layers: vec![],
commit_before: Some(vec![]),
commit_after: Some(vec![vec![0]]),
..base_config
};
println!("Config: {:?}", config);
let k = config.k;
let circuit = ModelCircuit::<Fr>::generate_from_msgpack(config, false);
// The mock run populates the globally recorded public values.
let _prover = MockProver::run(k.try_into().unwrap(), &circuit, vec![vec![]]).unwrap();
let public_vals: Vec<Fr> = get_public_values();
println!("Public values: {:?}", public_vals);
}
| https://github.com/ddkang/zkml |
src/commitments.rs | pub mod commit;
pub mod packer;
pub mod poseidon_commit;
| https://github.com/ddkang/zkml |
src/commitments/commit.rs | use std::{collections::HashMap, rc::Rc};
use halo2_proofs::{circuit::Layouter, halo2curves::ff::PrimeField, plonk::Error};
use crate::{gadgets::gadget::GadgetConfig, layers::layer::CellRc};
/// Interface for committing to a list of assigned circuit cells.
pub trait Commit<F: PrimeField> {
// Commits to `values` using `blinding`, returning the commitment as one or
// more assigned cells. `constants` supplies pre-assigned circuit constants
// (keyed by integer value) that implementations may need.
fn commit(
&self,
layouter: impl Layouter<F>,
gadget_config: Rc<GadgetConfig>,
constants: &HashMap<i64, CellRc<F>>,
values: &Vec<CellRc<F>>,
blinding: CellRc<F>,
) -> Result<Vec<CellRc<F>>, Error>;
}
| https://github.com/ddkang/zkml |
src/commitments/packer.rs | use std::{
cmp::{max, min},
collections::{BTreeMap, HashMap},
marker::PhantomData,
rc::Rc,
};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Value},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::gadget::{GadgetConfig, GadgetType},
layers::layer::{AssignedTensor, CellRc},
};
// Capacity (in bits) assumed for a single field element when packing.
const NUM_BITS_PER_FIELD_ELEM: usize = 254;
/// Geometry for packing several small values into single field elements.
pub struct PackerConfig<F: PrimeField> {
// Bit width of each element being packed.
pub num_bits_per_elem: usize,
// Number of elements combined into one packed field element.
pub num_elem_per_packed: usize,
// Number of packed outputs produced per advice row.
pub num_packed_per_row: usize,
// Radix weights [1, B, B^2, ...] with B = 2^num_bits_per_elem.
pub exponents: Vec<F>,
_marker: PhantomData<F>,
}
/// Chip that packs many small values into field elements (see `PackerConfig`).
pub struct PackerChip<F: PrimeField> {
pub config: PackerConfig<F>,
}
impl<F: PrimeField> PackerChip<F> {
  /// Radix weights `[1, B, B^2, ..., B^(num_exponents-1)]` with
  /// `B = 2^num_bits_per_elem`, used to combine several small values into a
  /// single field element.
  pub fn get_exponents(num_bits_per_elem: usize, num_exponents: usize) -> Vec<F> {
    let mul_val = F::from(1 << num_bits_per_elem);
    let mut exponents = vec![F::ONE];
    for _ in 1..num_exponents {
      exponents.push(exponents[exponents.len() - 1] * mul_val);
    }
    exponents
  }

  /// Derives the packing geometry from the advice column count: each packed
  /// group uses `num_elem_per_packed` input columns plus one output column.
  pub fn construct(num_bits_per_elem: usize, gadget_config: &GadgetConfig) -> PackerConfig<F> {
    let columns = &gadget_config.columns;
    // Pack as many elements as fit in one field element, bounded by the
    // number of input columns (one column is reserved for the packed output).
    let num_elem_per_packed = if NUM_BITS_PER_FIELD_ELEM / num_bits_per_elem > columns.len() - 1 {
      columns.len() - 1
    } else {
      // TODO: for many columns, pack many in a single row
      NUM_BITS_PER_FIELD_ELEM / num_bits_per_elem
    };
    println!("column len: {}", columns.len());
    println!("num_bits_per_elem: {}", num_bits_per_elem);
    println!("NUM_BITS_PER_FIELD_ELEM: {}", NUM_BITS_PER_FIELD_ELEM);
    println!("num_elem_per_packed: {}", num_elem_per_packed);
    let num_packed_per_row = max(
      1,
      columns.len() / (num_elem_per_packed * (num_bits_per_elem + 1)),
    );
    println!("num_packed_per_row: {}", num_packed_per_row);
    let exponents = Self::get_exponents(num_bits_per_elem, num_elem_per_packed);
    PackerConfig {
      num_bits_per_elem,
      num_elem_per_packed,
      num_packed_per_row,
      exponents,
      _marker: PhantomData,
    }
  }

  /// Registers the packing gate and range lookups. For each packed group the
  /// gate enforces `outp = sum_j (inp_j + shift) * B^j`, and every shifted
  /// input is range-checked against the input lookup table.
  pub fn configure(
    meta: &mut ConstraintSystem<F>,
    packer_config: PackerConfig<F>,
    gadget_config: GadgetConfig,
  ) -> GadgetConfig {
    let selector = meta.complex_selector();
    let columns = gadget_config.columns;
    let lookup = gadget_config.tables.get(&GadgetType::InputLookup).unwrap()[0];
    let exponents = &packer_config.exponents;
    let num_bits_per_elem = packer_config.num_bits_per_elem;
    // Inputs are signed; shifting by 2^(bits-1) maps them into [0, 2^bits).
    let shift_val = 1 << (num_bits_per_elem - 1);
    let shift_val = Expression::Constant(F::from(shift_val as u64));
    meta.create_gate("packer", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      for i in 0..packer_config.num_packed_per_row {
        let offset = i * (packer_config.num_elem_per_packed + 1);
        let inps = columns[offset..offset + packer_config.num_elem_per_packed]
          .iter()
          .map(|col| meta.query_advice(*col, Rotation::cur()))
          .collect::<Vec<_>>();
        let outp = meta.query_advice(
          columns[offset + packer_config.num_elem_per_packed],
          Rotation::cur(),
        );
        // outp = sum_j (inp_j + shift) * B^j
        let res = inps
          .into_iter()
          .zip(exponents.iter())
          .map(|(inp, exp)| (inp + shift_val.clone()) * (*exp))
          .fold(Expression::Constant(F::ZERO), |acc, prod| acc + prod);
        constraints.push(s.clone() * (res - outp));
      }
      constraints
    });
    // Ensure that the weights/inputs are in the correct range
    for i in 0..packer_config.num_packed_per_row {
      let offset = i * (packer_config.num_elem_per_packed + 1);
      for j in 0..packer_config.num_elem_per_packed {
        meta.lookup("packer lookup", |meta| {
          let s = meta.query_selector(selector);
          let inp = meta.query_advice(columns[offset + j], Rotation::cur());
          vec![(s * (inp + shift_val.clone()), lookup)]
        });
      }
    }
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::Packer, vec![selector]);
    GadgetConfig {
      columns,
      selectors,
      ..gadget_config
    }
  }

  /// Copies already-assigned `cells` into one packer row, pads each packed
  /// group with copies of `zero`, and witnesses the packed outputs.
  pub fn copy_and_pack_row(
    &self,
    mut layouter: impl Layouter<F>,
    gadget_config: Rc<GadgetConfig>,
    cells: Vec<CellRc<F>>,
    zero: &AssignedCell<F, F>,
  ) -> Result<Vec<CellRc<F>>, Error> {
    let columns = &gadget_config.columns;
    let selector = gadget_config.selectors.get(&GadgetType::Packer).unwrap()[0];
    let num_bits_per_elem = gadget_config.num_bits_per_elem;
    let shift_val = 1 << (num_bits_per_elem - 1);
    let shift_val = F::from(shift_val as u64);
    let outp = layouter.assign_region(
      || "pack row",
      |mut region| {
        if gadget_config.use_selectors {
          selector.enable(&mut region, 0)?;
        }
        let mut packed = vec![];
        for i in 0..self.config.num_packed_per_row {
          let val_offset = i * self.config.num_elem_per_packed;
          let col_offset = i * (self.config.num_elem_per_packed + 1);
          // Clamp the slice bounds so a short final row with several packed
          // groups per row cannot produce an inverted (panicking) range.
          let start = min(val_offset, cells.len());
          let end = min(val_offset + self.config.num_elem_per_packed, cells.len());
          let mut vals = cells[start..end]
            .iter()
            .enumerate()
            .map(|(j, x)| {
              x.copy_advice(|| "", &mut region, columns[col_offset + j], 0)
                .unwrap();
              x.value().copied()
            })
            .collect::<Vec<_>>();
          // BUGFIX: pad from this group's fill (`vals.len()`), not from
          // `cells.len()`, mirroring `assign_and_pack_row`. The old code
          // misplaced the zero padding whenever num_packed_per_row > 1.
          let zero_copied = (vals.len()..self.config.num_elem_per_packed)
            .map(|j| {
              zero
                .copy_advice(|| "", &mut region, columns[col_offset + j], 0)
                .unwrap();
              zero.value().copied()
            })
            .collect::<Vec<_>>();
          vals.extend(zero_copied);
          // Packed witness: sum_j (val_j + shift) * B^j.
          let res = vals.iter().zip(self.config.exponents.iter()).fold(
            Value::known(F::ZERO),
            |acc, (inp, exp)| acc + (*inp + Value::known(shift_val)) * Value::known(*exp),
          );
          let outp = region.assign_advice(
            || "",
            columns[col_offset + self.config.num_elem_per_packed],
            0,
            || res,
          )?;
          packed.push(Rc::new(outp));
        }
        Ok(packed)
      },
    )?;
    Ok(outp)
  }

  /// Assigns raw `values` into one packer row, pads each packed group with
  /// copies of `zero`, and witnesses the packed outputs. Returns
  /// `(packed_outputs, newly_assigned_input_cells)`.
  pub fn assign_and_pack_row(
    &self,
    mut layouter: impl Layouter<F>,
    gadget_config: Rc<GadgetConfig>,
    values: Vec<&F>,
    zero: &AssignedCell<F, F>,
  ) -> Result<(Vec<CellRc<F>>, Vec<CellRc<F>>), Error> {
    let columns = &gadget_config.columns;
    let selector = gadget_config.selectors.get(&GadgetType::Packer).unwrap()[0];
    let num_bits_per_elem = gadget_config.num_bits_per_elem;
    let shift_val = 1 << (num_bits_per_elem - 1);
    let shift_val = F::from(shift_val as u64);
    let outp = layouter.assign_region(
      || "pack row",
      |mut region| {
        if gadget_config.use_selectors {
          selector.enable(&mut region, 0)?;
        }
        let mut packed = vec![];
        let mut assigned = vec![];
        for i in 0..self.config.num_packed_per_row {
          let val_offset = i * self.config.num_elem_per_packed;
          let col_offset = i * (self.config.num_elem_per_packed + 1);
          // Clamp the slice bounds so a short final row cannot panic.
          let start = min(val_offset, values.len());
          let end = min(val_offset + self.config.num_elem_per_packed, values.len());
          let mut chunk = values[start..end].iter().map(|x| **x).collect::<Vec<_>>();
          let cells = chunk
            .iter()
            .enumerate()
            .map(|(j, x)| {
              let tmp = region
                .assign_advice(|| "", columns[col_offset + j], 0, || Value::known(*x))
                .unwrap();
              Rc::new(tmp)
            })
            .collect::<Vec<_>>();
          assigned.extend(cells);
          // Pad the remainder of this group with copies of the zero cell.
          let zero_vals = (chunk.len()..self.config.num_elem_per_packed)
            .map(|j| {
              zero
                .copy_advice(|| "", &mut region, columns[col_offset + j], 0)
                .unwrap();
              F::ZERO
            })
            .collect::<Vec<_>>();
          chunk.extend(zero_vals);
          // Packed witness: sum_j (val_j + shift) * B^j.
          let res = chunk
            .iter()
            .zip(self.config.exponents.iter())
            .fold(F::ZERO, |acc, (inp, exp)| acc + (*inp + shift_val) * (*exp));
          let outp = region.assign_advice(
            || "",
            columns[col_offset + self.config.num_elem_per_packed],
            0,
            || Value::known(res),
          )?;
          packed.push(Rc::new(outp));
        }
        Ok((packed, assigned))
      },
    )?;
    Ok(outp)
  }

  /// Assigns and packs all `tensors` (iterated in key order), returning the
  /// assigned tensors (with their original shapes) plus the packed cells.
  pub fn assign_and_pack(
    &self,
    mut layouter: impl Layouter<F>,
    gadget_config: Rc<GadgetConfig>,
    constants: &HashMap<i64, CellRc<F>>,
    tensors: &BTreeMap<i64, Array<F, IxDyn>>,
  ) -> Result<(BTreeMap<i64, AssignedTensor<F>>, Vec<CellRc<F>>), Error> {
    // Flatten all tensor values into one list.
    let mut values = vec![];
    for (_, tensor) in tensors {
      for value in tensor.iter() {
        values.push(value);
      }
    }
    let mut packed = vec![];
    let mut assigned = vec![];
    let zero = constants.get(&0).unwrap().clone();
    let num_elems_per_row = self.config.num_packed_per_row * self.config.num_elem_per_packed;
    for i in 0..(values.len().div_ceil(num_elems_per_row)) {
      let row =
        values[i * num_elems_per_row..min((i + 1) * num_elems_per_row, values.len())].to_vec();
      let (row_packed, row_assigned) = self
        .assign_and_pack_row(
          layouter.namespace(|| "pack row"),
          gadget_config.clone(),
          row,
          zero.as_ref(),
        )
        .unwrap();
      packed.extend(row_packed);
      assigned.extend(row_assigned);
    }
    // Re-shape the flat list of assigned cells back into per-tensor arrays.
    let mut assigned_tensors = BTreeMap::new();
    let mut start_idx = 0;
    for (tensor_id, tensor) in tensors {
      let num_el = tensor.len();
      let v = assigned[start_idx..start_idx + num_el].to_vec();
      let new_tensor = Array::from_shape_vec(tensor.raw_dim(), v).unwrap();
      assigned_tensors.insert(*tensor_id, new_tensor);
      start_idx += num_el;
    }
    Ok((assigned_tensors, packed))
  }

  /// Packs tensors whose cells are already assigned, returning only the
  /// packed cells.
  pub fn copy_and_pack(
    &self,
    mut layouter: impl Layouter<F>,
    gadget_config: Rc<GadgetConfig>,
    constants: &HashMap<i64, CellRc<F>>,
    tensors: &BTreeMap<i64, AssignedTensor<F>>,
  ) -> Result<Vec<CellRc<F>>, Error> {
    let mut values = vec![];
    for (_, tensor) in tensors {
      for value in tensor.iter() {
        values.push(value.clone());
      }
    }
    let mut packed = vec![];
    let zero = constants.get(&0).unwrap().clone();
    let num_elems_per_row = self.config.num_packed_per_row * self.config.num_elem_per_packed;
    for i in 0..(values.len().div_ceil(num_elems_per_row)) {
      let row =
        values[i * num_elems_per_row..min((i + 1) * num_elems_per_row, values.len())].to_vec();
      let row_packed = self
        .copy_and_pack_row(
          layouter.namespace(|| "pack row"),
          gadget_config.clone(),
          row,
          zero.as_ref(),
        )
        .unwrap();
      packed.extend(row_packed);
    }
    Ok(packed)
  }
}
| https://github.com/ddkang/zkml |
src/commitments/poseidon_commit.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_gadgets::poseidon::{
primitives::{generate_constants, Absorbing, ConstantLength, Domain, Mds, Spec},
PaddedWord, PoseidonSpongeInstructions, Pow5Chip, Pow5Config, Sponge,
};
use halo2_proofs::{
circuit::Layouter,
halo2curves::ff::{FromUniformBytes, PrimeField},
plonk::{Advice, Column, ConstraintSystem, Error},
};
use crate::{gadgets::gadget::GadgetConfig, layers::layer::CellRc};
use super::commit::Commit;
// Poseidon sponge parameters: state width, absorption rate, and the message
// chunk length L used by the commit chip.
pub const WIDTH: usize = 3;
pub const RATE: usize = 2;
pub const L: usize = 8 - WIDTH - 1;
/// Poseidon-based commitment chip built on the halo2 Pow5 Poseidon gadget.
#[derive(Clone, Debug)]
pub struct PoseidonCommitChip<
F: PrimeField + Ord + FromUniformBytes<64>,
const WIDTH: usize,
const RATE: usize,
const L: usize,
> {
pub poseidon_config: Pow5Config<F, WIDTH, RATE>,
}
/// Poseidon spec generator: x^5 S-box over a width-3, rate-2 state.
#[derive(Debug)]
pub struct P128Pow5T3Gen<F: PrimeField, const SECURE_MDS: usize>(PhantomData<F>);
impl<F: PrimeField, const SECURE_MDS: usize> P128Pow5T3Gen<F, SECURE_MDS> {
  /// Creates a new (stateless) spec generator.
  pub fn new() -> Self {
    Self(PhantomData)
  }
}
// Poseidon spec: 8 full rounds, 56 partial rounds, x^5 S-box, width-3 /
// rate-2 state; round constants and MDS matrices generated deterministically.
impl<F: FromUniformBytes<64> + Ord, const SECURE_MDS: usize> Spec<F, 3, 2>
for P128Pow5T3Gen<F, SECURE_MDS>
{
fn full_rounds() -> usize {
8
}
fn partial_rounds() -> usize {
56
}
// S-box: x -> x^5.
fn sbox(val: F) -> F {
val.pow_vartime([5])
}
fn secure_mds() -> usize {
SECURE_MDS
}
// Round constants plus the MDS matrix and its inverse for this spec.
fn constants() -> (Vec<[F; 3]>, Mds<F, 3>, Mds<F, 3>) {
generate_constants::<_, Self, 3, 2>()
}
}
/// A Poseidon hash function, built around a sponge.
#[derive(Debug)]
pub struct MyHash<
F: PrimeField,
PoseidonChip: PoseidonSpongeInstructions<F, S, D, T, RATE>,
S: Spec<F, T, RATE>,
D: Domain<F, RATE>,
const T: usize,
const RATE: usize,
> {
// Underlying sponge, held in its absorbing state.
pub sponge: Sponge<F, PoseidonChip, S, Absorbing<PaddedWord<F>, RATE>, D, T, RATE>,
}
impl<F: PrimeField + Ord + FromUniformBytes<64>> PoseidonCommitChip<F, WIDTH, RATE, L> {
// Configures the underlying Pow5 Poseidon chip: allocates fixed columns for
// the round constants and wires up the given state / partial-sbox advice
// columns. `_input` is currently unused (see TODO).
pub fn configure(
meta: &mut ConstraintSystem<F>,
// TODO: ??
_input: [Column<Advice>; L],
state: [Column<Advice>; WIDTH],
partial_sbox: Column<Advice>,
) -> PoseidonCommitChip<F, WIDTH, RATE, L> {
let rc_a = (0..WIDTH).map(|_| meta.fixed_column()).collect::<Vec<_>>();
let rc_b = (0..WIDTH).map(|_| meta.fixed_column()).collect::<Vec<_>>();
// A constant-enabled fixed column is required for constant assignments.
meta.enable_constant(rc_b[0]);
PoseidonCommitChip {
poseidon_config: Pow5Chip::configure::<P128Pow5T3Gen<F, 0>>(
meta,
state.try_into().unwrap(),
partial_sbox,
rc_a.try_into().unwrap(),
rc_b.try_into().unwrap(),
),
}
}
}
impl<F: PrimeField + Ord + FromUniformBytes<64>> Commit<F>
for PoseidonCommitChip<F, WIDTH, RATE, L>
{
// Absorbs `values` followed by the blinding cell (repeated until the length
// is a multiple of L) into a Poseidon sponge, then squeezes once; the single
// squeezed cell is the commitment.
fn commit(
&self,
mut layouter: impl Layouter<F>,
_gadget_config: Rc<GadgetConfig>,
_constants: &HashMap<i64, CellRc<F>>,
values: &Vec<CellRc<F>>,
blinding: CellRc<F>,
) -> Result<Vec<CellRc<F>>, Error> {
let chip = Pow5Chip::construct(self.poseidon_config.clone());
let mut hasher: MyHash<F, Pow5Chip<F, 3, 2>, P128Pow5T3Gen<F, 0>, ConstantLength<L>, 3, 2> =
Sponge::new(chip, layouter.namespace(|| "sponge"))
.map(|sponge| MyHash { sponge })
.unwrap();
// Append the blinding factor, then pad with it until the message length is
// a multiple of the chunk size L.
let mut new_vals = values
.iter()
.map(|x| x.clone())
.chain(vec![blinding.clone()])
.collect::<Vec<_>>();
while new_vals.len() % L != 0 {
new_vals.push(blinding.clone());
}
// Absorb the message words followed by the domain's padding words.
for (i, value) in new_vals
.iter()
.map(|x| PaddedWord::Message((**x).clone()))
.chain(<ConstantLength<L> as Domain<F, RATE>>::padding(L).map(PaddedWord::Padding))
.enumerate()
{
hasher
.sponge
.absorb(layouter.namespace(|| format!("absorb {}", i)), value)
.unwrap();
}
// Switch the sponge to squeezing mode and output one field element.
let outp = hasher
.sponge
.finish_absorbing(layouter.namespace(|| "finish absorbing"))
.unwrap()
.squeeze(layouter.namespace(|| "squeeze"))
.unwrap();
let outp = Rc::new(outp);
Ok(vec![outp])
}
}
| https://github.com/ddkang/zkml |
src/gadgets.rs | pub mod add_pairs;
pub mod adder;
pub mod bias_div_floor_relu6;
pub mod bias_div_round_relu6;
pub mod dot_prod;
pub mod gadget;
pub mod input_lookup;
pub mod max;
pub mod mul_pairs;
pub mod sqrt_big;
pub mod square;
pub mod squared_diff;
pub mod sub_pairs;
pub mod update;
pub mod var_div;
pub mod var_div_big;
pub mod var_div_big3;
// Generics
pub mod nonlinear;
| https://github.com/ddkang/zkml |
src/gadgets/add_pairs.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
// The element-wise addition gadget reuses the shared gadget configuration.
type AddPairsConfig = GadgetConfig;
/// Chip computing out[i] = a[i] + b[i] element-wise.
pub struct AddPairsChip<F: PrimeField> {
config: Rc<AddPairsConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> AddPairsChip<F> {
pub fn construct(config: Rc<AddPairsConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
// Each addition uses three advice cells: inp1, inp2, outp.
pub fn num_cols_per_op() -> usize {
3
}
// Registers the gate enforcing outp = inp1 + inp2 for every 3-column group
// in a row, and records the selector under GadgetType::AddPairs.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.selector();
let columns = gadget_config.columns;
meta.create_gate("add pair", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for i in 0..columns.len() / Self::num_cols_per_op() {
let offset = i * Self::num_cols_per_op();
let inp1 = meta.query_advice(columns[offset + 0], Rotation::cur());
let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
let res = inp1 + inp2;
constraints.append(&mut vec![s.clone() * (res - outp)])
}
constraints
});
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::AddPairs, vec![selector]);
GadgetConfig {
columns,
selectors,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for AddPairsChip<F> {
fn name(&self) -> String {
"add pairs chip".to_string()
}
fn num_cols_per_op(&self) -> usize {
Self::num_cols_per_op()
}
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Assigns one row: copies each (inp1, inp2) pair into its 3-column group
// and witnesses inp1 + inp2 in the group's output column. Both input
// vectors must have equal length.
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
_single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let inp1 = &vec_inputs[0];
let inp2 = &vec_inputs[1];
assert_eq!(inp1.len(), inp2.len());
let columns = &self.config.columns;
if self.config.use_selectors {
let selector = self.config.selectors.get(&GadgetType::AddPairs).unwrap()[0];
selector.enable(region, row_offset)?;
}
let mut outps = vec![];
for i in 0..inp1.len() {
let offset = i * self.num_cols_per_op();
let inp1 = inp1[i].copy_advice(|| "", region, columns[offset + 0], row_offset)?;
let inp2 = inp2[i].copy_advice(|| "", region, columns[offset + 1], row_offset)?;
let outp = inp1.value().map(|x: &F| x.to_owned()) + inp2.value().map(|x: &F| x.to_owned());
let outp = region.assign_advice(|| "", columns[offset + 2], row_offset, || outp)?;
outps.push(outp);
}
Ok(outps)
}
// Pads both inputs with the zero cell (zero + zero = zero) up to a full
// row, runs the aligned rows, then truncates back to the original length.
fn forward(
&self,
mut layouter: impl Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let zero = &single_inputs[0];
let mut inp1 = vec_inputs[0].clone();
let mut inp2 = vec_inputs[1].clone();
let initial_len = inp1.len();
while inp1.len() % self.num_inputs_per_row() != 0 {
inp1.push(zero);
inp2.push(zero);
}
let vec_inputs = vec![inp1, inp2];
let res = self.op_aligned_rows(
layouter.namespace(|| format!("forward row {}", self.name())),
&vec_inputs,
single_inputs,
)?;
Ok(res[0..initial_len].to_vec())
}
}
| https://github.com/ddkang/zkml |
src/gadgets/adder.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region, Value},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
// The adder gadget reuses the shared gadget configuration.
type AdderConfig = GadgetConfig;
/// Chip that sums many cells into a single output cell.
pub struct AdderChip<F: PrimeField> {
config: Rc<AdderConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> AdderChip<F> {
pub fn construct(config: Rc<AdderConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
// Gate: the last advice column equals the sum of all preceding columns in
// the row.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.selector();
let columns = gadget_config.columns;
meta.create_gate("adder gate", |meta| {
let s = meta.query_selector(selector);
let gate_inp = columns[0..columns.len() - 1]
.iter()
.map(|col| meta.query_advice(*col, Rotation::cur()))
.collect::<Vec<_>>();
let gate_output = meta.query_advice(*columns.last().unwrap(), Rotation::cur());
let res = gate_inp
.iter()
.fold(Expression::Constant(F::ZERO), |a, b| a + b.clone());
vec![s * (res - gate_output)]
});
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::Adder, vec![selector]);
GadgetConfig {
columns,
selectors,
..gadget_config
}
}
}
// NOTE: The forward pass of the adder adds _everything_ into one cell
impl<F: PrimeField> Gadget<F> for AdderChip<F> {
fn name(&self) -> String {
"adder".to_string()
}
fn num_cols_per_op(&self) -> usize {
self.config.columns.len()
}
// All columns except the last take inputs; the last holds the row sum.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() - 1
}
fn num_outputs_per_row(&self) -> usize {
1
}
// Copies the inputs across the row and witnesses their sum in the final
// column.
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
_single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
assert_eq!(vec_inputs.len(), 1);
let inp = &vec_inputs[0];
if self.config.use_selectors {
let selector = self.config.selectors.get(&GadgetType::Adder).unwrap()[0];
selector.enable(region, row_offset)?;
}
inp
.iter()
.enumerate()
.map(|(i, cell)| cell.copy_advice(|| "", region, self.config.columns[i], row_offset))
.collect::<Result<Vec<_>, _>>()?;
let e = inp.iter().fold(Value::known(F::ZERO), |a, b| {
a + b.value().map(|x: &F| x.to_owned())
});
let res = region.assign_advice(
|| "",
*self.config.columns.last().unwrap(),
row_offset,
|| e,
)?;
Ok(vec![res])
}
// Repeatedly reduces the inputs row by row (padding with the zero cell,
// which must be single_inputs[0]) until one sum cell remains.
fn forward(
&self,
mut layouter: impl Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
assert_eq!(single_inputs.len(), 1);
let mut inputs = vec_inputs[0].clone();
let zero = single_inputs[0].clone();
while inputs.len() % self.num_inputs_per_row() != 0 {
inputs.push(&zero);
}
let mut outputs = self.op_aligned_rows(
layouter.namespace(|| "adder forward"),
&vec![inputs],
single_inputs,
)?;
while outputs.len() != 1 {
while outputs.len() % self.num_inputs_per_row() != 0 {
outputs.push(zero.clone());
}
let tmp = outputs.iter().map(|x| x).collect::<Vec<_>>();
outputs = self.op_aligned_rows(
layouter.namespace(|| "adder forward"),
&vec![tmp],
single_inputs,
)?;
}
Ok(outputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/bias_div_floor_relu6.rs | use std::{collections::HashMap, marker::PhantomData};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use crate::gadgets::gadget::convert_to_u64;
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type BiasDivFloorRelu6Config = GadgetConfig;
// Lower bound used to shift possibly-negative inputs into the non-negative
// range before the witness-side integer division.
const SHIFT_MIN_VAL: i64 = -(1 << 30);
/// Fused chip: computes relu6 clamped to [0, 6*sf] of
/// (floor(inp / sf) + bias), where sf is the scale factor.
pub struct BiasDivFloorRelu6Chip<F: PrimeField> {
config: BiasDivFloorRelu6Config,
_marker: PhantomData<F>,
}
impl<F: PrimeField> BiasDivFloorRelu6Chip<F> {
pub fn construct(config: BiasDivFloorRelu6Config) -> Self {
Self {
config,
_marker: PhantomData,
}
}
// Builds the relu6 lookup map: table key i (a non-negative row index) maps
// to clamp(i + div_outp_min_val, 0, 6 * scale_factor).
pub fn get_map(scale_factor: u64, num_rows: i64, div_outp_min_val: i64) -> HashMap<i64, i64> {
let div_val = scale_factor;
let div_outp_min_val = div_outp_min_val;
let mut map = HashMap::new();
for i in 0..num_rows {
let shifted = i + div_outp_min_val;
let val = shifted.clamp(0, 6 * div_val as i64);
map.insert(i as i64, val);
}
map
}
// Advice cells used per op: inp, bias, div_res, mod_res, outp.
pub fn num_cols_per_op() -> usize {
5
}
// Registers the gate inp = sf * (div_res - bias) + mod_res (i.e.
// div_res = floor(inp / sf) + bias) plus range lookups on mod_res, the
// shifted div_res, and the relu6-clamped output.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.complex_selector();
let sf = Expression::Constant(F::from(gadget_config.scale_factor));
let columns = gadget_config.columns;
let mod_lookup = meta.lookup_table_column();
let relu_lookup = meta.lookup_table_column();
let div_lookup = meta.lookup_table_column();
meta.create_gate("bias_mul", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
let inp = meta.query_advice(columns[offset + 0], Rotation::cur());
let bias = meta.query_advice(columns[offset + 1], Rotation::cur());
let div_res = meta.query_advice(columns[offset + 2], Rotation::cur());
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
constraints.push(s.clone() * (inp - (sf.clone() * (div_res - bias) + mod_res)));
}
constraints
});
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
meta.lookup("bias_div_relu6 lookup", |meta| {
let s = meta.query_selector(selector);
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
// Constrains that the modulus \in [0, DIV_VAL)
vec![(s.clone() * mod_res.clone(), mod_lookup)]
});
meta.lookup("bias_div_relu6 lookup", |meta| {
let s = meta.query_selector(selector);
let div = meta.query_advice(columns[offset + 2], Rotation::cur());
let outp = meta.query_advice(columns[offset + 4], Rotation::cur());
let div_outp_min_val = Expression::Constant(F::from((-SHIFT_MIN_VAL) as u64));
// Constrains that output \in [0, 6 * SF]
vec![
(s.clone() * outp, relu_lookup),
(s * (div + div_outp_min_val), div_lookup),
]
});
}
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::BiasDivFloorRelu6, vec![selector]);
let mut tables = gadget_config.tables;
tables.insert(
GadgetType::BiasDivFloorRelu6,
vec![mod_lookup, relu_lookup, div_lookup],
);
let mut maps = gadget_config.maps;
let relu_map = Self::get_map(
gadget_config.scale_factor,
gadget_config.num_rows as i64,
gadget_config.div_outp_min_val,
);
maps.insert(GadgetType::BiasDivFloorRelu6, vec![relu_map]);
GadgetConfig {
columns,
selectors,
tables,
maps,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for BiasDivFloorRelu6Chip<F> {
fn name(&self) -> String {
"BiasDivRelu6".to_string()
}
fn num_cols_per_op(&self) -> usize {
Self::num_cols_per_op()
}
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// NOTE(review): op_row_region pushes two cells per op (outp and div_res),
// but this reports one per op; the round variant reports * 2 — confirm
// which is intended.
fn num_outputs_per_row(&self) -> usize {
self.num_inputs_per_row()
}
// Assigns one row: for each (inp, bias) pair, witnesses the floor division
// of the shifted input by the scale factor, the remainder, and the
// relu6-clamped output looked up from the precomputed map.
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
_single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let div_val = self.config.scale_factor as i64;
let div_outp_min_val_i64 = -self.config.div_outp_min_val;
let div_inp_min_val_pos_i64 = -SHIFT_MIN_VAL;
let div_inp_min_val_pos = F::from(div_inp_min_val_pos_i64 as u64);
let inp = &vec_inputs[0];
let bias = &vec_inputs[1];
assert_eq!(inp.len(), bias.len());
assert_eq!(inp.len() % self.num_inputs_per_row(), 0);
let relu_map = &self
.config
.maps
.get(&GadgetType::BiasDivFloorRelu6)
.unwrap()[0];
if self.config.use_selectors {
let selector = self
.config
.selectors
.get(&GadgetType::BiasDivFloorRelu6)
.unwrap()[0];
selector.enable(region, row_offset)?;
}
let mut outp_cells = vec![];
for (i, (inp, bias)) in inp.iter().zip(bias.iter()).enumerate() {
let offset = i * self.num_cols_per_op();
let inp_f = inp.value().map(|x: &F| x.to_owned());
// Recover the bias as a signed integer by shifting into the positive
// range, converting, then un-shifting.
let bias_f = bias.value().map(|x: &F| {
let a = *x + div_inp_min_val_pos;
let a = convert_to_u64(&a) as i64 - div_inp_min_val_pos_i64;
a
});
// Witness-side floor division: shift the input positive, divide, then
// subtract the shift's contribution from the quotient.
let div_mod_res = inp_f.map(|x: F| {
let x_pos = x + div_inp_min_val_pos;
let inp = convert_to_u64(&x_pos);
// println!("inp: {:?}, bias: {:?}, x_pos: {:?}", inp, bias, x_pos);
let div_res = inp as i64 / div_val - (div_inp_min_val_pos_i64 / div_val);
let mod_res = inp as i64 % div_val;
// println!("div_res: {:?}, mod_res: {:?}", div_res, mod_res);
(div_res, mod_res)
});
let div_res = div_mod_res.map(|x: (i64, i64)| x.0) + bias_f;
let mod_res = div_mod_res.map(|x: (i64, i64)| x.1);
// Look up the relu6-clamped output; out-of-range keys are logged and
// mapped to key 0.
let outp = div_res.map(|x: i64| {
let mut x_pos = x - div_outp_min_val_i64;
if !relu_map.contains_key(&(x_pos)) {
println!("x: {}, x_pos: {}", x, x_pos);
x_pos = 0;
}
let outp_val = relu_map.get(&(x_pos)).unwrap();
// println!("x: {}, x_pos: {}, outp_val: {}", x, x_pos, outp_val);
F::from(*outp_val as u64)
});
// Assign inp, bias
inp.copy_advice(|| "", region, self.config.columns[offset + 0], row_offset)?;
bias.copy_advice(|| "", region, self.config.columns[offset + 1], row_offset)?;
// Assign div_res, mod_res
let div_res_cell = region
.assign_advice(
|| "div_res",
self.config.columns[offset + 2],
row_offset,
|| {
div_res.map(|x: i64| {
F::from((x - div_outp_min_val_i64) as u64) - F::from(-div_outp_min_val_i64 as u64)
})
},
)
.unwrap();
let _mod_res_cell = region
.assign_advice(
|| "mod_res",
self.config.columns[offset + 3],
row_offset,
|| mod_res.map(|x: i64| F::from(x as u64)),
)
.unwrap();
let outp_cell = region
.assign_advice(
|| "outp",
self.config.columns[offset + 4],
row_offset,
|| outp.map(|x: F| x.to_owned()),
)
.unwrap();
// outp_cells.push((outp_cell, div_res_cell));
outp_cells.push(outp_cell);
outp_cells.push(div_res_cell);
}
Ok(outp_cells)
}
// Pads both inputs with a copy of the first bias (bias - bias = 0) up to a
// full row, then runs the aligned rows.
fn forward(
&self,
mut layouter: impl Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let mut inps = vec_inputs[0].clone();
let mut biases = vec_inputs[1].clone();
// Needed to pad: bias - bias = 0
let default = biases[0].clone();
while inps.len() % self.num_inputs_per_row() != 0 {
inps.push(&default);
biases.push(&default);
}
let res = self.op_aligned_rows(
layouter.namespace(|| "bias_div_relu6"),
&vec![inps, biases],
single_inputs,
)?;
Ok(res)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/bias_div_round_relu6.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region, Value},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use crate::gadgets::gadget::convert_to_u64;
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type BiasDivRoundRelu6Config = GadgetConfig;
// Advice cells used per fused op: inp, bias, div_res, mod_res, outp.
const NUM_COLS_PER_OP: usize = 5;
/// Fused chip: computes relu6 clamped to [0, 6*sf] of
/// (round(inp / sf) + bias), using rounding rather than floor division.
pub struct BiasDivRoundRelu6Chip<F: PrimeField> {
config: Rc<BiasDivRoundRelu6Config>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> BiasDivRoundRelu6Chip<F> {
pub fn construct(config: Rc<BiasDivRoundRelu6Config>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
// Builds the relu6 lookup map: table key i (a non-negative row index) maps
// to clamp(i + min_val, 0, 6 * scale_factor).
pub fn get_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
let div_val = scale_factor;
let mut map = HashMap::new();
for i in 0..num_rows {
let shifted = i + min_val;
let val = shifted.clamp(0, 6 * div_val as i64);
map.insert(i as i64, val);
}
map
}
// Registers the rounding-division gate
//   2*inp + sf = 2*sf*(div_res - bias) + mod_res
// (i.e. div_res = round(inp / sf) + bias) plus range lookups on mod_res,
// the shifted div_res, and the relu6-clamped output.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.complex_selector();
let sf = Expression::Constant(F::from(gadget_config.scale_factor));
let two = Expression::Constant(F::from(2));
let columns = gadget_config.columns;
let mut tables = gadget_config.tables;
let div_lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
let relu_lookup = meta.lookup_table_column();
meta.create_gate("bias_mul", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for op_idx in 0..columns.len() / NUM_COLS_PER_OP {
let offset = op_idx * NUM_COLS_PER_OP;
let inp = meta.query_advice(columns[offset + 0], Rotation::cur());
let bias = meta.query_advice(columns[offset + 1], Rotation::cur());
let div_res = meta.query_advice(columns[offset + 2], Rotation::cur());
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
// ((div - bias) * 2 + mod) * sf = 2 * inp + sf
constraints.push(
s.clone()
* (two.clone() * inp + sf.clone()
- (sf.clone() * two.clone() * (div_res - bias) + mod_res)),
);
}
constraints
});
for op_idx in 0..columns.len() / NUM_COLS_PER_OP {
let offset = op_idx * NUM_COLS_PER_OP;
meta.lookup("bias_div_relu6 lookup", |meta| {
let s = meta.query_selector(selector);
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
// Constrains that the modulus \in [0, DIV_VAL)
// div_val - mod_res \in [0, max_val)
vec![(s.clone() * (two.clone() * sf.clone() - mod_res), div_lookup)]
});
meta.lookup("bias_div_relu6 lookup", |meta| {
let s = meta.query_selector(selector);
let div = meta.query_advice(columns[offset + 2], Rotation::cur());
let outp = meta.query_advice(columns[offset + 4], Rotation::cur());
let div_outp_min_val = gadget_config.div_outp_min_val;
let div_outp_min_val = Expression::Constant(F::from((-div_outp_min_val) as u64));
// Constrains that output \in [0, 6 * SF]
vec![
(s.clone() * (div + div_outp_min_val), div_lookup),
(s.clone() * outp, relu_lookup),
]
});
}
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::BiasDivRoundRelu6, vec![selector]);
tables.insert(GadgetType::BiasDivRoundRelu6, vec![relu_lookup]);
let mut maps = gadget_config.maps;
let relu_map = Self::get_map(
gadget_config.scale_factor,
gadget_config.min_val,
gadget_config.num_rows as i64,
);
maps.insert(GadgetType::BiasDivRoundRelu6, vec![relu_map]);
GadgetConfig {
columns,
selectors,
tables,
maps,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for BiasDivRoundRelu6Chip<F> {
  fn name(&self) -> String {
    "BiasDivRelu6".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    NUM_COLS_PER_OP
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / NUM_COLS_PER_OP
  }

  // Two outputs per input: the ReLU6 result and the raw (pre-ReLU) quotient.
  fn num_outputs_per_row(&self) -> usize {
    self.num_inputs_per_row() * 2
  }

  /// Fills the ReLU6 lookup table from the map precomputed in `configure`.
  fn load_lookups(&self, mut layouter: impl Layouter<F>) -> Result<(), Error> {
    let map = &self.config.maps[&GadgetType::BiasDivRoundRelu6][0];
    let relu_lookup = self.config.tables[&GadgetType::BiasDivRoundRelu6][0];
    layouter
      .assign_table(
        || "bdr round div/relu lookup",
        |mut table| {
          for i in 0..self.config.num_rows {
            let i = i as i64;
            let val = map.get(&i).unwrap();
            table
              .assign_cell(
                || "relu lookup",
                relu_lookup,
                i as usize,
                || Value::known(F::from(*val as u64)),
              )
              .unwrap();
          }
          Ok(())
        },
      )
      .unwrap();
    Ok(())
  }

  /// Assigns one row of the fused op. For each (inp, bias) pair it witnesses
  /// the rounded quotient `(2*inp + SF) / (2*SF)` (computed on the
  /// shifted-to-positive representative, then corrected) plus the bias, the
  /// corresponding modulus, and the ReLU6 output looked up from the map.
  /// Returns interleaved [outp, div_res] cells — two outputs per input.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let div_val = self.config.scale_factor as i64;
    let div_outp_min_val_i64 = self.config.div_outp_min_val;
    // Shift used to make field elements non-negative before i64 conversion.
    let div_inp_min_val_pos_i64 = -self.config.shift_min_val;
    let div_inp_min_val_pos = F::from(div_inp_min_val_pos_i64 as u64);
    let inp = &vec_inputs[0];
    let bias = &vec_inputs[1];
    assert_eq!(inp.len(), bias.len());
    assert_eq!(inp.len() % self.num_inputs_per_row(), 0);
    let relu_map = &self
      .config
      .maps
      .get(&GadgetType::BiasDivRoundRelu6)
      .unwrap()[0];
    if self.config.use_selectors {
      let selector = self
        .config
        .selectors
        .get(&GadgetType::BiasDivRoundRelu6)
        .unwrap()[0];
      selector.enable(region, row_offset).unwrap();
    }
    let mut outp_cells = vec![];
    for (i, (inp, bias)) in inp.iter().zip(bias.iter()).enumerate() {
      let offset = i * NUM_COLS_PER_OP;
      let inp_f = inp.value().map(|x: &F| x.to_owned());
      // Decode the (possibly negative) bias: shift into the positive range,
      // convert to u64, then undo the shift in i64 arithmetic.
      let bias_f = bias.value().map(|x: &F| {
        let a = *x + div_inp_min_val_pos;
        let a = convert_to_u64(&a) as i64 - div_inp_min_val_pos_i64;
        a
      });
      // Rounded division of the shifted input; the second term removes the
      // shift's contribution to the quotient.
      let div_mod_res = inp_f.map(|x: F| {
        let x_pos = x + div_inp_min_val_pos;
        let inp = convert_to_u64(&x_pos) as i64;
        let div_inp = 2 * inp + div_val;
        let div_res = div_inp / (2 * div_val) - div_inp_min_val_pos_i64 / div_val;
        let mod_res = div_inp % (2 * div_val);
        (div_res, mod_res)
      });
      let div_res = div_mod_res.map(|x: (i64, i64)| x.0) + bias_f;
      let mod_res = div_mod_res.map(|x: (i64, i64)| x.1);
      // Look up the ReLU6 output for the shifted quotient.
      // NOTE(review): an out-of-range quotient is clamped to map index 0 with
      // only a debug print — confirm this can only occur for padding rows.
      let outp = div_res.map(|x: i64| {
        let mut x_pos = x - div_outp_min_val_i64;
        if !relu_map.contains_key(&(x_pos)) {
          println!("x: {}, x_pos: {}", x, x_pos);
          x_pos = 0;
        }
        let outp_val = relu_map.get(&(x_pos)).unwrap();
        F::from(*outp_val as u64)
      });
      // Assign inp, bias
      inp
        .copy_advice(|| "", region, self.config.columns[offset + 0], row_offset)
        .unwrap();
      bias
        .copy_advice(|| "", region, self.config.columns[offset + 1], row_offset)
        .unwrap();
      // Assign div_res, mod_res
      // div_res may be negative: encode it as (x - min) - (-min) in the field.
      let div_res_cell = region
        .assign_advice(
          || "div_res",
          self.config.columns[offset + 2],
          row_offset,
          || {
            div_res.map(|x: i64| {
              F::from((x - div_outp_min_val_i64) as u64) - F::from(-div_outp_min_val_i64 as u64)
            })
          },
        )
        .unwrap();
      let _mod_res_cell = region
        .assign_advice(
          || "mod_res",
          self.config.columns[offset + 3],
          row_offset,
          || mod_res.map(|x: i64| F::from(x as u64)),
        )
        .unwrap();
      let outp_cell = region
        .assign_advice(
          || "outp",
          self.config.columns[offset + 4],
          row_offset,
          || outp.map(|x: F| x.to_owned()),
        )
        .unwrap();
      // outp_cells.push((outp_cell, div_res_cell));
      outp_cells.push(outp_cell);
      outp_cells.push(div_res_cell);
    }
    Ok(outp_cells)
  }

  /// Pads the inputs to a row boundary and runs the aligned-row op.
  /// Returns `2 * initial_len` cells: [outp, div_res] per original input.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let mut inps = vec_inputs[0].clone();
    let mut biases = vec_inputs[1].clone();
    let initial_len = inps.len();
    // Needed to pad: bias - bias = 0
    let default = biases[0].clone();
    while inps.len() % self.num_inputs_per_row() != 0 {
      inps.push(&default);
      biases.push(&default);
    }
    let res = self
      .op_aligned_rows(
        layouter.namespace(|| "bias_div_relu6"),
        &vec![inps, biases],
        single_inputs,
      )
      .unwrap();
    Ok(res[0..initial_len * 2].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/dot_prod.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{Advice, Column, ConstraintSystem, Error, Expression},
poly::Rotation,
};
use crate::gadgets::adder::AdderChip;
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type DotProductConfig = GadgetConfig;
/// Chip computing a dot product between an input vector and a weight vector.
pub struct DotProductChip<F: PrimeField> {
  // Shared gadget configuration (columns, selectors, tables).
  config: Rc<DotProductConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> DotProductChip<F> {
  /// Wraps a shared gadget config in a chip instance.
  pub fn construct(config: Rc<DotProductConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }

  /// Column layout is [inputs | weights | output]: the first
  /// `(len - 1) / 2` columns hold the inputs.
  pub fn get_input_columns(config: &GadgetConfig) -> Vec<Column<Advice>> {
    let num_inputs = (config.columns.len() - 1) / 2;
    config.columns[0..num_inputs].to_vec()
  }

  /// The next `(len - 1) / 2` columns hold the weights (the final column
  /// holds the output).
  pub fn get_weight_columns(config: &GadgetConfig) -> Vec<Column<Advice>> {
    let num_inputs = (config.columns.len() - 1) / 2;
    config.columns[num_inputs..config.columns.len() - 1].to_vec()
  }

  /// Registers the gate `sum_i(inp_i * w_i) == output` over one row.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let selector = meta.selector();
    let columns = &gadget_config.columns;
    meta.create_gate("dot product gate", |meta| {
      let s = meta.query_selector(selector);
      let gate_inp = DotProductChip::<F>::get_input_columns(&gadget_config)
        .iter()
        .map(|col| meta.query_advice(*col, Rotation::cur()))
        .collect::<Vec<_>>();
      let gate_weights = DotProductChip::<F>::get_weight_columns(&gadget_config)
        .iter()
        .map(|col| meta.query_advice(*col, Rotation::cur()))
        .collect::<Vec<_>>();
      let gate_output = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
      // Sum of pairwise products must equal the output column.
      let res = gate_inp
        .iter()
        .zip(gate_weights)
        .map(|(a, b)| a.clone() * b.clone())
        .fold(Expression::Constant(F::ZERO), |a, b| a + b);
      vec![s * (res - gate_output)]
    });
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::DotProduct, vec![selector]);
    GadgetConfig {
      columns: gadget_config.columns,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for DotProductChip<F> {
  fn name(&self) -> String {
    "dot product".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    self.config.columns.len()
  }

  fn num_inputs_per_row(&self) -> usize {
    (self.config.columns.len() - 1) / 2
  }

  // Each row produces a single partial dot product.
  fn num_outputs_per_row(&self) -> usize {
    1
  }

  // The caller is expected to pad the inputs
  /// Assigns one row: copies inputs and weights into their columns and
  /// witnesses their dot product in the last column. `single_inputs[0]`
  /// must be the zero cell — it fills the unused middle column when the
  /// total column count is even.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    assert_eq!(vec_inputs.len(), 2);
    let inp = &vec_inputs[0];
    let weights = &vec_inputs[1];
    assert_eq!(inp.len(), weights.len());
    assert_eq!(inp.len(), self.num_inputs_per_row());
    let zero = &single_inputs[0];
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::DotProduct).unwrap()[0];
      selector.enable(region, row_offset).unwrap();
    }
    let inp_cols = DotProductChip::<F>::get_input_columns(&self.config);
    inp
      .iter()
      .enumerate()
      .map(|(i, cell)| cell.copy_advice(|| "", region, inp_cols[i], row_offset))
      .collect::<Result<Vec<_>, _>>()
      .unwrap();
    let weight_cols = DotProductChip::<F>::get_weight_columns(&self.config);
    weights
      .iter()
      .enumerate()
      .map(|(i, cell)| cell.copy_advice(|| "", region, weight_cols[i], row_offset))
      .collect::<Result<Vec<_>, _>>()
      .unwrap();
    // All columns need to be assigned
    if self.config.columns.len() % 2 == 0 {
      zero
        .copy_advice(
          || "",
          region,
          self.config.columns[self.config.columns.len() - 2],
          row_offset,
        )
        .unwrap();
    }
    // Witness the dot product of this row's inputs and weights.
    let e = inp
      .iter()
      .zip(weights.iter())
      .map(|(a, b)| a.value().map(|x: &F| *x) * b.value())
      .reduce(|a, b| a + b)
      .unwrap();
    let res = region
      .assign_advice(
        || "",
        self.config.columns[self.config.columns.len() - 1],
        row_offset,
        || e,
      )
      .unwrap();
    Ok(vec![res])
  }

  /// Zero-pads inputs/weights to a row boundary, computes one partial dot
  /// product per row, then sums the partials with `AdderChip`.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    assert_eq!(vec_inputs.len(), 2);
    assert_eq!(single_inputs.len(), 1);
    let zero = &single_inputs[0];
    let mut inputs = vec_inputs[0].clone();
    let mut weights = vec_inputs[1].clone();
    while inputs.len() % self.num_inputs_per_row() != 0 {
      inputs.push(&zero);
      weights.push(&zero);
    }
    let outputs = layouter
      .assign_region(
        || "dot prod rows",
        |mut region| {
          let mut outputs = vec![];
          for i in 0..inputs.len() / self.num_inputs_per_row() {
            let inp =
              inputs[i * self.num_inputs_per_row()..(i + 1) * self.num_inputs_per_row()].to_vec();
            let weights =
              weights[i * self.num_inputs_per_row()..(i + 1) * self.num_inputs_per_row()].to_vec();
            let res = self
              .op_row_region(&mut region, i, &vec![inp, weights], &vec![zero.clone()])
              .unwrap();
            outputs.push(res[0].clone());
          }
          Ok(outputs)
        },
      )
      .unwrap();
    // Sum the per-row partial dot products into the final scalar.
    let adder_chip = AdderChip::<F>::construct(self.config.clone());
    let tmp = outputs.iter().map(|x| x).collect::<Vec<_>>();
    Ok(
      adder_chip
        .forward(
          layouter.namespace(|| "dot prod adder"),
          &vec![tmp],
          single_inputs,
        )
        .unwrap(),
    )
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/gadget.rs | use std::{
collections::{BTreeSet, HashMap},
sync::Arc,
};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::group::ff::PrimeField,
plonk::{Advice, Column, Error, Fixed, Selector, TableColumn},
};
use num_bigint::{BigUint, ToBigUint};
use num_traits::cast::ToPrimitive;
/// Identifies each gadget kind; used as the key for per-gadget selectors,
/// lookup tables, and precomputed witness maps in `GadgetConfig`.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub enum GadgetType {
  AddPairs,
  Adder,
  BiasDivRoundRelu6,
  BiasDivFloorRelu6,
  DotProduct,
  Exp,
  Logistic,
  Max,
  Pow,
  Relu,
  Rsqrt,
  Sqrt,
  SqrtBig,
  Square,
  SquaredDiff,
  SubPairs,
  Tanh,
  MulPairs,
  VarDivRound,
  VarDivRoundBig,
  VarDivRoundBig3,
  Packer, // This is a special case
  InputLookup, // Dummy placeholder for the input lookup
  Update,
}
/// Shared configuration threaded through all gadgets: the advice/fixed
/// columns plus per-gadget selectors, lookup tables, and witness maps.
#[derive(Clone, Debug, Default)]
pub struct GadgetConfig {
  // Gadget types actually used by the circuit.
  pub used_gadgets: Arc<BTreeSet<GadgetType>>,
  // Advice columns shared by all gadgets.
  pub columns: Vec<Column<Advice>>,
  pub fixed_columns: Vec<Column<Fixed>>,
  // Per-gadget selectors, lookup table columns, and precomputed i64 maps.
  pub selectors: HashMap<GadgetType, Vec<Selector>>,
  pub tables: HashMap<GadgetType, Vec<TableColumn>>,
  pub maps: HashMap<GadgetType, Vec<HashMap<i64, i64>>>,
  // Fixed-point scale factor (SF).
  pub scale_factor: u64,
  pub shift_min_val: i64, // MUST be divisible by 2 * scale_factor
  // Number of rows filled in the lookup tables.
  pub num_rows: usize,
  pub num_cols: usize,
  // Circuit size parameter — presumably log2 of the row count; confirm.
  pub k: usize,
  // NOTE(review): looks like a learning-rate-style constant (possibly for
  // the Update gadget) — confirm against its users.
  pub eta: f64,
  // Bounds of the representable value range for shifted lookups.
  pub min_val: i64,
  pub max_val: i64,
  // Minimum value a division output can take (used to shift quotients).
  pub div_outp_min_val: i64,
  pub use_selectors: bool,
  pub commit_before: Vec<Vec<i64>>,
  pub commit_after: Vec<Vec<i64>>,
  pub num_bits_per_elem: i64,
}
// TODO: refactor
/// Converts a field element to `u64` via its little-endian representation.
/// Returns 0 for the zero element; panics if the value needs more than one
/// 64-bit digit.
pub fn convert_to_u64<F: PrimeField>(x: &F) -> u64 {
  let digits = BigUint::from_bytes_le(x.to_repr().as_ref()).to_u64_digits();
  // Debug aid: surface unexpectedly large values before panicking below.
  if digits.len() > 2 {
    println!("big_digits: {:?}", digits);
  }
  match digits.len() {
    0 => 0,
    1 => digits[0],
    _ => panic!(),
  }
}
/// Converts a field element to `u128` via its little-endian representation.
/// Panics if the value does not fit in 128 bits.
pub fn convert_to_u128<F: PrimeField>(x: &F) -> u128 {
  let big = BigUint::from_bytes_le(x.to_repr().as_ref());
  // Call `to_u128` directly: the previous `to_biguint().unwrap()` round-trip
  // was an identity conversion that cloned the BigUint for no effect.
  big.to_u128().unwrap()
}
/// Common interface implemented by every low-level circuit gadget.
pub trait Gadget<F: PrimeField> {
  /// Human-readable gadget name (used in region/namespace labels).
  fn name(&self) -> String;

  /// Number of advice columns one op consumes within a row.
  fn num_cols_per_op(&self) -> usize;

  /// Number of logical inputs packed into one row.
  fn num_inputs_per_row(&self) -> usize;

  /// Number of output cells produced per row.
  fn num_outputs_per_row(&self) -> usize;

  /// Loads any lookup tables the gadget needs; default is a no-op.
  fn load_lookups(&self, _layouter: impl Layouter<F>) -> Result<(), Error> {
    Ok(())
  }

  /// Assigns a single row of the gadget at `row_offset` within `region`.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error>;

  // The caller is required to ensure that the inputs are of the correct length.
  /// Splits each input vector into row-sized chunks, assigns one region, and
  /// calls `op_row_region` once per chunk, concatenating the outputs.
  fn op_aligned_rows(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    // Sanity check inputs
    for inp in vec_inputs.iter() {
      assert_eq!(inp.len() % self.num_inputs_per_row(), 0);
    }
    let outputs = layouter.assign_region(
      || format!("gadget {}", self.name()),
      |mut region| {
        let mut outputs = vec![];
        for i in 0..vec_inputs[0].len() / self.num_inputs_per_row() {
          let mut vec_inputs_row = vec![];
          for inp in vec_inputs.iter() {
            vec_inputs_row.push(
              inp[i * self.num_inputs_per_row()..(i + 1) * self.num_inputs_per_row()].to_vec(),
            );
          }
          let row_outputs = self.op_row_region(&mut region, i, &vec_inputs_row, &single_inputs)?;
          assert_eq!(row_outputs.len(), self.num_outputs_per_row());
          outputs.extend(row_outputs);
        }
        Ok(outputs)
      },
    )?;
    Ok(outputs)
  }

  /// Default forward pass: delegates to `op_aligned_rows` under a namespace.
  /// Implementations typically override this to pad inputs first.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      vec_inputs,
      single_inputs,
    )
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/input_lookup.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region, Value},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
/// Chip owning the shared input-lookup table: the identity table over
/// `0..num_rows` that other gadgets use for range checks.
pub struct InputLookupChip<F: PrimeField> {
  // Shared gadget configuration (provides the table column and num_rows).
  config: Rc<GadgetConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> InputLookupChip<F> {
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let lookup = meta.lookup_table_column();
let mut tables = gadget_config.tables;
tables.insert(GadgetType::InputLookup, vec![lookup]);
GadgetConfig {
tables,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for InputLookupChip<F> {
  /// Populates the input-lookup table with the identity map `row -> row`
  /// for every row in the circuit.
  fn load_lookups(&self, mut layouter: impl Layouter<F>) -> Result<(), Error> {
    let lookup = self.config.tables[&GadgetType::InputLookup][0];
    layouter
      .assign_table(
        || "input lookup",
        |mut table| {
          for row in 0..self.config.num_rows {
            table
              .assign_cell(
                || "mod lookup",
                lookup,
                row,
                || Value::known(F::from(row as u64)),
              )
              .unwrap();
          }
          Ok(())
        },
      )
      .unwrap();
    Ok(())
  }

  // This chip only provides a table; the Gadget row API is unsupported.
  fn name(&self) -> String {
    panic!("InputLookupChip should not be called directly")
  }

  fn num_cols_per_op(&self) -> usize {
    panic!("InputLookupChip should not be called directly")
  }

  fn num_inputs_per_row(&self) -> usize {
    panic!("InputLookupChip should not be called directly")
  }

  fn num_outputs_per_row(&self) -> usize {
    panic!("InputLookupChip should not be called directly")
  }

  fn op_row_region(
    &self,
    _region: &mut Region<F>,
    _row_offset: usize,
    _vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    panic!("InputLookupChip should not be called directly")
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/max.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use crate::gadgets::gadget::convert_to_u64;
use super::gadget::{Gadget, GadgetConfig, GadgetType};
/// Chip computing the maximum of a vector of values via pairwise reduction.
pub struct MaxChip<F: PrimeField> {
  // Shared gadget configuration (columns, selectors, tables).
  config: Rc<GadgetConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> MaxChip<F> {
  /// Wraps a shared gadget config in a chip instance.
  pub fn construct(config: Rc<GadgetConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }

  /// Columns consumed per comparison: (inp1, inp2, outp).
  pub fn num_cols_per_op() -> usize {
    3
  }

  /// Registers the max constraints. Per 3-column group:
  ///   * gate: `(inp1 - outp) * (inp2 - outp) == 0`, so outp equals one of
  ///     the two inputs;
  ///   * lookups: `outp - inp1` and `outp - inp2` must appear in the input
  ///     table (i.e. be non-negative), so outp is the larger input.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let selector = meta.complex_selector();
    let columns = gadget_config.columns;
    let tables = gadget_config.tables;
    let inp_lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
    meta.create_gate("max arithmetic", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      for i in 0..columns.len() / Self::num_cols_per_op() {
        let offset = i * Self::num_cols_per_op();
        let inp1 = meta.query_advice(columns[offset + 0], Rotation::cur());
        let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
        let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
        constraints.push(s.clone() * (inp1 - outp.clone()) * (inp2 - outp))
      }
      constraints
    });
    for idx in 0..columns.len() / Self::num_cols_per_op() {
      meta.lookup("max inp1", |meta| {
        let s = meta.query_selector(selector);
        let offset = idx * Self::num_cols_per_op();
        let inp1 = meta.query_advice(columns[offset + 0], Rotation::cur());
        let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
        vec![(s * (outp - inp1), inp_lookup)]
      });
      meta.lookup("max inp2", |meta| {
        let s = meta.query_selector(selector);
        let offset = idx * Self::num_cols_per_op();
        let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
        let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
        vec![(s * (outp - inp2), inp_lookup)]
      });
    }
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::Max, vec![selector]);
    GadgetConfig {
      columns,
      selectors,
      tables,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for MaxChip<F> {
  fn name(&self) -> String {
    "max".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    3
  }

  // Each row consumes two operands per 3-column group...
  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op() * 2
  }

  // ...and produces one pairwise max per group.
  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  /// Assigns one row of pairwise maxima: the first half of the inputs is
  /// compared element-wise against the second half.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    assert_eq!(vec_inputs.len(), 1);
    let inp = &vec_inputs[0];
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::Max).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    // Shift into the non-negative range so the u64 comparison is valid.
    let min_val_pos = F::from((-self.config.shift_min_val) as u64);
    let mut outp = vec![];
    let chunks: Vec<&[&AssignedCell<F, F>]> = inp.chunks(self.num_outputs_per_row()).collect();
    let i1 = chunks[0];
    let i2 = chunks[1];
    for (idx, (inp1, inp2)) in i1.iter().zip(i2.iter()).enumerate() {
      let offset = idx * self.num_cols_per_op();
      inp1
        .copy_advice(|| "", region, self.config.columns[offset + 0], row_offset)
        .unwrap();
      inp2
        .copy_advice(|| "", region, self.config.columns[offset + 1], row_offset)
        .unwrap();
      // Witness max(a, b) by comparing the shifted u64 representatives.
      let max = inp1.value().zip(inp2.value()).map(|(a, b)| {
        let a = convert_to_u64(&(*a + min_val_pos));
        let b = convert_to_u64(&(*b + min_val_pos));
        let max = a.max(b);
        let max = F::from(max) - min_val_pos;
        max
      });
      let res = region
        .assign_advice(|| "", self.config.columns[offset + 2], row_offset, || max)
        .unwrap();
      outp.push(res);
    }
    Ok(outp)
  }

  /// Tree-reduces all inputs down to a single maximum cell. Padding reuses
  /// the first input element, which cannot change the overall maximum.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let mut inputs = vec_inputs[0].clone();
    let first = inputs[0];
    while inputs.len() % self.num_inputs_per_row() != 0 {
      inputs.push(first);
    }
    // TODO: pretty sure this is correct but check
    // NOTE(review): this iteration count looks like a conservative
    // overestimate of the reduction passes needed (extra passes are no-ops
    // on a padded single element) — verify it can never under-iterate.
    let num_iters = inputs.len().div_ceil(self.num_inputs_per_row()) + self.num_inputs_per_row();
    let mut outputs = self.op_aligned_rows(
      layouter.namespace(|| "max forward"),
      &vec![inputs],
      single_inputs,
    )?;
    for _ in 0..num_iters {
      while outputs.len() % self.num_inputs_per_row() != 0 {
        outputs.push(first.clone());
      }
      let tmp = outputs.iter().map(|x| x).collect::<Vec<_>>();
      outputs = self.op_aligned_rows(
        layouter.namespace(|| "max forward"),
        &vec![tmp],
        single_inputs,
      )?;
    }
    // Only the first cell holds the final maximum.
    outputs = vec![outputs.into_iter().next().unwrap()];
    Ok(outputs)
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/mul_pairs.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type MulPairsConfig = GadgetConfig;
/// Chip computing element-wise products of two equal-length vectors.
pub struct MulPairsChip<F: PrimeField> {
  // Shared gadget configuration (columns, selectors, tables).
  config: Rc<MulPairsConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> MulPairsChip<F> {
  /// Wraps a shared gadget config in a chip instance.
  pub fn construct(config: Rc<MulPairsConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }

  /// Columns consumed per multiply: (inp1, inp2, outp).
  pub fn num_cols_per_op() -> usize {
    3
  }

  /// Registers the pairwise-multiplication gate: for each 3-column group,
  /// constrains `inp1 * inp2 == outp` when the selector is enabled.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let selector = meta.selector();
    let columns = gadget_config.columns;
    meta.create_gate("mul pair", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      for i in 0..columns.len() / Self::num_cols_per_op() {
        let offset = i * Self::num_cols_per_op();
        let inp1 = meta.query_advice(columns[offset], Rotation::cur());
        let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
        let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
        let res = inp1 * inp2;
        // push directly instead of `append(&mut vec![..])`, which allocated
        // a throwaway Vec for every constraint.
        constraints.push(s.clone() * (res - outp))
      }
      constraints
    });
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::MulPairs, vec![selector]);
    GadgetConfig {
      columns,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for MulPairsChip<F> {
  fn name(&self) -> String {
    "MulPairs".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  /// Assigns one row: copies each (a, b) pair into its 3-column group and
  /// witnesses the product in the third column.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let lhs = &vec_inputs[0];
    let rhs = &vec_inputs[1];
    assert_eq!(lhs.len(), rhs.len());
    let columns = &self.config.columns;
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::MulPairs).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    let mut outps = vec![];
    for (idx, (a, b)) in lhs.iter().zip(rhs.iter()).enumerate() {
      let offset = idx * self.num_cols_per_op();
      let a = a.copy_advice(|| "", region, columns[offset], row_offset)?;
      let b = b.copy_advice(|| "", region, columns[offset + 1], row_offset)?;
      let prod = a.value().map(|x: &F| x.to_owned()) * b.value().map(|x: &F| x.to_owned());
      outps.push(region.assign_advice(|| "", columns[offset + 2], row_offset, || prod)?);
    }
    Ok(outps)
  }

  /// Zero-pads both inputs to a row boundary, runs the aligned op, and
  /// truncates the outputs back to the original length.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let zero = &single_inputs[0];
    let initial_len = vec_inputs[0].len();
    let mut lhs = vec_inputs[0].clone();
    let mut rhs = vec_inputs[1].clone();
    while lhs.len() % self.num_inputs_per_row() != 0 {
      lhs.push(zero);
      rhs.push(zero);
    }
    let padded = vec![lhs, rhs];
    let res = self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      &padded,
      single_inputs,
    )?;
    Ok(res[0..initial_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear.rs | pub mod exp;
pub mod logistic;
pub mod non_linearity;
pub mod pow;
pub mod relu;
pub mod rsqrt;
pub mod sqrt;
pub mod tanh;
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/exp.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
type ExpGadgetConfig = GadgetConfig;
// IMPORTANT: this returns exp(x) scaled up by the scale factor (the lookup
// map itself stores round(exp(x) * SF^2); see generate_map)
/// Chip computing a fixed-point exponential via table lookup.
pub struct ExpGadgetChip<F: PrimeField> {
  // Shared gadget configuration (columns, selectors, tables, maps).
  config: Rc<ExpGadgetConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> ExpGadgetChip<F> {
pub fn construct(config: Rc<ExpGadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<ExpGadgetChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Exp)
}
}
impl<F: PrimeField> NonLinearGadget<F> for ExpGadgetChip<F> {
  /// Tabulates `round(exp((row + min_val) / SF) * SF^2)` for every row.
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let sf_squared = (scale_factor * scale_factor) as f64;
    (0..num_rows)
      .map(|row| {
        let x = ((row + min_val) as f64) / (scale_factor as f64);
        (row, (x.exp() * sf_squared).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    &self.config.maps[&GadgetType::Exp][0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    self.config.selectors[&GadgetType::Exp][0]
  }
}
impl<F: PrimeField> Gadget<F> for ExpGadgetChip<F> {
  fn name(&self) -> String {
    "Exp".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    <ExpGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  // The remaining methods delegate to the shared NonLinearGadget defaults.
  fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
    NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Exp)?;
    Ok(())
  }

  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    NonLinearGadget::op_row_region(
      self,
      region,
      row_offset,
      vec_inputs,
      single_inputs,
      self.config.clone(),
    )
  }

  fn forward(
    &self,
    layouter: impl halo2_proofs::circuit::Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/logistic.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
/// Chip computing a fixed-point logistic (sigmoid) via table lookup.
pub struct LogisticGadgetChip<F: PrimeField> {
  // Shared gadget configuration (columns, selectors, tables, maps).
  config: Rc<GadgetConfig>,
  // Binds the chip to its field type without storing an F.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> LogisticGadgetChip<F> {
  /// Creates a logistic chip backed by a shared gadget config.
  pub fn construct(config: Rc<GadgetConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }

  /// Delegates to the shared non-linearity configuration for
  /// `GadgetType::Logistic`.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    <LogisticGadgetChip<F> as NonLinearGadget<F>>::configure(
      meta,
      gadget_config,
      GadgetType::Logistic,
    )
  }
}
impl<F: PrimeField> NonLinearGadget<F> for LogisticGadgetChip<F> {
  /// Tabulates `round(sigmoid((row + min_val) / SF) * SF)` for every row.
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let sf = scale_factor as f64;
    (0..num_rows)
      .map(|row| {
        let x = ((row + min_val) as f64) / sf;
        let sigmoid = 1. / (1. + (-x).exp());
        (row, (sigmoid * sf).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    &self.config.maps[&GadgetType::Logistic][0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    self.config.selectors[&GadgetType::Logistic][0]
  }
}
impl<F: PrimeField> Gadget<F> for LogisticGadgetChip<F> {
  fn name(&self) -> String {
    "LogisticChip".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    <LogisticGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  // The remaining methods delegate to the shared NonLinearGadget defaults.
  fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
    NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Logistic)?;
    Ok(())
  }

  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    NonLinearGadget::op_row_region(
      self,
      region,
      row_offset,
      vec_inputs,
      single_inputs,
      self.config.clone(),
    )
  }

  fn forward(
    &self,
    layouter: impl halo2_proofs::circuit::Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/non_linearity.rs | use std::{collections::HashMap, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region, Value},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression, Selector},
poly::Rotation,
};
use crate::gadgets::gadget::convert_to_u128;
use super::super::gadget::Gadget;
use super::super::gadget::{GadgetConfig, GadgetType};
const NUM_COLS_PER_OP: usize = 2;
pub trait NonLinearGadget<F: PrimeField>: Gadget<F> {
fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64>;
fn get_map(&self) -> &HashMap<i64, i64>;
fn get_selector(&self) -> Selector;
fn num_cols_per_op() -> usize {
NUM_COLS_PER_OP
}
fn configure(
meta: &mut ConstraintSystem<F>,
gadget_config: GadgetConfig,
gadget_type: GadgetType,
) -> GadgetConfig {
let selector = meta.complex_selector();
let columns = gadget_config.columns;
let mut tables = gadget_config.tables;
let inp_lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
let outp_lookup = meta.lookup_table_column();
for op_idx in 0..columns.len() / NUM_COLS_PER_OP {
let offset = op_idx * NUM_COLS_PER_OP;
meta.lookup("non-linear lookup", |meta| {
let s = meta.query_selector(selector);
let inp = meta.query_advice(columns[offset + 0], Rotation::cur());
let outp = meta.query_advice(columns[offset + 1], Rotation::cur());
let shift_val = gadget_config.min_val;
let shift_val_pos = Expression::Constant(F::from((-shift_val) as u64));
vec![
(s.clone() * (inp + shift_val_pos), inp_lookup),
(s.clone() * outp, outp_lookup),
]
});
}
let mut selectors = gadget_config.selectors;
selectors.insert(gadget_type, vec![selector]);
tables.insert(gadget_type, vec![inp_lookup, outp_lookup]);
let mut maps = gadget_config.maps;
let non_linear_map = Self::generate_map(
gadget_config.scale_factor,
gadget_config.min_val,
gadget_config.num_rows as i64,
);
maps.insert(gadget_type, vec![non_linear_map]);
GadgetConfig {
columns,
selectors,
tables,
maps,
..gadget_config
}
}
fn load_lookups(
&self,
mut layouter: impl Layouter<F>,
config: Rc<GadgetConfig>,
gadget_type: GadgetType,
) -> Result<(), Error> {
let map = self.get_map();
let table_col = config.tables.get(&gadget_type).unwrap()[1];
let shift_pos_i64 = -config.shift_min_val;
let shift_pos = F::from(shift_pos_i64 as u64);
layouter.assign_table(
|| "non linear table",
|mut table| {
for i in 0..config.num_rows {
let i = i as i64;
// FIXME: refactor this
let tmp = *map.get(&i).unwrap();
let val = if i == 0 {
F::ZERO
} else {
if tmp >= 0 {
F::from(tmp as u64)
} else {
let tmp = tmp + shift_pos_i64;
F::from(tmp as u64) - shift_pos
}
};
table.assign_cell(
|| "non linear cell",
table_col,
i as usize,
|| Value::known(val),
)?;
}
Ok(())
},
)?;
Ok(())
}
/// Assigns one row of the nonlinearity: for each input cell, copies it into
/// its op's input column and writes `map[decoded_input - min_val]` (encoded
/// back into `F`) to the output column. Returns the output cells.
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
_single_inputs: &Vec<&AssignedCell<F, F>>,
gadget_config: Rc<GadgetConfig>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let columns = &gadget_config.columns;
let inp = &vec_inputs[0];
let map = self.get_map();
// Positive bias used to decode possibly-negative field elements into i64.
let shift_val_pos_i64 = -gadget_config.shift_min_val;
let shift_val_pos = F::from(shift_val_pos_i64 as u64);
let min_val = gadget_config.min_val;
if gadget_config.use_selectors {
let selector = self.get_selector();
selector.enable(region, row_offset)?;
}
let mut outps = vec![];
for i in 0..inp.len() {
// Two advice columns per op: input, output.
let offset = i * 2;
inp[i].copy_advice(|| "", region, columns[offset + 0], row_offset)?;
let outp = inp[i].value().map(|x: &F| {
// Decode the field element to a signed integer: bias it positive,
// convert to an integer, then subtract the bias back.
let pos = convert_to_u128(&(*x + shift_val_pos)) as i128 - shift_val_pos_i64 as i128;
// Shift into table-key space ([0, num_rows)).
let x = pos as i64 - min_val;
let val = *map.get(&x).unwrap();
if x == 0 {
// Key 0 is hardwired to zero, mirroring row 0 of the table
// assigned in `load_lookups`.
F::ZERO
} else {
if val >= 0 {
F::from(val as u64)
} else {
// Encode a negative map value as a field element via the bias.
let val_pos = val + shift_val_pos_i64;
F::from(val_pos as u64) - F::from(shift_val_pos_i64 as u64)
}
}
});
let outp =
region.assign_advice(|| "nonlinearity", columns[offset + 1], row_offset, || outp)?;
outps.push(outp);
}
Ok(outps)
}
/// Pads the input vector with the shared zero cell up to a whole number of
/// rows, evaluates the nonlinearity row by row, and returns only the outputs
/// that correspond to real (unpadded) inputs.
fn forward(
  &self,
  mut layouter: impl Layouter<F>,
  vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
  single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
  let zero = single_inputs[0];
  let mut padded = vec_inputs[0].clone();
  let real_len = padded.len();
  let per_row = self.num_inputs_per_row();
  // Number of zero cells needed to reach the next multiple of per_row.
  let pad_count = (per_row - real_len % per_row) % per_row;
  padded.extend(std::iter::repeat(zero).take(pad_count));
  let outp = self.op_aligned_rows(
    layouter.namespace(|| format!("forward row {}", self.name())),
    &vec![padded],
    &single_inputs,
  )?;
  Ok(outp[0..real_len].to_vec())
}
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/pow.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
// IMPORTANT: PowGadget assumes a single power across the entire DAG
/// Lookup-based elementwise power gadget (the exponent is currently
/// hardcoded to 3 in `generate_map`).
pub struct PowGadgetChip<F: PrimeField> {
// Shared circuit configuration (columns, selectors, tables, maps).
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> PowGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Registers the shared non-linearity constraints/lookups under `GadgetType::Pow`.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<PowGadgetChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Pow)
}
}
impl<F: PrimeField> NonLinearGadget<F> for PowGadgetChip<F> {
  /// Builds the fixed-point lookup table for x^3: key `i` in [0, num_rows)
  /// maps the shifted value `i + min_val` through
  /// x -> round(scale * (x / scale)^3).
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let power = 3.; // FIXME: need to make this variable somehow...
    let scale = scale_factor as f64;
    (0..num_rows)
      .map(|key| {
        let real = ((key + min_val) as f64) / scale;
        (key, (real.powf(power) * scale).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    let maps = self.config.maps.get(&GadgetType::Pow).unwrap();
    &maps[0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    let selectors = self.config.selectors.get(&GadgetType::Pow).unwrap();
    selectors[0]
  }
}
// `Gadget` plumbing: every method delegates to the shared `NonLinearGadget`
// machinery via explicit `NonLinearGadget::` paths, which disambiguates from
// the identically-named `Gadget` methods.
impl<F: PrimeField> Gadget<F> for PowGadgetChip<F> {
fn name(&self) -> String {
"PowGadgetChip".to_string()
}
fn num_cols_per_op(&self) -> usize {
<PowGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
}
// One op per column group, so a row processes columns / cols_per_op inputs.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Loads the output-side lookup table for the x^3 map.
fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Pow)?;
Ok(())
}
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::op_row_region(
self,
region,
row_offset,
vec_inputs,
single_inputs,
self.config.clone(),
)
}
fn forward(
&self,
layouter: impl halo2_proofs::circuit::Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/relu.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
/// Lookup-based elementwise ReLU gadget.
pub struct ReluChip<F: PrimeField> {
// Shared circuit configuration (columns, selectors, tables, maps).
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> ReluChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Registers the shared non-linearity constraints/lookups under `GadgetType::Relu`.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<ReluChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Relu)
}
}
impl<F: PrimeField> NonLinearGadget<F> for ReluChip<F> {
  /// Builds the lookup table for ReLU over shifted keys: entry `i` holds
  /// max(i + min_val, 0). The scale factor is unused because ReLU commutes
  /// with positive scaling.
  fn generate_map(_scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    (0..num_rows)
      .map(|key| (key, (key + min_val).max(0)))
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    let maps = self.config.maps.get(&GadgetType::Relu).unwrap();
    &maps[0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    let selectors = self.config.selectors.get(&GadgetType::Relu).unwrap();
    selectors[0]
  }
}
// `Gadget` plumbing: every method delegates to the shared `NonLinearGadget`
// machinery via explicit `NonLinearGadget::` paths, which disambiguates from
// the identically-named `Gadget` methods.
impl<F: PrimeField> Gadget<F> for ReluChip<F> {
fn name(&self) -> String {
"Relu".to_string()
}
fn num_cols_per_op(&self) -> usize {
<ReluChip<F> as NonLinearGadget<F>>::num_cols_per_op()
}
// One op per column group, so a row processes columns / cols_per_op inputs.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Loads the output-side lookup table for the ReLU map.
fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Relu)?;
Ok(())
}
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::op_row_region(
self,
region,
row_offset,
vec_inputs,
single_inputs,
self.config.clone(),
)
}
fn forward(
&self,
layouter: impl halo2_proofs::circuit::Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/rsqrt.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
/// Lookup-based elementwise reciprocal-square-root (1/sqrt(x)) gadget.
pub struct RsqrtGadgetChip<F: PrimeField> {
// Shared circuit configuration (columns, selectors, tables, maps).
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> RsqrtGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Registers the shared non-linearity constraints/lookups under `GadgetType::Rsqrt`.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<RsqrtGadgetChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Rsqrt)
}
}
impl<F: PrimeField> NonLinearGadget<F> for RsqrtGadgetChip<F> {
  /// Builds the fixed-point lookup table for 1/sqrt(x): key `i` maps
  /// `i + min_val` through x -> round(scale / sqrt(x / scale)).
  /// A zero shifted input yields +inf (the `as i64` cast saturates to
  /// i64::MAX); negative inputs yield NaN (the cast produces 0).
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let scale = scale_factor as f64;
    (0..num_rows)
      .map(|key| {
        let real = ((key + min_val) as f64) / scale;
        (key, ((1.0 / real.sqrt()) * scale).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    let maps = self.config.maps.get(&GadgetType::Rsqrt).unwrap();
    &maps[0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    let selectors = self.config.selectors.get(&GadgetType::Rsqrt).unwrap();
    selectors[0]
  }
}
// `Gadget` plumbing: every method delegates to the shared `NonLinearGadget`
// machinery via explicit `NonLinearGadget::` paths, which disambiguates from
// the identically-named `Gadget` methods.
impl<F: PrimeField> Gadget<F> for RsqrtGadgetChip<F> {
fn name(&self) -> String {
"RsqrtGadget".to_string()
}
fn num_cols_per_op(&self) -> usize {
<RsqrtGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
}
// One op per column group, so a row processes columns / cols_per_op inputs.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Loads the output-side lookup table for the 1/sqrt map.
fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Rsqrt)?;
Ok(())
}
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::op_row_region(
self,
region,
row_offset,
vec_inputs,
single_inputs,
self.config.clone(),
)
}
fn forward(
&self,
layouter: impl halo2_proofs::circuit::Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/sqrt.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
/// Lookup-based elementwise square-root gadget.
pub struct SqrtGadgetChip<F: PrimeField> {
// Shared circuit configuration (columns, selectors, tables, maps).
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> SqrtGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Registers the shared non-linearity constraints/lookups under `GadgetType::Sqrt`.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<SqrtGadgetChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Sqrt)
}
}
impl<F: PrimeField> NonLinearGadget<F> for SqrtGadgetChip<F> {
  /// Builds the fixed-point lookup table for sqrt(x): key `i` maps
  /// `i + min_val` through x -> round(scale * sqrt(x / scale)).
  /// Negative shifted inputs produce NaN, which the `as i64` cast turns
  /// into 0.
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let scale = scale_factor as f64;
    (0..num_rows)
      .map(|key| {
        let real = ((key + min_val) as f64) / scale;
        (key, (real.sqrt() * scale).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    let maps = self.config.maps.get(&GadgetType::Sqrt).unwrap();
    &maps[0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    let selectors = self.config.selectors.get(&GadgetType::Sqrt).unwrap();
    selectors[0]
  }
}
// `Gadget` plumbing: every method delegates to the shared `NonLinearGadget`
// machinery via explicit `NonLinearGadget::` paths, which disambiguates from
// the identically-named `Gadget` methods.
impl<F: PrimeField> Gadget<F> for SqrtGadgetChip<F> {
fn name(&self) -> String {
"SqrtGadget".to_string()
}
fn num_cols_per_op(&self) -> usize {
<SqrtGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
}
// One op per column group, so a row processes columns / cols_per_op inputs.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Loads the output-side lookup table for the sqrt map.
fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Sqrt)?;
Ok(())
}
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::op_row_region(
self,
region,
row_offset,
vec_inputs,
single_inputs,
self.config.clone(),
)
}
fn forward(
&self,
layouter: impl halo2_proofs::circuit::Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/nonlinear/tanh.rs | use std::{collections::HashMap, marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
};
use super::{
super::gadget::{Gadget, GadgetConfig, GadgetType},
non_linearity::NonLinearGadget,
};
/// Lookup-based elementwise tanh gadget.
pub struct TanhGadgetChip<F: PrimeField> {
// Shared circuit configuration (columns, selectors, tables, maps).
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> TanhGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<GadgetConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Registers the shared non-linearity constraints/lookups under `GadgetType::Tanh`.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
<TanhGadgetChip<F> as NonLinearGadget<F>>::configure(meta, gadget_config, GadgetType::Tanh)
}
}
impl<F: PrimeField> NonLinearGadget<F> for TanhGadgetChip<F> {
  /// Builds the fixed-point lookup table for tanh(x): key `i` maps
  /// `i + min_val` through x -> round(scale * tanh(x / scale)).
  fn generate_map(scale_factor: u64, min_val: i64, num_rows: i64) -> HashMap<i64, i64> {
    let scale = scale_factor as f64;
    (0..num_rows)
      .map(|key| {
        let real = ((key + min_val) as f64) / scale;
        (key, (real.tanh() * scale).round() as i64)
      })
      .collect()
  }

  fn get_map(&self) -> &HashMap<i64, i64> {
    let maps = self.config.maps.get(&GadgetType::Tanh).unwrap();
    &maps[0]
  }

  fn get_selector(&self) -> halo2_proofs::plonk::Selector {
    let selectors = self.config.selectors.get(&GadgetType::Tanh).unwrap();
    selectors[0]
  }
}
// `Gadget` plumbing: every method delegates to the shared `NonLinearGadget`
// machinery via explicit `NonLinearGadget::` paths, which disambiguates from
// the identically-named `Gadget` methods.
impl<F: PrimeField> Gadget<F> for TanhGadgetChip<F> {
fn name(&self) -> String {
"TanhGadgetChip".to_string()
}
fn num_cols_per_op(&self) -> usize {
<TanhGadgetChip<F> as NonLinearGadget<F>>::num_cols_per_op()
}
// One op per column group, so a row processes columns / cols_per_op inputs.
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
// Loads the output-side lookup table for the tanh map.
fn load_lookups(&self, layouter: impl Layouter<F>) -> Result<(), Error> {
NonLinearGadget::load_lookups(self, layouter, self.config.clone(), GadgetType::Tanh)?;
Ok(())
}
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::op_row_region(
self,
region,
row_offset,
vec_inputs,
single_inputs,
self.config.clone(),
)
}
fn forward(
&self,
layouter: impl halo2_proofs::circuit::Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
NonLinearGadget::forward(self, layouter, vec_inputs, single_inputs)
}
}
| https://github.com/ddkang/zkml |
src/gadgets/sqrt_big.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use crate::gadgets::gadget::convert_to_u64;
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type SqrtBigConfig = GadgetConfig;
/// Computes rounded integer square roots of large inputs via the constraint
/// inp = sqrt^2 + rem, with range-checked sqrt and rem (see `configure`).
pub struct SqrtBigChip<F: PrimeField> {
config: Rc<SqrtBigConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> SqrtBigChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<SqrtBigConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Advice columns per op: (inp, sqrt, rem).
pub fn num_cols_per_op() -> usize {
3
}
/// Registers the gate inp = sqrt^2 + rem per op, plus range lookups on
/// `sqrt`, `rem + sqrt`, and `2*sqrt - rem`. The latter two bound
/// rem to [-sqrt, 2*sqrt], which keeps sqrt within rounding distance of the
/// true root (NOTE(review): the soundness argument is unproven — see TODO).
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.complex_selector();
let two = Expression::Constant(F::from(2));
let columns = gadget_config.columns;
let tables = gadget_config.tables;
let inp_lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
// TODO: prove that these constraints work
meta.create_gate("sqrt_big arithm", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
let inp = meta.query_advice(columns[offset + 0], Rotation::cur());
let sqrt = meta.query_advice(columns[offset + 1], Rotation::cur());
let rem = meta.query_advice(columns[offset + 2], Rotation::cur());
let lhs = inp.clone();
let rhs = sqrt.clone() * sqrt.clone() + rem.clone();
constraints.push(s.clone() * (lhs - rhs));
}
constraints
});
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
// sqrt itself must lie in the lookup range.
meta.lookup("sqrt_big sqrt lookup", |meta| {
let s = meta.query_selector(selector);
let sqrt = meta.query_advice(columns[offset + 1], Rotation::cur());
vec![(s.clone() * sqrt, inp_lookup)]
});
// rem + sqrt in range => rem >= -sqrt.
meta.lookup("sqrt_big rem lookup", |meta| {
let s = meta.query_selector(selector);
let sqrt = meta.query_advice(columns[offset + 1], Rotation::cur());
let rem = meta.query_advice(columns[offset + 2], Rotation::cur());
vec![(s.clone() * (rem + sqrt), inp_lookup)]
});
// 2*sqrt - rem in range => rem <= 2*sqrt.
meta.lookup("sqrt_big sqrt - rem lookup", |meta| {
let s = meta.query_selector(selector);
let sqrt = meta.query_advice(columns[offset + 1], Rotation::cur());
let rem = meta.query_advice(columns[offset + 2], Rotation::cur());
vec![(s.clone() * (two.clone() * sqrt - rem), inp_lookup)]
});
}
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::SqrtBig, vec![selector]);
GadgetConfig {
columns,
tables,
selectors,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for SqrtBigChip<F> {
fn name(&self) -> String {
"sqrt_big".to_string()
}
fn num_cols_per_op(&self) -> usize {
Self::num_cols_per_op()
}
fn num_inputs_per_row(&self) -> usize {
self.config.columns.len() / self.num_cols_per_op()
}
fn num_outputs_per_row(&self) -> usize {
self.num_inputs_per_row()
}
/// Witnesses sqrt = round(sqrt(inp)) and rem = inp - sqrt^2 for each input
/// and returns the sqrt cells.
fn op_row_region(
&self,
region: &mut Region<F>,
row_offset: usize,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
_single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let inps = &vec_inputs[0];
if self.config.use_selectors {
let selector = self.config.selectors.get(&GadgetType::SqrtBig).unwrap()[0];
selector.enable(region, row_offset)?;
}
let mut outp_cells = vec![];
for (i, inp) in inps.iter().enumerate() {
let offset = i * self.num_cols_per_op();
inp.copy_advice(
|| "sqrt_big",
region,
self.config.columns[offset],
row_offset,
)?;
// Compute the rounded root in f64.
// NOTE(review): exact only while inp fits in f64's 53-bit mantissa —
// confirm the expected input range.
let outp = inp.value().map(|x: &F| {
let inp_val = convert_to_u64(x) as i64;
let fsqrt = (inp_val as f64).sqrt();
let sqrt = fsqrt.round() as i64;
let rem = inp_val - sqrt * sqrt;
(sqrt, rem)
});
let sqrt_cell = region.assign_advice(
|| "sqrt_big",
self.config.columns[offset + 1],
row_offset,
|| outp.map(|x| F::from(x.0 as u64)),
)?;
// rem may be negative (rounding up makes sqrt^2 exceed inp, down to
// rem = -sqrt); encode it by adding sqrt in i64 first and subtracting
// it back in the field.
let _rem_cell = region.assign_advice(
|| "sqrt_big",
self.config.columns[offset + 2],
row_offset,
|| {
outp.map(|x| {
let rem_pos = x.1 + x.0;
F::from(rem_pos as u64) - F::from(x.0 as u64)
})
},
)?;
outp_cells.push(sqrt_cell);
}
Ok(outp_cells)
}
/// Zero-pads the inputs to a whole row, computes, and trims the outputs.
fn forward(
&self,
mut layouter: impl Layouter<F>,
vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
single_inputs: &Vec<&AssignedCell<F, F>>,
) -> Result<Vec<AssignedCell<F, F>>, Error> {
let zero = &single_inputs[0];
let mut inp = vec_inputs[0].clone();
let inp_len = inp.len();
while inp.len() % self.num_inputs_per_row() != 0 {
inp.push(zero);
}
let vec_inputs = vec![inp];
let outp = self.op_aligned_rows(
layouter.namespace(|| format!("forward row {}", self.name())),
&vec_inputs,
single_inputs,
)?;
Ok(outp[0..inp_len].to_vec())
}
}
| https://github.com/ddkang/zkml |
src/gadgets/square.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
/// Elementwise squaring gadget: out = inp * inp per (input, output) column pair.
pub struct SquareGadgetChip<F: PrimeField> {
config: Rc<GadgetConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> SquareGadgetChip<F> {
  /// Wraps the shared gadget configuration.
  pub fn construct(config: Rc<GadgetConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }

  // TODO: it would be more efficient to do the division here directly
  /// Registers the squaring gate: for every (input, output) column pair in
  /// the row, constrains output = input * input when the selector is enabled.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let selector = meta.selector();
    let columns = gadget_config.columns;
    // Two advice columns per op: input and its square. Must stay in sync
    // with `Gadget::num_cols_per_op` for this chip.
    let num_cols_per_op = 2;
    meta.create_gate("square gate", |meta| {
      let s = meta.query_selector(selector);
      // BUG FIX: previously only the first column pair was constrained,
      // leaving the squares that `op_row_region` assigns in every other
      // column pair of the row completely unconstrained. Constrain all
      // pairs, matching the sibling gadgets (e.g. SquaredDiff, SubPairs).
      let mut constraints = vec![];
      for op_idx in 0..columns.len() / num_cols_per_op {
        let offset = op_idx * num_cols_per_op;
        let gate_inp = meta.query_advice(columns[offset], Rotation::cur());
        let gate_outp = meta.query_advice(columns[offset + 1], Rotation::cur());
        constraints.push(s.clone() * (gate_inp.clone() * gate_inp - gate_outp));
      }
      constraints
    });
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::Square, vec![selector]);
    GadgetConfig {
      columns,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for SquareGadgetChip<F> {
  fn name(&self) -> String {
    "SquareChip".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    2
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.num_inputs_per_row()
  }

  /// Copies each input into its op's first column and assigns its square to
  /// the second column of the same op.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    assert_eq!(vec_inputs.len(), 1);
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::Square).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    let mut outputs = vec![];
    for (idx, cell) in vec_inputs[0].iter().enumerate() {
      let base = idx * self.num_cols_per_op();
      cell.copy_advice(|| "", region, self.config.columns[base], row_offset)?;
      let squared = cell.value().map(|v: &F| v.to_owned() * v.to_owned());
      let out_cell = region.assign_advice(
        || "square output",
        self.config.columns[base + 1],
        row_offset,
        || squared,
      )?;
      outputs.push(out_cell);
    }
    Ok(outputs)
  }

  /// Zero-pads the inputs to a whole row, computes, and trims the outputs.
  fn forward(
    &self,
    mut layouter: impl halo2_proofs::circuit::Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let zero = &single_inputs[0];
    let mut padded = vec_inputs[0].clone();
    let real_len = padded.len();
    while padded.len() % self.num_inputs_per_row() != 0 {
      padded.push(zero);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      &vec![padded],
      single_inputs,
    )?;
    Ok(res[0..real_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/squared_diff.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type SquaredDiffConfig = GadgetConfig;
/// Elementwise squared-difference gadget: out = (inp1 - inp2)^2 per column triple.
pub struct SquaredDiffGadgetChip<F: PrimeField> {
config: Rc<SquaredDiffConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> SquaredDiffGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<SquaredDiffConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Advice columns per op: (input1, input2, output).
pub fn num_cols_per_op() -> usize {
3
}
/// Registers the gate enforcing output = (input1 - input2)^2 for every op
/// (column triple) in the row.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.selector();
let columns = gadget_config.columns;
meta.create_gate("squared diff", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for i in 0..columns.len() / Self::num_cols_per_op() {
let offset = i * Self::num_cols_per_op();
let inp1 = meta.query_advice(columns[offset + 0], Rotation::cur());
let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
let res = (inp1 - inp2).square();
constraints.append(&mut vec![s.clone() * (res - outp)])
}
constraints
});
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::SquaredDiff, vec![selector]);
GadgetConfig {
columns,
selectors,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for SquaredDiffGadgetChip<F> {
  fn name(&self) -> String {
    "SquaredDiff".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  /// For each pair (a, b), copies the operands into the op's first two
  /// columns and assigns (a - b)^2 to the third.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let lhs = &vec_inputs[0];
    let rhs = &vec_inputs[1];
    assert_eq!(lhs.len(), rhs.len());
    let columns = &self.config.columns;
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::SquaredDiff).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    let mut outps = vec![];
    for (idx, (a, b)) in lhs.iter().zip(rhs.iter()).enumerate() {
      let base = idx * self.num_cols_per_op();
      let a = a.copy_advice(|| "", region, columns[base], row_offset)?;
      let b = b.copy_advice(|| "", region, columns[base + 1], row_offset)?;
      let diff = a.value().map(|x: &F| x.to_owned()) - b.value().map(|x: &F| x.to_owned());
      let sq = diff * diff;
      outps.push(region.assign_advice(|| "", columns[base + 2], row_offset, || sq)?);
    }
    Ok(outps)
  }

  /// Zero-pads both operand vectors to a whole row, computes, and trims the
  /// outputs to the real (unpadded) length.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let zero = &single_inputs[0];
    let mut lhs = vec_inputs[0].clone();
    let mut rhs = vec_inputs[1].clone();
    let real_len = lhs.len();
    while lhs.len() % self.num_inputs_per_row() != 0 {
      lhs.push(zero);
      rhs.push(zero);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      &vec![lhs, rhs],
      single_inputs,
    )?;
    Ok(res[0..real_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/sub_pairs.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error},
poly::Rotation,
};
use super::gadget::{Gadget, GadgetConfig, GadgetType};
type SubPairsConfig = GadgetConfig;
/// Elementwise subtraction gadget: out = inp1 - inp2 per column triple.
pub struct SubPairsChip<F: PrimeField> {
config: Rc<SubPairsConfig>,
_marker: PhantomData<F>,
}
impl<F: PrimeField> SubPairsChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: Rc<SubPairsConfig>) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Advice columns per op: (input1, input2, output).
pub fn num_cols_per_op() -> usize {
3
}
/// Registers the gate enforcing output = input1 - input2 for every op
/// (column triple) in the row.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
let selector = meta.selector()
let columns = gadget_config.columns;
meta.create_gate("sub pair", |meta| {
let s = meta.query_selector(selector);
let mut constraints = vec![];
for i in 0..columns.len() / Self::num_cols_per_op() {
let offset = i * Self::num_cols_per_op();
let inp1 = meta.query_advice(columns[offset + 0], Rotation::cur());
let inp2 = meta.query_advice(columns[offset + 1], Rotation::cur());
let outp = meta.query_advice(columns[offset + 2], Rotation::cur());
let res = inp1 - inp2;
constraints.append(&mut vec![s.clone() * (res - outp)])
}
constraints
});
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::SubPairs, vec![selector]);
GadgetConfig {
columns,
selectors,
..gadget_config
}
}
}
impl<F: PrimeField> Gadget<F> for SubPairsChip<F> {
  fn name(&self) -> String {
    "sub pairs chip".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  /// For each pair (a, b), copies the operands into the op's first two
  /// columns and assigns a - b to the third.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let lhs = &vec_inputs[0];
    let rhs = &vec_inputs[1];
    assert_eq!(lhs.len(), rhs.len());
    let columns = &self.config.columns;
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::SubPairs).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    let mut outps = vec![];
    for (idx, (a, b)) in lhs.iter().zip(rhs.iter()).enumerate() {
      let base = idx * self.num_cols_per_op();
      let a = a.copy_advice(|| "", region, columns[base], row_offset)?;
      let b = b.copy_advice(|| "", region, columns[base + 1], row_offset)?;
      let diff = a.value().map(|x: &F| x.to_owned()) - b.value().map(|x: &F| x.to_owned());
      outps.push(region.assign_advice(|| "", columns[base + 2], row_offset, || diff)?);
    }
    Ok(outps)
  }

  /// Zero-pads both operand vectors to a whole row, computes, and trims the
  /// outputs to the real (unpadded) length.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let zero = &single_inputs[0];
    let mut lhs = vec_inputs[0].clone();
    let mut rhs = vec_inputs[1].clone();
    let real_len = lhs.len();
    while lhs.len() % self.num_inputs_per_row() != 0 {
      lhs.push(zero);
      rhs.push(zero);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      &vec![lhs, rhs],
      single_inputs,
    )?;
    Ok(res[0..real_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/update.rs | use std::marker::PhantomData;
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use crate::gadgets::gadget::{convert_to_u64, GadgetConfig};
use super::gadget::{Gadget, GadgetType};
type UpdateConfig = GadgetConfig;
/// Gadget for the in-circuit fixed-point weight update: witnesses
/// w * sf - dw * eta = div * sf + mod per op and returns the quotient `div`
/// as the updated weight.
#[derive(Clone, Debug)]
pub struct UpdateGadgetChip<F: PrimeField> {
config: UpdateConfig,
_marker: PhantomData<F>,
}
impl<F: PrimeField> UpdateGadgetChip<F> {
/// Wraps the shared gadget configuration.
pub fn construct(config: UpdateConfig) -> Self {
Self {
config,
_marker: PhantomData,
}
}
/// Advice columns per op: (w, dw, div, mod).
pub fn num_cols_per_op() -> usize {
4
}
/// Registers the update gate w * sf - dw * eta = div * sf + mod for every
/// op in the row, plus a lookup bounding each mod to the input table range.
pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> UpdateConfig {
let tables = &gadget_config.tables;
let mod_lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
let columns = gadget_config.columns;
let selector = meta.complex_selector();
let div_val = gadget_config.scale_factor;
// Learning rate in fixed point.
// NOTE(review): `op_row_region` hardcodes eta = scale_factor / 1000; the
// two only agree when gadget_config.eta == 0.001 — confirm before changing.
let eta: u64 = (gadget_config.scale_factor as f64 * gadget_config.eta) as u64;
meta.create_gate("updater_arith", |meta| {
let s = meta.query_selector(selector);
let sf = Expression::Constant(F::from(div_val as u64));
let eta = Expression::Constant(F::from(eta as u64));
let mut constraints = vec![];
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
let w = meta.query_advice(columns[offset], Rotation::cur())
let dw = meta.query_advice(columns[offset + 1], Rotation::cur());
let div = meta.query_advice(columns[offset + 2], Rotation::cur());
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
// w * sf - dw * eta == div * sf + mod
let expr = (w * sf.clone() - dw * eta.clone()) - (div * sf.clone() + mod_res);
constraints.push(s.clone() * expr);
}
constraints
});
for op_idx in 0..columns.len() / Self::num_cols_per_op() {
let offset = op_idx * Self::num_cols_per_op();
// Check that mod is smaller than SF
meta.lookup("max inp1", |meta| {
let s = meta.query_selector(selector);
let mod_res = meta.query_advice(columns[offset + 3], Rotation::cur());
// Constrains that the modulus \in [0, DIV_VAL)
vec![(s.clone() * mod_res.clone(), mod_lookup)]
});
}
let mut selectors = gadget_config.selectors;
selectors.insert(GadgetType::Update, vec![selector]);
UpdateConfig {
columns,
selectors,
..gadget_config
}
}
}
impl<F: PrimeField + Ord> Gadget<F> for UpdateGadgetChip<F> {
  fn name(&self) -> String {
    "updater chip".to_string()
  }

  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }

  fn num_inputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  fn num_outputs_per_row(&self) -> usize {
    self.config.columns.len() / self.num_cols_per_op()
  }

  /// Assigns one row of the SGD-style update: for each (w, dw) pair it
  /// witnesses `div` and `mod` such that w * sf - dw * eta = div * sf + mod,
  /// returning the `div` cells (the updated weights at the original scale).
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    _single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let div_val = self.config.scale_factor as i64;
    let div_val_f = F::from(div_val as u64);
    // NOTE(review): eta is hardcoded to scale_factor / 1000 here, while
    // `configure` derives it from gadget_config.eta; the circuit is only
    // satisfiable when gadget_config.eta == 0.001 — confirm before changing.
    let eta = div_val / 1000;
    let eta = F::from(eta as u64);
    let div_outp_min_val = self.config.div_outp_min_val;
    // Positive bias used to decode possibly-negative field values into i64.
    let div_inp_min_val_pos_i64 = -self.config.shift_min_val;
    let div_inp_min_val_pos = F::from(div_inp_min_val_pos_i64 as u64);
    let columns = &self.config.columns;
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::Update).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    let w = &vec_inputs[0];
    let dw = &vec_inputs[1];
    let mut output_cells = vec![];
    for i in 0..w.len() {
      let offset = i * self.num_cols_per_op();
      let _w_cell = w[i].copy_advice(|| "", region, columns[offset + 0], row_offset)?;
      let _dw_cell = dw[i].copy_advice(|| "", region, columns[offset + 1], row_offset)?;
      let w_val = w[i].value().map(|x: &F| x.to_owned());
      let dw_val = dw[i].value().map(|x: &F| x.to_owned());
      // Scaled update before the division: w * sf - dw * eta.
      let out_scaled = w_val.zip(dw_val).map(|(w, dw)| w * div_val_f - dw * eta);
      let div_mod = out_scaled.map(|x| {
        // Bias into the non-negative range so the u64 conversion is valid.
        let x_pos = x + div_inp_min_val_pos;
        let x_pos = if x_pos > F::ZERO {
          x_pos
        } else {
          x_pos + div_val_f
        };
        let inp = convert_to_u64(&x_pos);
        // Undo the bias on the quotient; the remainder stays in [0, div_val).
        let div_res = inp as i64 / div_val - (div_inp_min_val_pos_i64 as i64 / div_val);
        let mod_res = inp as i64 % div_val;
        (div_res, mod_res)
      });
      let div_res_cell = region
        .assign_advice(
          || "div_res",
          self.config.columns[offset + 2],
          row_offset,
          || {
            // Encode the possibly-negative quotient as a field element.
            div_mod.map(|(x, _): (i64, i64)| {
              F::from((x - div_outp_min_val as i64) as u64) - F::from(-div_outp_min_val as u64)
            })
          },
        )
        .unwrap();
      let _mod_res_cell = region
        .assign_advice(
          || "mod_res",
          self.config.columns[offset + 3],
          row_offset,
          || div_mod.map(|(_, x): (i64, i64)| F::from(x as u64)),
        )
        .unwrap();
      output_cells.push(div_res_cell);
    }
    Ok(output_cells)
  }

  /// Pads `w` and `dw` with zeros to a whole number of rows, runs the update,
  /// and returns only the outputs for the real (unpadded) inputs.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let zero = &single_inputs[0];
    let mut w = vec_inputs[0].clone();
    let mut dw = vec_inputs[1].clone();
    let initial_len = w.len();
    // BUG FIX: this previously read `while !w.len() % self.num_cols_per_op() == 0`,
    // where `!` is bitwise NOT on usize, so the condition was effectively
    // arbitrary and the inputs were almost never padded correctly. Pad both
    // vectors to a multiple of num_inputs_per_row(), as every other gadget's
    // forward does.
    while w.len() % self.num_inputs_per_row() != 0 {
      w.push(zero);
    }
    while dw.len() % self.num_inputs_per_row() != 0 {
      dw.push(zero);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| format!("forward row {}", self.name())),
      &vec![w, dw],
      single_inputs,
    )?;
    Ok(res[0..initial_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/var_div.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use rounded_div::RoundedDiv;
use super::gadget::{convert_to_u128, Gadget, GadgetConfig, GadgetType};
/// The variable-division gadget reuses the shared gadget configuration as-is.
type VarDivRoundConfig = GadgetConfig;
/// Chip computing the rounded division `a / b` where the divisor `b` is a
/// circuit variable (a single assigned cell shared by all ops in a row)
/// rather than a compile-time constant.
pub struct VarDivRoundChip<F: PrimeField> {
  config: Rc<VarDivRoundConfig>,
  // Ties the chip to the field type without storing any field element.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> VarDivRoundChip<F> {
  /// Wraps a shared gadget configuration in a chip instance.
  pub fn construct(config: Rc<VarDivRoundConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }
  /// Each division occupies three advice columns: `a | c | r`
  /// (dividend, quotient, remainder).
  pub fn num_cols_per_op() -> usize {
    3
  }
  /// Registers the rounded-division gate and the range-check lookups.
  ///
  /// The last advice column holds the shared divisor `b`; the remaining
  /// columns are partitioned into `a | c | r` triples, one per op.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let columns = gadget_config.columns;
    let selector = meta.complex_selector();
    let two = Expression::Constant(F::from(2));
    let tables = gadget_config.tables;
    let lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
    // a | c | r | ... | b
    // (2 * a + b) = (2 * b) * c + r
    // b - r \in [0, 2^N) <-- forces b > r
    meta.create_gate("var_div_arithm", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      let b = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
      for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
        let offset = i * Self::num_cols_per_op();
        let a = meta.query_advice(columns[offset], Rotation::cur());
        let c = meta.query_advice(columns[offset + 1], Rotation::cur());
        let r = meta.query_advice(columns[offset + 2], Rotation::cur());
        // Rounded division as an identity: (2a + b) = (2b) * c + r.
        let lhs = a.clone() * two.clone() + b.clone();
        let rhs = b.clone() * two.clone() * c + r;
        constraints.push(s.clone() * (lhs - rhs));
      }
      constraints
    });
    for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
      let offset = i * Self::num_cols_per_op();
      // r \in [0, 2^N)
      meta.lookup("var div range checks r", |meta| {
        let s = meta.query_selector(selector);
        let r = meta.query_advice(columns[offset + 2], Rotation::cur());
        vec![(s.clone() * r, lookup)]
      });
      // 2 * b - r \in [0, 2^N)
      meta.lookup("var div range checks 2b-r", |meta| {
        let s = meta.query_selector(selector);
        let b = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
        let r = meta.query_advice(columns[offset + 2], Rotation::cur());
        vec![(s.clone() * (two.clone() * b - r), lookup)]
      });
    }
    // b \in [0, 2^N)
    meta.lookup("var div range checks b", |meta| {
      let s = meta.query_selector(selector);
      let b = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
      vec![(s.clone() * b, lookup)]
    });
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::VarDivRound, vec![selector]);
    GadgetConfig {
      columns,
      tables,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for VarDivRoundChip<F> {
  fn name(&self) -> String {
    "VarDivRoundChip".to_string()
  }
  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }
  // One column is reserved at the end of the row for the shared divisor `b`.
  fn num_inputs_per_row(&self) -> usize {
    (self.config.columns.len() - 1) / self.num_cols_per_op()
  }
  fn num_outputs_per_row(&self) -> usize {
    self.num_inputs_per_row()
  }
  /// Assigns one row of rounded divisions `a_i / b`.
  ///
  /// `single_inputs` is `[zero, b]`: index 1 is the divisor cell, copied into
  /// the last column. For each dividend, the quotient and the remainder of
  /// `(2a + b) / (2b)` are computed out-of-circuit and assigned; the gate and
  /// lookups from `configure` enforce the identity.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let a_vec = &vec_inputs[0];
    // let zero = single_inputs[0].clone();
    let b = &single_inputs[1];
    let div_outp_min_val_i64 = self.config.div_outp_min_val;
    // Positive shift so the (possibly negative) dividend maps to a small
    // positive field element before the u128 conversion.
    let div_inp_min_val_pos_i64 = -self.config.shift_min_val;
    if self.config.use_selectors {
      let selector = self.config.selectors.get(&GadgetType::VarDivRound).unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    b.copy_advice(
      || "",
      region,
      self.config.columns[self.config.columns.len() - 1],
      row_offset,
    )?;
    let mut div_out = vec![];
    for (i, a) in a_vec.iter().enumerate() {
      let offset = i * self.num_cols_per_op();
      a.copy_advice(|| "", region, self.config.columns[offset], row_offset)?;
      let div_mod = a.value().zip(b.value()).map(|(a, b)| {
        let b = convert_to_u128(b);
        // Needs to be divisible by b
        let div_inp_min_val_pos_i64 = div_inp_min_val_pos_i64 / (b as i64) * (b as i64);
        let div_inp_min_val_pos = F::from(div_inp_min_val_pos_i64 as u64);
        let a_pos = *a + div_inp_min_val_pos;
        let a = convert_to_u128(&a_pos);
        // c = (2 * a + b) / (2 * b)
        let c_pos = a.rounded_div(b);
        // Undo the shift on the quotient (shift was rounded to a multiple of b).
        let c = (c_pos as i128 - (div_inp_min_val_pos_i64 as u128 / b) as i128) as i64;
        // r = (2 * a + b) % (2 * b)
        let rem_floor = (a as i128) - (c_pos * b) as i128;
        let r = 2 * rem_floor + (b as i128);
        let r = r as i64;
        (c, r)
      });
      let div_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 1],
        row_offset,
        || {
          div_mod.map(|(c, _)| {
            // Assigned as (c - min) - (-min) so negative quotients land in the
            // field consistently with the rest of the circuit.
            let offset = F::from(-div_outp_min_val_i64 as u64);
            let c = F::from((c - div_outp_min_val_i64) as u64);
            c - offset
          })
        },
      )?;
      let _mod_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 2],
        row_offset,
        || div_mod.map(|(_, r)| F::from(r as u64)),
      )?;
      div_out.push(div_cell);
    }
    Ok(div_out)
  }
  /// Pads the dividends to a whole number of rows, then divides row by row.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let mut inps = vec_inputs[0].clone();
    let initial_len = inps.len();
    // Needed to pad: bias - bias = 0
    let default = &single_inputs[0];
    while inps.len() % self.num_inputs_per_row() != 0 {
      inps.push(&default);
    }
    let res = self.op_aligned_rows(layouter.namespace(|| "var_div"), &vec![inps], single_inputs)?;
    Ok(res[..initial_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/var_div_big.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use rounded_div::RoundedDiv;
use super::gadget::{convert_to_u128, Gadget, GadgetConfig, GadgetType};
/// Variable-divisor rounded division for operands too large for a single
/// range-check lookup: the remainder terms are split into two limbs, each
/// checked against the lookup table.
pub struct VarDivRoundBigChip<F: PrimeField> {
  config: Rc<GadgetConfig>,
  // Ties the chip to the field type without storing any field element.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> VarDivRoundBigChip<F> {
  /// Wraps a shared gadget configuration in a chip instance.
  pub fn construct(config: Rc<GadgetConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }
  /// Each op uses seven columns:
  /// `a | c | r | (2b - r)_1 | (2b - r)_0 | r_1 | r_0`.
  pub fn num_cols_per_op() -> usize {
    7
  }
  /// Registers the rounded-division gate plus two-limb decompositions of
  /// `r` and `2b - r`, each limb range-checked via the input lookup.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let columns = gadget_config.columns;
    let selector = meta.complex_selector();
    let two = Expression::Constant(F::from(2));
    // Limb base for the two-limb decompositions.
    let range = Expression::Constant(F::from(gadget_config.num_rows as u64));
    let tables = gadget_config.tables;
    let lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
    // a | c | r | (2 b - r)_1 | (2 b - r)_0 | r_1 | r_0 | ... | b
    // a / b = c
    meta.create_gate("var_div_arithm", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      let b = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
      for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
        let offset = i * Self::num_cols_per_op();
        // Constrain that (2 * a + b) = (2 * b) * c + r
        let a = meta.query_advice(columns[offset], Rotation::cur());
        let c = meta.query_advice(columns[offset + 1], Rotation::cur());
        let r = meta.query_advice(columns[offset + 2], Rotation::cur());
        let lhs = a.clone() * two.clone() + b.clone();
        let rhs = b.clone() * two.clone() * c + r.clone();
        constraints.push(s.clone() * (lhs - rhs));
        // Constrain that (2 * b - r) = br1 * max_val + br0
        let br1 = meta.query_advice(columns[offset + 3], Rotation::cur());
        let br0 = meta.query_advice(columns[offset + 4], Rotation::cur());
        let lhs = b.clone() * two.clone() - r.clone();
        let rhs = br1 * range.clone() + br0;
        constraints.push(s.clone() * (lhs - rhs));
        // Constrains that r = r1 * max_val + r0
        let r1 = meta.query_advice(columns[offset + 5], Rotation::cur());
        let r0 = meta.query_advice(columns[offset + 6], Rotation::cur());
        let lhs = r.clone();
        let rhs = r1 * range.clone() + r0;
        constraints.push(s.clone() * (lhs - rhs));
      }
      constraints
    });
    // For var div big, we assume that a, b > 0 and are outputs of the previous layer
    // r must be constrained to be in [0, b)
    for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
      let offset = i * Self::num_cols_per_op();
      // (2 * b - r)_{1, 0} \in [0, 2^N)
      meta.lookup("var div big br1", |meta| {
        let s = meta.query_selector(selector);
        let br1 = meta.query_advice(columns[offset + 3], Rotation::cur());
        vec![(s * br1, lookup)]
      });
      meta.lookup("var div big br0", |meta| {
        let s = meta.query_selector(selector);
        let br0 = meta.query_advice(columns[offset + 4], Rotation::cur());
        vec![(s * br0, lookup)]
      });
      // r_{1, 0} \in [0, 2^N)
      meta.lookup("var div big r1", |meta| {
        let s = meta.query_selector(selector);
        let r1 = meta.query_advice(columns[offset + 5], Rotation::cur());
        vec![(s * r1, lookup)]
      });
      meta.lookup("var div big r0", |meta| {
        let s = meta.query_selector(selector);
        let r0 = meta.query_advice(columns[offset + 6], Rotation::cur());
        vec![(s * r0, lookup)]
      });
    }
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::VarDivRoundBig, vec![selector]);
    GadgetConfig {
      columns,
      tables,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for VarDivRoundBigChip<F> {
  fn name(&self) -> String {
    "VarDivBigRoundChip".to_string()
  }
  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }
  // One column is reserved at the end of the row for the shared divisor `b`.
  fn num_inputs_per_row(&self) -> usize {
    (self.config.columns.len() - 1) / self.num_cols_per_op()
  }
  fn num_outputs_per_row(&self) -> usize {
    self.num_inputs_per_row()
  }
  /// Assigns one row of rounded divisions `a_i / b`, decomposing the
  /// remainder terms `r` and `2b - r` into two base-`num_rows` limbs so the
  /// range checks from `configure` apply.
  ///
  /// `single_inputs` is `[zero, b]`: index 1 is the shared divisor cell.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let a_vec = &vec_inputs[0];
    // let zero = single_inputs[0].clone();
    let b = &single_inputs[1];
    let div_outp_min_val_i64 = self.config.div_outp_min_val;
    // Positive shift applied to the dividend before conversion to u128.
    let div_inp_min_val_pos_i64 = -self.config.shift_min_val;
    let num_rows = self.config.num_rows as i64;
    if self.config.use_selectors {
      let selector = self
        .config
        .selectors
        .get(&GadgetType::VarDivRoundBig)
        .unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    b.copy_advice(
      || "",
      region,
      self.config.columns[self.config.columns.len() - 1],
      row_offset,
    )?;
    let mut div_out = vec![];
    for (i, a) in a_vec.iter().enumerate() {
      let offset = i * self.num_cols_per_op();
      a.copy_advice(|| "", region, self.config.columns[offset], row_offset)
        .unwrap();
      let div_mod = a.value().zip(b.value()).map(|(a, b)| {
        let b = convert_to_u128(b);
        // Needs to be divisible by b
        let div_inp_min_val_pos_i64 = div_inp_min_val_pos_i64 / (b as i64) * (b as i64);
        let div_inp_min_val_pos = F::from(div_inp_min_val_pos_i64 as u64);
        let a_pos = *a + div_inp_min_val_pos;
        let a = convert_to_u128(&a_pos);
        // c = (2 * a + b) / (2 * b)
        let c_pos = a.rounded_div(b);
        // Undo the shift on the quotient.
        let c = (c_pos as i128 - (div_inp_min_val_pos_i64 as u128 / b) as i128) as i64;
        // r = (2 * a + b) % (2 * b)
        let rem_floor = (a as i128) - (c_pos * b) as i128;
        let r = 2 * rem_floor + (b as i128);
        let r = r as i64;
        (c, r)
      });
      // Two-limb split of 2b - r (base num_rows), matching the gate.
      let br_split = div_mod.zip(b.value()).map(|((_, r), b)| {
        let b = convert_to_u128(b) as i64;
        let val = 2 * b - r;
        let p1 = val / num_rows;
        let p0 = val % num_rows;
        // val = p1 * max_val + p0
        (p1, p0)
      });
      // Two-limb split of r (base num_rows).
      let r_split = div_mod.map(|(_, r)| {
        let p1 = r / num_rows;
        let p0 = r % num_rows;
        // val = p1 * max_val + p0
        (p1, p0)
      });
      let div_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 1],
        row_offset,
        || {
          div_mod.map(|(c, _)| {
            // Shift/unshift so negative quotients land in the field correctly.
            let offset = F::from(-div_outp_min_val_i64 as u64);
            let c = F::from((c - div_outp_min_val_i64) as u64);
            c - offset
          })
        },
      )?;
      let _mod_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 2],
        row_offset,
        || div_mod.map(|(_, r)| F::from(r as u64)),
      )?;
      // Assign 2 * b - r to the next 2 columns
      let _br_split_cell_1 = region.assign_advice(
        || "",
        self.config.columns[offset + 3],
        row_offset,
        || br_split.map(|(p1, _)| F::from(p1 as u64)),
      )?;
      let _br_split_cell_2 = region.assign_advice(
        || "",
        self.config.columns[offset + 4],
        row_offset,
        || br_split.map(|(_, p0)| F::from(p0 as u64)),
      )?;
      // Assign r to the next 2 columns
      let _r_split_cell_1 = region.assign_advice(
        || "",
        self.config.columns[offset + 5],
        row_offset,
        || r_split.map(|(p1, _)| F::from(p1 as u64)),
      )?;
      let _r_split_cell_2 = region.assign_advice(
        || "",
        self.config.columns[offset + 6],
        row_offset,
        || r_split.map(|(_, p0)| F::from(p0 as u64)),
      )?;
      div_out.push(div_cell);
    }
    Ok(div_out)
  }
  /// Pads the dividends to a whole number of rows, then divides row by row.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let mut inps = vec_inputs[0].clone();
    let initial_len = inps.len();
    // Needed to pad
    let default = &single_inputs[0];
    while inps.len() % self.num_inputs_per_row() != 0 {
      inps.push(&default);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| "var_div_big"),
      &vec![inps],
      single_inputs,
    )?;
    Ok(res[..initial_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/gadgets/var_div_big3.rs | use std::{marker::PhantomData, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Region},
halo2curves::ff::PrimeField,
plonk::{ConstraintSystem, Error, Expression},
poly::Rotation,
};
use rounded_div::RoundedDiv;
use super::gadget::{convert_to_u128, Gadget, GadgetConfig, GadgetType};
/// Variable-divisor rounded division with three-limb decompositions of the
/// remainder terms — the widest variant, for values whose remainders exceed
/// what the two-limb `VarDivRoundBigChip` can range-check.
pub struct VarDivRoundBig3Chip<F: PrimeField> {
  config: Rc<GadgetConfig>,
  // Ties the chip to the field type without storing any field element.
  _marker: PhantomData<F>,
}
impl<F: PrimeField> VarDivRoundBig3Chip<F> {
  /// Wraps a shared gadget configuration in a chip instance.
  pub fn construct(config: Rc<GadgetConfig>) -> Self {
    Self {
      config,
      _marker: PhantomData,
    }
  }
  /// Each op uses nine columns:
  /// `a | c | r | (2b - r)_2 | (2b - r)_1 | (2b - r)_0 | r_2 | r_1 | r_0`.
  pub fn num_cols_per_op() -> usize {
    9
  }
  /// Registers the rounded-division gate plus three-limb decompositions of
  /// `r` and `2b - r`, each limb range-checked via the input lookup.
  pub fn configure(meta: &mut ConstraintSystem<F>, gadget_config: GadgetConfig) -> GadgetConfig {
    let columns = gadget_config.columns;
    let selector = meta.complex_selector();
    let two = Expression::Constant(F::from(2));
    // Limb base and its square for the three-limb decompositions.
    let range = Expression::Constant(F::from(gadget_config.num_rows as u64));
    let range_sq = range.clone() * range.clone();
    let tables = gadget_config.tables;
    let lookup = tables.get(&GadgetType::InputLookup).unwrap()[0];
    // a | c | r | (2b - r)_2 | (2 b - r)_1 | (2 b - r)_0 | r_2 | r_1 | r_0 | ... | b
    // a / b = c
    meta.create_gate("var_div_big3_arithm", |meta| {
      let s = meta.query_selector(selector);
      let mut constraints = vec![];
      let b = meta.query_advice(columns[columns.len() - 1], Rotation::cur());
      for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
        let offset = i * Self::num_cols_per_op();
        // Constrain that (2 * a + b) = (2 * b) * c + r
        let a = meta.query_advice(columns[offset], Rotation::cur());
        let c = meta.query_advice(columns[offset + 1], Rotation::cur());
        let r = meta.query_advice(columns[offset + 2], Rotation::cur());
        let lhs = a.clone() * two.clone() + b.clone();
        let rhs = b.clone() * two.clone() * c + r.clone();
        constraints.push(s.clone() * (lhs - rhs));
        // Constrain that (2 * b - r) = br1 * max_val + br0
        let br2 = meta.query_advice(columns[offset + 3], Rotation::cur());
        let br1 = meta.query_advice(columns[offset + 4], Rotation::cur());
        let br0 = meta.query_advice(columns[offset + 5], Rotation::cur());
        let lhs = b.clone() * two.clone() - r.clone();
        let rhs = br2 * range_sq.clone() + br1 * range.clone() + br0;
        constraints.push(s.clone() * (lhs - rhs));
        // Constrains that r = r1 * max_val + r0
        let r2 = meta.query_advice(columns[offset + 6], Rotation::cur());
        let r1 = meta.query_advice(columns[offset + 7], Rotation::cur());
        let r0 = meta.query_advice(columns[offset + 8], Rotation::cur());
        let lhs = r.clone();
        let rhs = r2 * range_sq.clone() + r1 * range.clone() + r0;
        constraints.push(s.clone() * (lhs - rhs));
      }
      constraints
    });
    // For var div big, we assume that a, b > 0 and are outputs of the previous layer
    // r must be constrained to be in [0, b)
    for i in 0..(columns.len() - 1) / Self::num_cols_per_op() {
      let offset = i * Self::num_cols_per_op();
      // (2 * b - r)_{1, 0} \in [0, 2^N)
      meta.lookup("var div big br2", |meta| {
        let s = meta.query_selector(selector);
        let br2 = meta.query_advice(columns[offset + 3], Rotation::cur());
        vec![(s * br2, lookup)]
      });
      meta.lookup("var div big br1", |meta| {
        let s = meta.query_selector(selector);
        let br1 = meta.query_advice(columns[offset + 4], Rotation::cur());
        vec![(s * br1, lookup)]
      });
      meta.lookup("var div big br0", |meta| {
        let s = meta.query_selector(selector);
        let br0 = meta.query_advice(columns[offset + 5], Rotation::cur());
        vec![(s * br0, lookup)]
      });
      // r_{1, 0} \in [0, 2^N)
      meta.lookup("var div big r2", |meta| {
        let s = meta.query_selector(selector);
        let r2 = meta.query_advice(columns[offset + 6], Rotation::cur());
        vec![(s * r2, lookup)]
      });
      meta.lookup("var div big r1", |meta| {
        let s = meta.query_selector(selector);
        let r1 = meta.query_advice(columns[offset + 7], Rotation::cur());
        vec![(s * r1, lookup)]
      });
      meta.lookup("var div big r0", |meta| {
        let s = meta.query_selector(selector);
        let r0 = meta.query_advice(columns[offset + 8], Rotation::cur());
        vec![(s * r0, lookup)]
      });
    }
    let mut selectors = gadget_config.selectors;
    selectors.insert(GadgetType::VarDivRoundBig3, vec![selector]);
    GadgetConfig {
      columns,
      tables,
      selectors,
      ..gadget_config
    }
  }
}
impl<F: PrimeField> Gadget<F> for VarDivRoundBig3Chip<F> {
  fn name(&self) -> String {
    "VarDivBig3RoundChip".to_string()
  }
  fn num_cols_per_op(&self) -> usize {
    Self::num_cols_per_op()
  }
  // One column is reserved at the end of the row for the shared divisor `b`.
  fn num_inputs_per_row(&self) -> usize {
    (self.config.columns.len() - 1) / self.num_cols_per_op()
  }
  fn num_outputs_per_row(&self) -> usize {
    self.num_inputs_per_row()
  }
  /// Assigns one row of rounded divisions `a_i / b`, decomposing `r` and
  /// `2b - r` into three base-`num_rows` limbs to satisfy the range checks.
  ///
  /// `single_inputs` is `[zero, b]`: index 1 is the shared divisor cell.
  /// Unlike the other variants, the dividend shift here is a fixed `2^62`
  /// (rounded to a multiple of `b`) rather than `shift_min_val`.
  fn op_row_region(
    &self,
    region: &mut Region<F>,
    row_offset: usize,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let a_vec = &vec_inputs[0];
    // let zero = single_inputs[0].clone();
    let b = &single_inputs[1];
    let c_shift_base = (-(1_i64 << 62)) as i128;
    let num_rows = self.config.num_rows as i128;
    if self.config.use_selectors {
      let selector = self
        .config
        .selectors
        .get(&GadgetType::VarDivRoundBig3)
        .unwrap()[0];
      selector.enable(region, row_offset)?;
    }
    b.copy_advice(
      || "",
      region,
      self.config.columns[self.config.columns.len() - 1],
      row_offset,
    )?;
    let mut div_out = vec![];
    for (i, a) in a_vec.iter().enumerate() {
      let offset = i * self.num_cols_per_op();
      a.copy_advice(|| "", region, self.config.columns[offset], row_offset)
        .unwrap();
      let div_mod = a.value().zip(b.value()).map(|(a, b)| {
        let b = convert_to_u128(b);
        // Shift rounded down to a multiple of b so the quotient unshifts exactly.
        let c_shift = (-c_shift_base) as u128 / b * b;
        let div_inp_min_val_pos = F::from(c_shift as u64);
        let a_pos = *a + div_inp_min_val_pos;
        let a = convert_to_u128(&a_pos);
        // c = (2 * a + b) / (2 * b)
        let c_pos = a.rounded_div(b);
        let c = c_pos as i128 - (c_shift / b) as i128;
        // r = (2 * a + b) % (2 * b)
        let rem_floor = (a as i128) - (c_pos * b) as i128;
        let r = 2 * rem_floor + (b as i128);
        (c, r)
      });
      // Three-limb split of 2b - r (base num_rows), matching the gate.
      let br_split = div_mod.zip(b.value()).map(|((_, r), b)| {
        let b = convert_to_u128(b) as i128;
        let val = 2 * b - r;
        let p2 = val / (num_rows * num_rows);
        let p1 = (val % (num_rows * num_rows)) / num_rows;
        let p0 = val % num_rows;
        // val = p2 * max_val^2 + p1 * max_val + p0
        (p2, p1, p0)
      });
      // Three-limb split of r (base num_rows).
      let r_split = div_mod.map(|(_, r)| {
        let p2 = r / (num_rows * num_rows);
        let p1 = (r % (num_rows * num_rows)) / num_rows;
        let p0 = r % num_rows;
        // val = p1 * max_val + p0
        (p2, p1, p0)
      });
      let div_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 1],
        row_offset,
        || {
          div_mod.map(|(c, _)| {
            // Shift/unshift so negative quotients land in the field correctly.
            let offset = F::from(-c_shift_base as u64);
            let c = F::from((c - c_shift_base) as u64);
            c - offset
          })
        },
      )?;
      let _mod_cell = region.assign_advice(
        || "",
        self.config.columns[offset + 2],
        row_offset,
        || div_mod.map(|(_, r)| F::from(r as u64)),
      )?;
      // Assign 2 * b - r to the next 3 columns
      let _br_split_cell_2 = region.assign_advice(
        || "",
        self.config.columns[offset + 3],
        row_offset,
        || br_split.map(|(p2, _, _)| F::from(p2 as u64)),
      )?;
      let _br_split_cell_1 = region.assign_advice(
        || "",
        self.config.columns[offset + 4],
        row_offset,
        || br_split.map(|(_, p1, _)| F::from(p1 as u64)),
      )?;
      let _br_split_cell_0 = region.assign_advice(
        || "",
        self.config.columns[offset + 5],
        row_offset,
        || br_split.map(|(_, _, p0)| F::from(p0 as u64)),
      )?;
      // Assign r to the next 3 columns
      let _r_split_cell_2 = region.assign_advice(
        || "",
        self.config.columns[offset + 6],
        row_offset,
        || r_split.map(|(p2, _, _)| F::from(p2 as u64)),
      )?;
      let _r_split_cell_1 = region.assign_advice(
        || "",
        self.config.columns[offset + 7],
        row_offset,
        || r_split.map(|(_, p1, _)| F::from(p1 as u64)),
      )?;
      let _r_split_cell_0 = region.assign_advice(
        || "",
        self.config.columns[offset + 8],
        row_offset,
        || r_split.map(|(_, _, p0)| F::from(p0 as u64)),
      )?;
      div_out.push(div_cell);
    }
    Ok(div_out)
  }
  /// Pads the dividends to a whole number of rows, then divides row by row.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    single_inputs: &Vec<&AssignedCell<F, F>>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let mut inps = vec_inputs[0].clone();
    let initial_len = inps.len();
    // Needed to pad
    let default = &single_inputs[0];
    while inps.len() % self.num_inputs_per_row() != 0 {
      inps.push(&default);
    }
    let res = self.op_aligned_rows(
      layouter.namespace(|| "var_div_big3"),
      &vec![inps],
      single_inputs,
    )?;
    Ok(res[..initial_len].to_vec())
  }
}
| https://github.com/ddkang/zkml |
src/layers.rs | // Generics
pub mod averager;
pub mod arithmetic;
pub mod shape;
// Concrete implementations
pub mod avg_pool_2d;
pub mod batch_mat_mul;
pub mod conv2d;
pub mod div_fixed;
pub mod fully_connected;
pub mod logistic;
pub mod max_pool_2d;
pub mod mean;
pub mod noop;
pub mod pow;
pub mod rsqrt;
pub mod softmax;
pub mod sqrt;
pub mod square;
pub mod squared_diff;
pub mod tanh;
pub mod update;
// Special: dag
pub mod dag;
// Special: layer
pub mod layer;
| https://github.com/ddkang/zkml |
src/layers/arithmetic.rs | use std::{collections::HashMap, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use crate::{gadgets::gadget::GadgetConfig, utils::helpers::broadcast};
use super::layer::{AssignedTensor, CellRc};
pub mod add;
pub mod div_var;
pub mod mul;
pub mod sub;
/// Shared plumbing for element-wise binary layers (add/sub/mul/div):
/// broadcasts the operands, flattens them to cell references, and delegates
/// the per-pair computation to `gadget_forward`.
pub trait Arithmetic<F: PrimeField> {
  /// Runs the underlying pairwise gadget on the flattened operands.
  fn gadget_forward(
    &self,
    layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    constants: &Vec<&AssignedCell<F, F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error>;
  /// Broadcasts the two tensors to a common shape, applies the gadget, and
  /// returns the flat output cells together with the output shape.
  fn arithmetic_forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<(Vec<CellRc<F>>, Vec<usize>), Error> {
    assert_eq!(tensors.len(), 2);
    // Broadcast both operands to a common shape before the element-wise op.
    let (lhs, rhs) = broadcast(&tensors[0], &tensors[1]);
    assert_eq!(lhs.shape(), rhs.shape());
    let out_shape = lhs.shape().to_vec();
    let zero = constants.get(&0).unwrap().as_ref();
    let lhs_cells: Vec<_> = lhs.iter().map(|c| c.as_ref()).collect();
    let rhs_cells: Vec<_> = rhs.iter().map(|c| c.as_ref()).collect();
    let outp = self.gadget_forward(
      layouter.namespace(|| ""),
      &vec![lhs_cells, rhs_cells],
      &vec![zero],
      gadget_config.clone(),
    )?;
    let outp: Vec<CellRc<F>> = outp.into_iter().map(Rc::new).collect();
    Ok((outp, out_shape))
  }
}
| https://github.com/ddkang/zkml |
src/layers/arithmetic/add.rs | use std::{collections::HashMap, rc::Rc, vec};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::{
add_pairs::AddPairsChip,
gadget::{Gadget, GadgetConfig, GadgetType},
nonlinear::relu::ReluChip,
},
layers::layer::{ActivationType, AssignedTensor, CellRc, GadgetConsumer},
};
use super::{
super::layer::{Layer, LayerConfig},
Arithmetic,
};
#[derive(Clone, Debug)]
pub struct AddChip {}
impl AddChip {
  /// Decodes the fused-activation code stored at `layer_params[0]`
  /// (0 = none, 1 = ReLU).
  fn get_activation(&self, layer_params: &Vec<i64>) -> ActivationType {
    match layer_params[0] {
      0 => ActivationType::None,
      1 => ActivationType::Relu,
      _ => panic!("Unsupported activation type for add"),
    }
  }
}
impl<F: PrimeField> Arithmetic<F> for AddChip {
  /// Element-wise addition via the `AddPairs` gadget.
  fn gadget_forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    constants: &Vec<&AssignedCell<F, F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let chip = AddPairsChip::<F>::construct(gadget_config);
    chip.forward(layouter.namespace(|| "add chip"), vec_inputs, constants)
  }
}
impl<F: PrimeField> Layer<F> for AddChip {
  /// Broadcast-adds the two input tensors, then applies the fused activation
  /// (if any) encoded in the layer parameters.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    layer_config: &LayerConfig,
  ) -> Result<Vec<AssignedTensor<F>>, Error> {
    let activation = self.get_activation(&layer_config.layer_params);
    // Element-wise addition (with broadcasting).
    let (summed, out_shape) = self.arithmetic_forward(
      layouter.namespace(|| ""),
      tensors,
      constants,
      gadget_config.clone(),
    )?;
    // Apply the fused activation, if any.
    let activated = match activation {
      ActivationType::None => summed,
      ActivationType::Relu => {
        let zero = constants.get(&0).unwrap();
        let inp: Vec<_> = summed.iter().map(|c| c.as_ref()).collect();
        let relu_chip = ReluChip::<F>::construct(gadget_config);
        let relu_out =
          relu_chip.forward(layouter.namespace(|| "relu"), &vec![inp], &vec![zero.as_ref()])?;
        relu_out.into_iter().map(Rc::new).collect()
      }
      _ => panic!("Unsupported activation type for add"),
    };
    let outp = Array::from_shape_vec(IxDyn(out_shape.as_slice()), activated).unwrap();
    Ok(vec![outp])
  }
}
impl GadgetConsumer for AddChip {
  /// Reports the gadgets this layer uses: always `AddPairs`, plus `Relu`
  /// when a ReLU activation is fused in.
  fn used_gadgets(&self, layer_params: Vec<i64>) -> Vec<crate::gadgets::gadget::GadgetType> {
    let activation = self.get_activation(&layer_params);
    let mut gadgets = vec![GadgetType::AddPairs];
    if activation == ActivationType::Relu {
      gadgets.push(GadgetType::Relu);
    } else if activation != ActivationType::None {
      panic!("Unsupported activation type for add")
    }
    gadgets
  }
}
| https://github.com/ddkang/zkml |
src/layers/arithmetic/div_var.rs | use std::{collections::HashMap, rc::Rc, vec};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::{
gadget::{Gadget, GadgetConfig, GadgetType},
mul_pairs::MulPairsChip,
var_div::VarDivRoundChip,
},
layers::layer::{AssignedTensor, CellRc, GadgetConsumer, Layer},
};
use super::Arithmetic;
pub struct DivVarChip {}
// TODO: hack. Used for multiplying by the scale factor
impl<F: PrimeField> Arithmetic<F> for DivVarChip {
  /// Element-wise multiplication via the `MulPairs` gadget (used here to
  /// pre-scale the numerator by the scale factor).
  fn gadget_forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    constants: &Vec<&AssignedCell<F, F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let chip = MulPairsChip::<F>::construct(gadget_config.clone());
    chip.forward(layouter.namespace(|| "mul pairs chip"), vec_inputs, constants)
  }
}
impl<F: PrimeField> Layer<F> for DivVarChip {
  /// Divides `tensors[0]` element-wise by the single-element divisor tensor
  /// `tensors[1]`: scale the numerator by SF, then variable-divide with
  /// rounding so the fixed-point scale is preserved.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    _layer_config: &crate::layers::layer::LayerConfig,
  ) -> Result<Vec<AssignedTensor<F>>, Error> {
    assert_eq!(tensors.len(), 2);
    // TODO: We only support dividing by a single number for now
    assert_eq!(tensors[1].shape().len(), 1);
    assert_eq!(tensors[1].shape()[0], 1);
    // out = inp * SF
    let sf_cell = constants
      .get(&(gadget_config.scale_factor as i64))
      .unwrap()
      .as_ref();
    let sf_tensor = Array::from_shape_vec(IxDyn(&[1]), vec![Rc::new(sf_cell.clone())]).unwrap();
    let (scaled, out_shape) = self.arithmetic_forward(
      layouter.namespace(|| ""),
      &vec![tensors[0].clone(), sf_tensor],
      constants,
      gadget_config.clone(),
    )?;
    // Rounded division by the (variable) divisor cell.
    let divisor = tensors[1].iter().next().unwrap().as_ref();
    let zero = constants.get(&0).unwrap().as_ref();
    let scaled_refs: Vec<_> = scaled.iter().map(|c| c.as_ref()).collect();
    let var_div_chip = VarDivRoundChip::<F>::construct(gadget_config.clone());
    let quotient = var_div_chip.forward(
      layouter.namespace(|| "mul div"),
      &vec![scaled_refs],
      &vec![zero, divisor],
    )?;
    let quotient: Vec<_> = quotient.into_iter().map(Rc::new).collect();
    Ok(vec![
      Array::from_shape_vec(IxDyn(out_shape.as_slice()), quotient).unwrap(),
    ])
  }
}
impl GadgetConsumer for DivVarChip {
  /// Gadgets used: multiplication for the SF pre-scale, variable rounded
  /// division, and the range-check lookup the divider depends on.
  fn used_gadgets(&self, _layer_params: Vec<i64>) -> Vec<crate::gadgets::gadget::GadgetType> {
    [
      GadgetType::MulPairs,
      GadgetType::VarDivRound,
      GadgetType::InputLookup,
    ]
    .to_vec()
  }
}
| https://github.com/ddkang/zkml |
src/layers/arithmetic/mul.rs | use std::{collections::HashMap, rc::Rc, vec};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::{
gadget::{Gadget, GadgetConfig, GadgetType},
mul_pairs::MulPairsChip,
var_div::VarDivRoundChip,
},
layers::layer::{AssignedTensor, CellRc, GadgetConsumer},
};
use super::{
super::layer::{Layer, LayerConfig},
Arithmetic,
};
#[derive(Clone, Debug)]
pub struct MulChip {}
impl<F: PrimeField> Arithmetic<F> for MulChip {
  /// Element-wise multiplication via the `MulPairs` gadget.
  fn gadget_forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    constants: &Vec<&AssignedCell<F, F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let chip = MulPairsChip::<F>::construct(gadget_config.clone());
    chip.forward(layouter.namespace(|| "mul pairs chip"), vec_inputs, constants)
  }
}
// FIXME: move this + add to an arithmetic layer
impl<F: PrimeField> Layer<F> for MulChip {
  /// Broadcast-multiplies the two input tensors, then divides by the scale
  /// factor to undo the extra SF picked up by the multiplication.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    _layer_config: &LayerConfig,
  ) -> Result<Vec<AssignedTensor<F>>, Error> {
    // Element-wise product (with broadcasting).
    let (prod, out_shape) = self.arithmetic_forward(
      layouter.namespace(|| ""),
      tensors,
      constants,
      gadget_config.clone(),
    )?;
    // Rescale: divide the products by SF with rounding.
    let sf = constants
      .get(&(gadget_config.scale_factor as i64))
      .unwrap()
      .as_ref();
    let zero = constants.get(&0).unwrap().as_ref();
    let prod_refs: Vec<_> = prod.iter().map(|c| c.as_ref()).collect();
    let var_div_chip = VarDivRoundChip::<F>::construct(gadget_config.clone());
    let scaled = var_div_chip.forward(
      layouter.namespace(|| "mul div"),
      &vec![prod_refs],
      &vec![zero, sf],
    )?;
    let scaled: Vec<_> = scaled.into_iter().map(Rc::new).collect();
    Ok(vec![
      Array::from_shape_vec(IxDyn(out_shape.as_slice()), scaled).unwrap(),
    ])
  }
}
impl GadgetConsumer for MulChip {
  /// Gadgets used: the pairwise multiplier, the rounded divider for the SF
  /// rescale, and the range-check lookup the divider depends on.
  fn used_gadgets(&self, _layer_params: Vec<i64>) -> Vec<crate::gadgets::gadget::GadgetType> {
    [
      GadgetType::MulPairs,
      GadgetType::VarDivRound,
      GadgetType::InputLookup,
    ]
    .to_vec()
  }
}
| https://github.com/ddkang/zkml |
src/layers/arithmetic/sub.rs | use std::{collections::HashMap, rc::Rc, vec};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::{
gadget::{Gadget, GadgetConfig, GadgetType},
sub_pairs::SubPairsChip,
},
layers::layer::{AssignedTensor, CellRc, GadgetConsumer},
};
use super::{
super::layer::{Layer, LayerConfig},
Arithmetic,
};
/// Element-wise subtraction layer. No rescaling step is needed because
/// subtraction preserves the fixed-point scale of its operands.
#[derive(Clone, Debug)]
pub struct SubChip {}
impl<F: PrimeField> Arithmetic<F> for SubChip {
  /// Runs the `SubPairs` gadget over the paired inputs and returns the
  /// cells holding the element-wise differences.
  fn gadget_forward(
    &self,
    mut layouter: impl Layouter<F>,
    vec_inputs: &Vec<Vec<&AssignedCell<F, F>>>,
    constants: &Vec<&AssignedCell<F, F>>,
    gadget_config: Rc<GadgetConfig>,
  ) -> Result<Vec<AssignedCell<F, F>>, Error> {
    let chip = SubPairsChip::<F>::construct(gadget_config);
    chip.forward(layouter.namespace(|| "sub chip"), vec_inputs, constants)
  }
}
impl<F: PrimeField> Layer<F> for SubChip {
  /// Subtracts the second input tensor from the first, element-wise, and
  /// reshapes the flat result back into the broadcast output shape.
  fn forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    _layer_config: &LayerConfig,
  ) -> Result<Vec<AssignedTensor<F>>, Error> {
    let (cells, shape) = self.arithmetic_forward(
      layouter.namespace(|| ""),
      tensors,
      constants,
      gadget_config.clone(),
    )?;
    let tensor = Array::from_shape_vec(IxDyn(shape.as_slice()), cells).unwrap();
    Ok(vec![tensor])
  }
}
impl GadgetConsumer for SubChip {
  /// Subtraction relies solely on the `SubPairs` gadget.
  fn used_gadgets(&self, _layer_params: Vec<i64>) -> Vec<crate::gadgets::gadget::GadgetType> {
    Vec::from([GadgetType::SubPairs])
  }
}
| https://github.com/ddkang/zkml |
src/layers/averager.rs | use std::{collections::HashMap, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter},
halo2curves::ff::PrimeField,
plonk::Error,
};
use crate::gadgets::gadget::Gadget;
use crate::gadgets::{adder::AdderChip, gadget::GadgetConfig, var_div::VarDivRoundChip};
use super::layer::{AssignedTensor, CellRc, LayerConfig};
pub trait Averager<F: PrimeField> {
  /// Flattens the input tensor into groups of cells; each group is averaged
  /// independently (e.g. one group per pooling window).
  fn splat(&self, input: &AssignedTensor<F>, layer_config: &LayerConfig) -> Vec<Vec<CellRc<F>>>;

  /// Returns the assigned cell holding the divisor, i.e. the number of
  /// elements averaged per group.
  fn get_div_val(
    &self,
    layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    gadget_config: Rc<GadgetConfig>,
    layer_config: &LayerConfig,
  ) -> Result<AssignedCell<F, F>, Error>;

  /// Shared averaging routine: sums each group produced by `splat`, then
  /// divides every sum by `get_div_val` with a rounding division.
  fn avg_forward(
    &self,
    mut layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    layer_config: &LayerConfig,
  ) -> Result<Vec<CellRc<F>>, Error> {
    // Due to Mean BS
    // assert_eq!(tensors.len(), 1);
    let zero = constants.get(&0).unwrap().as_ref();

    let inp = &tensors[0];
    let splat_inp = self.splat(inp, layer_config);

    // Sum each group of cells with the adder gadget.
    let adder_chip = AdderChip::<F>::construct(gadget_config.clone());
    let single_inputs = vec![zero];
    let mut added = vec![];
    for (i, group) in splat_inp.iter().enumerate() {
      let refs = group.iter().map(|x| x.as_ref()).collect::<Vec<_>>();
      let tmp = adder_chip.forward(
        layouter.namespace(|| format!("average {}", i)),
        &vec![refs],
        &single_inputs,
      )?;
      added.push(tmp[0].clone());
    }

    // Divide each group sum by the group size (rounding division).
    let div = self.get_div_val(
      layouter.namespace(|| "average div"),
      tensors,
      gadget_config.clone(),
      layer_config,
    )?;
    let var_div_chip = VarDivRoundChip::<F>::construct(gadget_config.clone());
    let single_inputs = vec![zero, &div];
    // `added` owns the summed cells; the divider takes references to them.
    let added = added.iter().collect::<Vec<_>>();
    let dived = var_div_chip.forward(
      layouter.namespace(|| "average div"),
      &vec![added],
      &single_inputs,
    )?;
    let dived = dived.into_iter().map(Rc::new).collect::<Vec<_>>();
    Ok(dived)
  }
}
| https://github.com/ddkang/zkml |
src/layers/avg_pool_2d.rs | use std::{collections::HashMap, rc::Rc};
use halo2_proofs::{
circuit::{AssignedCell, Layouter, Value},
halo2curves::ff::PrimeField,
plonk::Error,
};
use ndarray::{Array, IxDyn};
use crate::{
gadgets::gadget::{GadgetConfig, GadgetType},
layers::max_pool_2d::MaxPool2DChip,
};
use super::{
averager::Averager,
layer::{AssignedTensor, CellRc, GadgetConsumer, Layer, LayerConfig},
};
pub struct AvgPool2DChip {}
impl<F: PrimeField> Averager<F> for AvgPool2DChip {
  /// Splats the (1, H, W, C) input into one group of cells per pooling
  /// window, reusing the max-pool splatting logic.
  fn splat(&self, input: &AssignedTensor<F>, layer_config: &LayerConfig) -> Vec<Vec<CellRc<F>>> {
    assert_eq!(input.shape().len(), 4);
    // Don't support batch size > 1 yet
    assert_eq!(input.shape()[0], 1);
    // TODO: refactor this
    MaxPool2DChip::splat(input, layer_config).unwrap()
  }

  /// Assigns the pooling-window element count (h * w) as the divisor cell.
  ///
  /// # Errors
  /// Propagates any assignment failure from the layouter instead of
  /// panicking (the original unwrapped both fallible calls).
  fn get_div_val(
    &self,
    mut layouter: impl Layouter<F>,
    _tensors: &Vec<AssignedTensor<F>>,
    gadget_config: Rc<GadgetConfig>,
    layer_config: &LayerConfig,
  ) -> Result<AssignedCell<F, F>, Error> {
    // FIXME: this needs to be revealed
    let div = layer_config.layer_params[0] * layer_config.layer_params[1];
    let div = F::from(div as u64);
    // Return the region's result directly so `Error` propagates to callers.
    layouter.assign_region(
      || "avg pool 2d div",
      |mut region| {
        region.assign_advice(
          || "avg pool 2d div",
          gadget_config.columns[0],
          0,
          || Value::known(div),
        )
      },
    )
  }
}
impl<F: PrimeField> Layer<F> for AvgPool2DChip {
  /// Average-pools the (1, H, W, C) input tensor and reshapes the flat
  /// result back into (1, H_out, W_out, C).
  ///
  /// Fixes: removes a leftover debug `println!` and propagates
  /// `avg_forward` failures with `?` instead of unwrapping.
  fn forward(
    &self,
    layouter: impl Layouter<F>,
    tensors: &Vec<AssignedTensor<F>>,
    constants: &HashMap<i64, CellRc<F>>,
    gadget_config: Rc<GadgetConfig>,
    layer_config: &LayerConfig,
  ) -> Result<Vec<AssignedTensor<F>>, Error> {
    let dived = self.avg_forward(layouter, tensors, constants, gadget_config, layer_config)?;

    let inp = &tensors[0];
    // TODO: refactor this — output geometry comes from the max-pool helper.
    let out_xy = MaxPool2DChip::shape(inp, layer_config);
    let out_shape = vec![1, out_xy.0, out_xy.1, inp.shape()[3]];
    let out = Array::from_shape_vec(IxDyn(&out_shape), dived).unwrap();
    Ok(vec![out])
  }
}
impl GadgetConsumer for AvgPool2DChip {
  /// Gadgets used by average pooling: the adder for window sums, the
  /// rounding divider, and the input lookup table.
  fn used_gadgets(&self, _layer_params: Vec<i64>) -> Vec<crate::gadgets::gadget::GadgetType> {
    Vec::from([
      GadgetType::Adder,
      GadgetType::VarDivRound,
      GadgetType::InputLookup,
    ])
  }
}
| https://github.com/ddkang/zkml |