Merge pull request #34468 from frreiss:issue-tag-filters
PiperOrigin-RevId: 281976830
Change-Id: I425423da7e6e1c94ca2789ccab1a5d13bba1e594
diff --git a/README.md b/README.md
index c0f5d5a..51ca43e 100644
--- a/README.md
+++ b/README.md
@@ -117,9 +117,9 @@
**Linux s390x** Nightly | [](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/) | [Nightly](http://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_CI/)
**Linux s390x CPU** Stable Release | [](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/) | [Release](https://ibmz-ci.osuosl.org/job/TensorFlow_IBMZ_Release_Build/)
**Linux ppc64le CPU** Nightly | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Nightly_Artifact/)
-**Linux ppc64le CPU** Stable Release | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) | [Release](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/)
+**Linux ppc64le CPU** Stable Release | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_CPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_CPU_Release_Build/)
**Linux ppc64le GPU** Nightly | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Build/) | [Nightly](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Nightly_Artifact/)
-**Linux ppc64le GPU** Stable Release | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) | [Release](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/)
+**Linux ppc64le GPU** Stable Release | [](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) | Release [1.15](https://powerci.osuosl.org/job/TensorFlow_PPC64LE_GPU_Release_Build/) / [2.x](https://powerci.osuosl.org/job/TensorFlow2_PPC64LE_GPU_Release_Build/)
**Linux CPU with Intel® MKL-DNN** Nightly | [](https://tensorflow-ci.intel.com/job/tensorflow-mkl-linux-cpu/) | [Nightly](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-whl-nightly/)
**Linux CPU with Intel® MKL-DNN** <br> **Supports Python 2.7, 3.4, 3.5, 3.6 and 3.7** | [](https://tensorflow-ci.intel.com/job/tensorflow-mkl-build-release-whl/lastStableBuild) | [1.14.0 PyPI](https://pypi.org/project/intel-tensorflow/)
**Red Hat® Enterprise Linux® 7.6 CPU & GPU** <br> Python 2.7, 3.6 | [](https://jenkins-tensorflow.apps.ci.centos.org/job/tensorflow-rhel7-3.6/2/) | [1.13.1 PyPI](https://tensorflow.pypi.thoth-station.ninja/index/)
diff --git a/configure.py b/configure.py
index 902838a..e02428a 100644
--- a/configure.py
+++ b/configure.py
@@ -1179,10 +1179,10 @@
write_to_bazelrc('test --test_env=LD_LIBRARY_PATH')
else:
test_and_build_filters.append('-gpu')
- write_to_bazelrc('test --test_tag_filters=%s'
- % ','.join(test_and_build_filters + test_only_filters))
- write_to_bazelrc('test --build_tag_filters=%s'
- % ','.join(test_and_build_filters))
+ write_to_bazelrc('test --test_tag_filters=%s' %
+ ','.join(test_and_build_filters + test_only_filters))
+ write_to_bazelrc('test --build_tag_filters=%s' %
+ ','.join(test_and_build_filters))
def set_system_libs_flag(environ_cp):
diff --git a/tensorflow/c/experimental/filesystem/filesystem_interface.h b/tensorflow/c/experimental/filesystem/filesystem_interface.h
index 2340078..dea6290 100644
--- a/tensorflow/c/experimental/filesystem/filesystem_interface.h
+++ b/tensorflow/c/experimental/filesystem/filesystem_interface.h
@@ -611,13 +611,12 @@
///
/// The returned entries are paths relative to `path`.
///
- /// Caller passes `nullptr` for `entries`. Plugins must allocate `entries`
- /// to hold all names that need to be returned and return the size of
- /// `entries`.
+ /// Plugins must allocate `entries` to hold all names that need to be returned
+  /// and return the size of `entries`. The caller takes ownership of
+  /// `entries` after the call.
///
/// In case of error, plugins must set `status` to a value different than
- /// `TF_OK`, return -1 and leave `entries` unchanged (i.e., `nullptr`, freeing
- /// any allocated memory).
+  /// `TF_OK`, free any memory allocated for `entries`, and return -1.
///
/// Plugins:
/// * Must set `status` to `TF_OK` if all children were returned.
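For readers of this contract, a minimal plugin-side sketch may help; the `ListDirectory` helper and the chosen error code are hypothetical, and only the allocation and error protocol follow the comment above:

```cpp
// Hypothetical plugin implementation honoring the documented contract:
// on success, allocate `entries` and hand ownership to the caller; on error,
// set a non-TF_OK status, free anything allocated, and return -1.
static int GetChildren(const TF_Filesystem* filesystem, const char* path,
                       char*** entries, TF_Status* status) {
  std::vector<std::string> names;
  if (!ListDirectory(path, &names)) {          // hypothetical helper
    TF_SetStatus(status, TF_NOT_FOUND, path);  // any value other than TF_OK
    return -1;                                 // nothing left for the caller to free
  }
  *entries = static_cast<char**>(calloc(names.size(), sizeof(char*)));
  for (size_t i = 0; i < names.size(); ++i)
    (*entries)[i] = strdup(names[i].c_str());  // caller frees each name
  TF_SetStatus(status, TF_OK, "");
  return static_cast<int>(names.size());
}
```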
diff --git a/tensorflow/c/experimental/filesystem/modular_filesystem.cc b/tensorflow/c/experimental/filesystem/modular_filesystem.cc
index aae5834..f04fd65 100644
--- a/tensorflow/c/experimental/filesystem/modular_filesystem.cc
+++ b/tensorflow/c/experimental/filesystem/modular_filesystem.cc
@@ -157,9 +157,25 @@
Status ModularFileSystem::GetChildren(const std::string& dir,
std::vector<std::string>* result) {
- // TODO(mihaimaruseac): Implementation to come in a new change
- return Status(error::UNIMPLEMENTED,
- "Modular filesystem stub not implemented yet");
+ if (ops_->get_children == nullptr)
+ return errors::Unimplemented(tensorflow::strings::StrCat(
+ "Filesystem for ", dir, " does not support GetChildren()"));
+
+ UniquePtrTo_TF_Status plugin_status(TF_NewStatus(), TF_DeleteStatus);
+ std::string translated_name = TranslateName(dir);
+ char** children;
+ const int num_children =
+ ops_->get_children(filesystem_.get(), translated_name.c_str(), &children,
+ plugin_status.get());
+ if (num_children >= 0) {
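+    // The plugin allocated `children` and each name; copy them into `result`
+    // and free the plugin-provided memory (ownership transferred to us).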
+ for (int i = 0; i < num_children; i++) {
+ result->push_back(std::string(children[i]));
+ free(children[i]);
+ }
+ free(children);
+ }
+
+ return StatusFromTF_Status(plugin_status.get());
}
Status ModularFileSystem::GetMatchingPaths(const std::string& pattern,
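Callers never see this plumbing; they keep going through the usual `Env` API. A minimal caller-side sketch (the path is illustrative and assumes the corresponding filesystem plugin is registered):

```cpp
// Caller-side sketch: GetChildren via Env returns names relative to the dir.
std::vector<std::string> children;
tensorflow::Status s =
    tensorflow::Env::Default()->GetChildren("/tmp/some_dir", &children);
if (s.ok()) {
  for (const std::string& child : children) {
    // `child` is a name relative to /tmp/some_dir, e.g. "a_file".
  }
}
```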
diff --git a/tensorflow/c/experimental/filesystem/modular_filesystem_test.cc b/tensorflow/c/experimental/filesystem/modular_filesystem_test.cc
index e977a6e..807acf4e 100644
--- a/tensorflow/c/experimental/filesystem/modular_filesystem_test.cc
+++ b/tensorflow/c/experimental/filesystem/modular_filesystem_test.cc
@@ -794,6 +794,88 @@
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
}
+TEST_P(ModularFileSystemTest, TestGetChildren) {
+ const std::string dirpath = GetURIForPath("dir");
+ Status status = env_->CreateDir(dirpath);
+ if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported";
+
+ // If updating, make sure to update expected_children below.
+ const std::vector<std::string> filenames = {
+ GetURIForPath("dir/a_file"),
+ GetURIForPath("dir/another_file"),
+ };
+ for (const auto& filename : filenames) {
+ std::unique_ptr<WritableFile> file;
+ status = env_->NewWritableFile(filename, &file);
+ if (!status.ok()) GTEST_SKIP() << "NewWritableFile() not supported";
+ }
+
+ // If updating, make sure to update expected_children below.
+ const std::vector<std::string> dirnames = {
+ GetURIForPath("dir/a_dir"),
+ GetURIForPath("dir/another_dir"),
+ };
+ for (const auto& dirname : dirnames) {
+ status = env_->CreateDir(dirname);
+ if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported";
+ }
+
+ std::vector<std::string> children;
+ status = env_->GetChildren(dirpath, &children);
+ EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+ if (!status.ok()) GTEST_SKIP() << "GetChildren() not supported";
+
+ // All entries must show up in the vector.
+  // Each entry is the last path component of a name in filenames or dirnames.
+ const std::vector<std::string> expected_children = {"a_file", "another_file",
+ "a_dir", "another_dir"};
+ EXPECT_EQ(children.size(), filenames.size() + dirnames.size());
+ for (const auto& child : expected_children)
+ EXPECT_NE(std::find(children.begin(), children.end(), child),
+ children.end());
+}
+
+TEST_P(ModularFileSystemTest, TestGetChildrenEmpty) {
+ const std::string dirpath = GetURIForPath("dir");
+ Status status = env_->CreateDir(dirpath);
+ if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported";
+
+ std::vector<std::string> children;
+ status = env_->GetChildren(dirpath, &children);
+ EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+ EXPECT_EQ(children.size(), 0);
+}
+
+TEST_P(ModularFileSystemTest, TestGetChildrenOfFile) {
+ const std::string filepath = GetURIForPath("a_file");
+ std::unique_ptr<WritableFile> file;
+ Status status = env_->NewWritableFile(filepath, &file);
+ if (!status.ok()) GTEST_SKIP() << "NewWritableFile() not supported";
+
+ std::vector<std::string> children;
+ status = env_->GetChildren(filepath, &children);
+ EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+}
+
+TEST_P(ModularFileSystemTest, TestGetChildrenPathNotFound) {
+ const std::string target_path = GetURIForPath("a_dir");
+ std::vector<std::string> children;
+ Status status = env_->GetChildren(target_path, &children);
+ EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+}
+
+TEST_P(ModularFileSystemTest, TestGetChildrenPathIsInvalid) {
+ const std::string filepath = GetURIForPath("a_file");
+ std::unique_ptr<WritableFile> file;
+ Status status = env_->NewWritableFile(filepath, &file);
+ if (!status.ok()) GTEST_SKIP() << "NewWritableFile() not supported";
+
+ const std::string target_path = GetURIForPath("a_file/a_new_dir");
+ std::vector<std::string> children;
+ status = env_->GetChildren(target_path, &children);
+ EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+}
+
TEST_P(ModularFileSystemTest, TestAppendAndTell) {
const std::string filename = GetURIForPath("a_file");
std::unique_ptr<WritableFile> file;
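The `UninmplementedOrReturnsCode` predicate (spelling as in the test file) is what lets each of these tests pass against filesystems that simply do not implement the operation. A sketch of the shape such a predicate can take, for reference; the exact definition lives elsewhere in this test file:

```cpp
// Sketch: accept UNIMPLEMENTED (operation unsupported by this filesystem),
// otherwise require the expected canonical error code.
static bool UninmplementedOrReturnsCode(tensorflow::Status actual_status,
                                        tensorflow::error::Code expected_code) {
  const tensorflow::error::Code code = actual_status.code();
  return code == tensorflow::error::UNIMPLEMENTED || code == expected_code;
}
```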
diff --git a/tensorflow/c/experimental/filesystem/plugins/posix/posix_filesystem.cc b/tensorflow/c/experimental/filesystem/plugins/posix/posix_filesystem.cc
index 9e6da07..bde2dd9 100644
--- a/tensorflow/c/experimental/filesystem/plugins/posix/posix_filesystem.cc
+++ b/tensorflow/c/experimental/filesystem/plugins/posix/posix_filesystem.cc
@@ -12,6 +12,7 @@
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
+#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
@@ -308,6 +309,30 @@
}
}
+static int RemoveSpecialDirectoryEntries(const struct dirent* d) {
+ return strcmp(d->d_name, ".") != 0 && strcmp(d->d_name, "..") != 0;
+}
+
+static int GetChildren(const TF_Filesystem* filesystem, const char* path,
+ char*** entries, TF_Status* status) {
+ struct dirent** dir_entries = nullptr;
+  /* We don't promise that entries will be sorted. */
+ int num_entries =
+ scandir(path, &dir_entries, RemoveSpecialDirectoryEntries, nullptr);
+ if (num_entries < 0) {
+ TF_SetStatusFromIOError(status, errno, path);
+ } else {
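+    // scandir() heap-allocates both dir_entries and each dirent; copy the
+    // names with strdup() and free the scandir() allocations as we go.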
+ *entries = static_cast<char**>(calloc(num_entries, sizeof((*entries)[0])));
+ for (int i = 0; i < num_entries; i++) {
+ (*entries)[i] = strdup(dir_entries[i]->d_name);
+ free(dir_entries[i]);
+ }
+ free(dir_entries);
+ }
+
+ return num_entries;
+}
+
} // namespace tf_posix_filesystem
void TF_InitPlugin(TF_Status* status) {
@@ -344,6 +369,8 @@
tf_posix_filesystem::Stat,
/*is_directory=*/nullptr,
/*get_file_size=*/nullptr,
+ /*translate_name=*/nullptr,
+ tf_posix_filesystem::GetChildren,
nullptr,
};
diff --git a/tensorflow/cc/saved_model/loader.cc b/tensorflow/cc/saved_model/loader.cc
index 7815dbd..3bb4660 100644
--- a/tensorflow/cc/saved_model/loader.cc
+++ b/tensorflow/cc/saved_model/loader.cc
@@ -59,7 +59,7 @@
constexpr char kLoadAttemptSuccess[] = "success";
uint64 GetLatencyMicroseconds(const uint64 start_microseconds) {
- const uint64 end_microseconds = Env::Default()->NowMicros();
+ const uint64 end_microseconds = EnvTime::NowMicros();
// Avoid clock skew.
if (end_microseconds < start_microseconds) return 0;
return end_microseconds - start_microseconds;
diff --git a/tensorflow/compiler/jit/flags.cc b/tensorflow/compiler/jit/flags.cc
index 2153d3c..53f9b70 100644
--- a/tensorflow/compiler/jit/flags.cc
+++ b/tensorflow/compiler/jit/flags.cc
@@ -130,7 +130,6 @@
ops_flags = new XlaOpsCommonFlags;
ops_flags->tf_xla_always_defer_compilation = false;
- ops_flags->tf_xla_noresolve_compile_time_constants = false;
jitter_flags = new IntroduceFloatingPointJitterPassFlags;
jitter_flags->jitter_amount = 1e-5;
@@ -162,9 +161,6 @@
Flag("tf_xla_always_defer_compilation",
&ops_flags->tf_xla_always_defer_compilation, ""),
- Flag("tf_xla_noresolve_compile_time_constants",
- &ops_flags->tf_xla_noresolve_compile_time_constants,
- "Do not perform constant folding in XlaCompiler::CompileGraph"),
Flag("tf_introduce_floating_point_jitter_to_tensors",
setter_for_jitter_tensor_names, "",
diff --git a/tensorflow/compiler/jit/flags.h b/tensorflow/compiler/jit/flags.h
index baed7ad..9307874 100644
--- a/tensorflow/compiler/jit/flags.h
+++ b/tensorflow/compiler/jit/flags.h
@@ -91,14 +91,6 @@
// If true, _XlaCompile always refuses to compile the cluster, which means the
// XLA clusters always run in the TF executor. Defaults to false.
bool tf_xla_always_defer_compilation;
-
- // If true, sets compile_options.resolve_compile_time_constants to false,
- // which stops the bridge from using the HloEvaluator for constant resolution
- // in XlaCompiler::CompileGraph.
- //
- // For some models, constant folding during compile graph experiences a
- // non-linear blow up, which overshadows both compilation and execution.
- bool tf_xla_noresolve_compile_time_constants;
};
// Flags for the build_xla_ops pass.
diff --git a/tensorflow/compiler/jit/kernels/xla_ops.cc b/tensorflow/compiler/jit/kernels/xla_ops.cc
index edb19bc..06c74be 100644
--- a/tensorflow/compiler/jit/kernels/xla_ops.cc
+++ b/tensorflow/compiler/jit/kernels/xla_ops.cc
@@ -326,8 +326,6 @@
}
XlaCompiler::CompileOptions compile_options;
compile_options.is_entry_computation = true;
- compile_options.resolve_compile_time_constants =
- !GetXlaOpsCommonFlags().tf_xla_noresolve_compile_time_constants;
// Optimization: where possible, have the computation return a naked array
// rather than a one-element tuple.
compile_options.always_return_tuple = false;
diff --git a/tensorflow/compiler/jit/xla_compile_on_demand_op.cc b/tensorflow/compiler/jit/xla_compile_on_demand_op.cc
index 1c224e0..45ce68b 100644
--- a/tensorflow/compiler/jit/xla_compile_on_demand_op.cc
+++ b/tensorflow/compiler/jit/xla_compile_on_demand_op.cc
@@ -193,10 +193,6 @@
XlaCompiler::CompileOptions compile_options;
compile_options.is_entry_computation = true;
- // Optimization: don't resolve constants. If we resolve constants we never
- // emit them on the device, meaning that if they are needed by a following
- // computation the host has to transfer them.
- compile_options.resolve_compile_time_constants = false;
// Optimization: where possible, have the computation return a naked array
// rather than a one-element tuple.
compile_options.always_return_tuple = false;
diff --git a/tensorflow/compiler/mlir/lite/common/tfl_pass_config.h b/tensorflow/compiler/mlir/lite/common/tfl_pass_config.h
index 590f898..aec6387 100644
--- a/tensorflow/compiler/mlir/lite/common/tfl_pass_config.h
+++ b/tensorflow/compiler/mlir/lite/common/tfl_pass_config.h
@@ -33,7 +33,8 @@
trim_functions_whitelist({}),
quant_specs(specs),
skip_control_dialect(false),
- form_clusters(false) {}
+ form_clusters(false),
+ inline_functions(false) {}
// If `emit_builtin_tflite_ops` is true, TF Lite legalization passes will be
// added, which produces TF Lite ops.
@@ -53,6 +54,9 @@
// are formed by grouping consecutive ops of the same device, under a
// `tf_device.launch` op.
bool form_clusters;
+ // Inline function calls within the main function in the MLIR module, prior
+ // to legalization to TFLite.
+ bool inline_functions;
};
} // namespace TFL
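A minimal usage sketch for the new option (names and namespaces as in this header; the `QuantizationSpecs` construction is left default purely for illustration):

```cpp
// Sketch: request function inlining ahead of TFLite legalization.
mlir::TFL::QuantizationSpecs quant_specs;
mlir::TFL::PassConfig pass_config(quant_specs);
pass_config.inline_functions = true;  // inline calls into main before legalizing
```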
diff --git a/tensorflow/compiler/mlir/lite/ir/tfl_ops.h b/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
index c60a17a..4fcfea7 100644
--- a/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
+++ b/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
@@ -23,7 +23,7 @@
#include "mlir/IR/Attributes.h" // TF:local_config_mlir
#include "mlir/IR/Builders.h" // TF:local_config_mlir
#include "mlir/IR/Dialect.h" // TF:local_config_mlir
-#include "mlir/IR/OpDefinition.h" // TF:local_config_mlir
+#include "mlir/IR/OpImplementation.h" // TF:local_config_mlir
#include "mlir/IR/StandardTypes.h" // TF:local_config_mlir
#include "mlir/Support/Functional.h" // TF:local_config_mlir
#include "mlir/Support/LLVM.h" // TF:local_config_mlir
diff --git a/tensorflow/compiler/mlir/lite/ir/tfl_ops.td b/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
index 2eec45e..b119219 100644
--- a/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
+++ b/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
@@ -298,7 +298,8 @@
//===----------------------------------------------------------------------===//
// TFL op definitions.
//===----------------------------------------------------------------------===//
-def TFL_AbsOp : TFL_Op<"abs", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_AbsOp : TFL_Op<"abs", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Absolute value operator";
let description = [{
@@ -307,9 +308,9 @@
an output element, this operation computes \\(y = |x|\\).
}];
- let arguments = (ins AnyTensor:$x);
+ let arguments = (ins TFL_FpTensor:$x);
- let results = (outs AnyTensor:$y);
+ let results = (outs TFL_FpTensor:$y);
let hasFolder = 1;
}
@@ -558,7 +559,8 @@
def TFL_Conv2DOp : TFL_ConvOp<"conv_2d", "Convolution", 0>;
-def TFL_CosOp: TFL_Op<"cos", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_CosOp: TFL_Op<"cos", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Cosine operator";
let description = [{
@@ -1263,7 +1265,7 @@
let printer = [{ return mlir::impl::printOneResultOp(getOperation(), p); }];
}
-def TFL_LogicalNotOp : TFL_Op<"logical_not", [NoSideEffect]> {
+def TFL_LogicalNotOp : TFL_Op<"logical_not", [NoSideEffect, NoQuantizableResult]> {
let summary = "Logical NOT operator";
let description = [{
@@ -1311,16 +1313,17 @@
let results = (outs TensorOf<[AnyFloat, QI8, QUI8, QI16, QUI16]>:$y);
}
-def TFL_LogOp: TFL_Op<"log", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_LogOp: TFL_Op<"log", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Natural logarithm operator";
let description = [{
Performs element-wise natural logarithm operation on input.
}];
- let arguments = (ins AnyTensor:$x);
+ let arguments = (ins TFL_FpTensor:$x);
- let results = (outs AnyTensor:$y);
+ let results = (outs TFL_FpTensor:$y);
let hasFolder = 1;
}
@@ -2038,7 +2041,8 @@
let hasOptions = 1;
}
-def TFL_SinOp: TFL_Op<"sin", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_SinOp: TFL_Op<"sin", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Sine operator";
let description = [{
@@ -2078,7 +2082,8 @@
let hasOptions = 1;
}
-def TFL_SqrtOp: TFL_Op<"sqrt", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_SqrtOp: TFL_Op<"sqrt", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Square root operator";
let description = [{
@@ -2092,16 +2097,17 @@
let hasFolder = 1;
}
-def TFL_SquareOp: TFL_Op<"square", [NoSideEffect, SameOperandsAndResultType]> {
+def TFL_SquareOp: TFL_Op<"square", [
+ NoSideEffect, SameOperandsAndResultType, NoQuantizableResult]> {
let summary = "Square operator";
let description = [{
Computes element-wise Square of input
}];
- let arguments = (ins TensorOf<[AnyFloat, QI8, QUI8]>:$x);
+ let arguments = (ins TFL_FpTensor:$x);
- let results = (outs TensorOf<[AnyFloat, QI8, QUI8]>:$y);
+ let results = (outs TFL_FpTensor:$y);
let hasOptions = 0b1;
diff --git a/tensorflow/compiler/mlir/lite/tests/end2end/custom_opdef.pbtxt b/tensorflow/compiler/mlir/lite/tests/end2end/custom_opdef.pbtxt
index fa44cf8..7036ef7 100644
--- a/tensorflow/compiler/mlir/lite/tests/end2end/custom_opdef.pbtxt
+++ b/tensorflow/compiler/mlir/lite/tests/end2end/custom_opdef.pbtxt
@@ -37,7 +37,7 @@
}
# CHECK: func @main(%arg0: tensor<4xi32>, %arg1: tensor<4xi32>) -> tensor<*xi32>
-# CHECK-NEXT: attributes {tf.entry_function = {inputs = "input0,input1", outputs = "output"}} {
+# CHECK: attributes {tf.entry_function = {inputs = "input0,input1", outputs = "output"}} {
# CHECK-NEXT: %0 = "tf.BannaPotatoSaladWithColeslaw"(%arg0, %arg1) {T = "tfdtype$DT_INT32", device = "", name = "output"} : (tensor<4xi32>, tensor<4xi32>) -> tensor<*xi32>
# CHECK-NEXT: return %0 : tensor<*xi32>
# CHECK-NEXT: }
diff --git a/tensorflow/compiler/mlir/lite/tests/end2end/graph-input-node.pbtxt b/tensorflow/compiler/mlir/lite/tests/end2end/graph-input-node.pbtxt
index fd0e627..e204b2b 100644
--- a/tensorflow/compiler/mlir/lite/tests/end2end/graph-input-node.pbtxt
+++ b/tensorflow/compiler/mlir/lite/tests/end2end/graph-input-node.pbtxt
@@ -48,6 +48,6 @@
}
# CHECK: func @main(%arg0: tensor<4xi32>) -> tensor<4xi32>
-# CHECK-NEXT: attributes {tf.entry_function = {inputs = "input", outputs = "output"}} {
+# CHECK: attributes {tf.entry_function = {inputs = "input", outputs = "output"}} {
# CHECK-NEXT: return %arg0 : tensor<4xi32>
# CHECK-NEXT: }
diff --git a/tensorflow/compiler/mlir/lite/tests/end2end/ophint_lstm.pbtxt b/tensorflow/compiler/mlir/lite/tests/end2end/ophint_lstm.pbtxt
index a29ca6c..5b39fc2 100644
--- a/tensorflow/compiler/mlir/lite/tests/end2end/ophint_lstm.pbtxt
+++ b/tensorflow/compiler/mlir/lite/tests/end2end/ophint_lstm.pbtxt
@@ -7784,7 +7784,7 @@
}
# CHECK: func @main(%arg0: tensor<1x3x3xf32>) -> tensor<1x3xf32>
-# CHECK-NEXT: attributes {tf.entry_function = {inputs = "INPUT", outputs = "OUTPUT"}} {
+# CHECK: attributes {tf.entry_function = {inputs = "INPUT", outputs = "OUTPUT"}} {
# CHECK: [[VAL_1:%.*]] = constant dense<{{\[\[}}-0.400154352, 0.739109992, 0.201825857], [0.678572893, 0.32076478, 0.949867963], [-0.807729483, -5.324750e-01, 0.148033619]]> : tensor<3x3xf32>
# CHECK: [[VAL_2:%.*]] = constant dense<{{\[\[}}0.886177539, -0.606141329, -0.451275587], [0.325554609, 0.691527605, -0.676239967], [0.219799042, 0.626042128, -0.597596407]]> : tensor<3x3xf32>
# CHECK: [[VAL_3:%.*]] = constant dense<{{\[\[}}-0.493826151, -0.391061306, -0.349843264], [-0.0213134289, 0.558384657, -0.51513052], [0.427886248, 0.618100405, -0.187585592]]> : tensor<3x3xf32>
diff --git a/tensorflow/compiler/mlir/lite/tests/legalize-tf.mlir b/tensorflow/compiler/mlir/lite/tests/legalize-tf.mlir
index bab02c4..c2653f3 100644
--- a/tensorflow/compiler/mlir/lite/tests/legalize-tf.mlir
+++ b/tensorflow/compiler/mlir/lite/tests/legalize-tf.mlir
@@ -12,12 +12,12 @@
return %7: tensor<1xi32>
// CHECK-LABEL: addRelu
-// CHECK: %0 = tfl.add %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
+// CHECK: tfl.add %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
// CHECK: %1 = tfl.add %arg0, %0 {fused_activation_function = "RELU"} : tensor<1xi32>
// CHECK: %2 = "tfl.relu"(%arg0) : (tensor<1xi32>) -> tensor<1xi32>
// CHECK: %3 = tfl.add %2, %1 {fused_activation_function = "RELU6"} : tensor<1xi32>
// CHECK: %4 = tfl.add %3, %2 {fused_activation_function = "RELU6"} : tensor<1xi32>
-// CHECK: return %4 : tensor<1xi32>
+// CHECK: return
}
func @LeakyRelu(%arg0: tensor<1xf32>) -> tensor<1xf32> {
@@ -25,7 +25,7 @@
return %2: tensor<1xf32>
// CHECK-LABEL: LeakyRelu
-// CHECK: %0 = "tfl.leaky_relu"(%arg0) {alpha = 1.000000e-01 : f32} : (tensor<1xf32>) -> tensor<1xf32>
+// CHECK: "tfl.leaky_relu"(%arg0) {alpha = 1.000000e-01 : f32} : (tensor<1xf32>) -> tensor<1xf32>
}
func @biasAdd(%arg0: tensor<1x10x10x32xf32>, %arg1: tensor<32xf32>) -> tensor<1x10x10x32xf32> {
@@ -35,7 +35,7 @@
return %2 : tensor<1x10x10x32xf32>
// CHECK-LABEL: biasAdd
-// CHECK: %0 = "tfl.add"(%arg0, %arg1) {fused_activation_function = "NONE"} : (tensor<1x10x10x32xf32>, tensor<32xf32>) -> tensor<1x10x10x32xf32>
+// CHECK: "tfl.add"(%arg0, %arg1) {fused_activation_function = "NONE"} : (tensor<1x10x10x32xf32>, tensor<32xf32>) -> tensor<1x10x10x32xf32>
// CHECK: %1 = "tfl.add"(%0, %arg1) {fused_activation_function = "RELU6"} : (tensor<1x10x10x32xf32>, tensor<32xf32>) -> tensor<1x10x10x32xf32>
}
@@ -44,7 +44,7 @@
return %0 : tensor<1x10x10x32xi32>
// CHECK-LABEL: biasAddInt
-// CHECK: %0 = "tf.BiasAdd"(%arg0, %arg1)
+// CHECK: "tf.BiasAdd"(%arg0, %arg1)
}
func @squeezeAndReshape(%arg0: tensor<1x1x10xf32>, %arg1: tensor<?x10xf32>) -> i32 {
@@ -56,11 +56,11 @@
return %4 : i32
// CHECK-LABEL: squeezeAndReshape
// CHECK: %cst = constant dense<[2, 5]> : tensor<2xi32>
-// CHECK: %0 = "tfl.squeeze"(%arg0) {squeeze_dims = [0]} : (tensor<1x1x10xf32>) -> tensor<1x10xf32>
+// CHECK: "tfl.squeeze"(%arg0) {squeeze_dims = [0]} : (tensor<1x1x10xf32>) -> tensor<1x10xf32>
// CHECK: %1 = "tfl.squeeze"(%arg1) {squeeze_dims = []} : (tensor<?x10xf32>) -> tensor<*xf32>
// CHECK: %2 = "tfl.reshape"(%0, %cst) : (tensor<1x10xf32>, tensor<2xi32>) -> tensor<2x5xf32>
// CHECK: %3 = "some_op"(%1, %2) : (tensor<*xf32>, tensor<2x5xf32>) -> i32
-// CHECK: return %3 : i32
+// CHECK: return
}
func @dynamicReshape(%arg0: tensor<*xf32>, %arg1: tensor<2xi32>) -> tensor<?x?xf32> {
@@ -109,7 +109,7 @@
return %7 : tensor<1x1x1x16xf32>
// CHECK-LABEL: func @avgPool2D
-// CHECK: %0 = "tfl.average_pool_2d"(%arg0) {filter_height = 3 : i32, filter_width = 6 : i32, fused_activation_function = "NONE", padding = "VALID", stride_h = 3 : i32, stride_w = 1 : i32} : (tensor<1x6x6x16xf32>) -> tensor<1x1x1x16xf32>
+// CHECK: "tfl.average_pool_2d"(%arg0) {filter_height = 3 : i32, filter_width = 6 : i32, fused_activation_function = "NONE", padding = "VALID", stride_h = 3 : i32, stride_w = 1 : i32} : (tensor<1x6x6x16xf32>) -> tensor<1x1x1x16xf32>
// CHECK: %1 = "tf.AvgPool"(%arg0)
// CHECK: %2 = "tf.AvgPool"(%arg0)
// CHECK: %3 = "tf.AvgPool"(%arg0)
@@ -120,7 +120,7 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL: softmax
-// CHECK: %0 = "tfl.softmax"(%arg0) {beta = 1.000000e+00 : f32} : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.softmax"(%arg0) {beta = 1.000000e+00 : f32} : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @fakeQuantArgsFalse(%arg0: tensor<8x8x8x8xf32>) -> tensor<8x8x8x8xf32> {
@@ -128,7 +128,7 @@
return %0 : tensor<8x8x8x8xf32>
// CHECK-LABEL: fakeQuantArgsFalse
- // CHECK: %0 = "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>}
+ // CHECK: "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>}
// CHECK: %1 = "tfl.dequantize"(%0) : (tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>) -> tensor<8x8x8x8xf32>
}
@@ -137,7 +137,7 @@
return %0 : tensor<8x8x8x8xf32>
// CHECK-LABEL: fakeQuantArgsTrue
- // CHECK: %0 = "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8<1:255>:f32, 0.001181102379804521:86>>} : (tensor<8x8x8x8xf32>) -> tensor<8x8x8x8x!quant.uniform<u8<1:255>:f32, 0.001181102379804521:86>>
+ // CHECK: "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8<1:255>:f32, 0.001181102379804521:86>>} : (tensor<8x8x8x8xf32>) -> tensor<8x8x8x8x!quant.uniform<u8<1:255>:f32, 0.001181102379804521:86>>
// CHECK: %1 = "tfl.dequantize"(%0) : (tensor<8x8x8x8x!quant.uniform<u8<1:255>:f32, 0.001181102379804521:86>>) -> tensor<8x8x8x8xf32>
}
@@ -148,7 +148,7 @@
return %0 : tensor<8x8x8x8xf32>
// CHECK-LABEL: fakeQuantVarsFalse
- // CHECK: %0 = "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>}
+ // CHECK: "tfl.quantize"(%arg0) {qtype = tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>}
// CHECK: %1 = "tfl.dequantize"(%0) : (tensor<8x8x8x8x!quant.uniform<u8:f32, 0.0011764706057660721:85>>) -> tensor<8x8x8x8xf32>
}
@@ -157,7 +157,7 @@
return %0 : tensor<8x8x8x8xf32>
// CHECK-LABEL: fakeQuantVarsTrue
- // CHECK: %0 = "tf.FakeQuantWithMinMaxVars"(%arg0, %arg1, %arg2) {max = 1.000000e+00 : f32, min = 0.000000e+00 : f32, narrow_range = true, num_bits = 3 : i64}
+ // CHECK: "tf.FakeQuantWithMinMaxVars"(%arg0, %arg1, %arg2) {max = 1.000000e+00 : f32, min = 0.000000e+00 : f32, narrow_range = true, num_bits = 3 : i64}
}
func @const() -> tensor<2xi32> {
@@ -175,7 +175,7 @@
return %2: tensor<2xi32>
// CHECK-LABEL: shape
-// CHECK: %0 = "tfl.shape"(%arg0) : (tensor<?x1001xf32>) -> tensor<2xi32>
+// CHECK: "tfl.shape"(%arg0) : (tensor<?x1001xf32>) -> tensor<2xi32>
// CHECK: %1 = "tfl.shape"(%arg0) : (tensor<?x1001xf32>) -> tensor<2xi32>
}
@@ -184,7 +184,7 @@
return %0 : tensor<?x?x?xf32>
// CHECK-LABEL:fill
-// CHECK: %0 = "tfl.fill"(%arg0, %arg1) : (tensor<3xi32>, tensor<f32>) -> tensor<?x?x?xf32>
+// CHECK: "tfl.fill"(%arg0, %arg1) : (tensor<3xi32>, tensor<f32>) -> tensor<?x?x?xf32>
}
func @argmin(%arg0: tensor<3xi32>, %arg1: tensor<i32>) -> tensor<i32> {
@@ -192,56 +192,56 @@
return %0 : tensor<i32>
// CHECK-LABEL: argmin
-// CHECK: %0 = "tfl.arg_min"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i32>
+// CHECK: "tfl.arg_min"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i32>
}
func @sigmoid(%arg0: tensor<?x88xf16>) -> tensor<?x88xf16> {
%0 = "tf.Sigmoid"(%arg0) : (tensor<?x88xf16>) -> tensor<?x88xf16>
return %0 : tensor<?x88xf16>
// CHECK-LABEL: sigmoid
-// CHECK: %0 = "tfl.logistic"(%arg0) : (tensor<?x88xf16>) -> tensor<?x88xf16>
+// CHECK: "tfl.logistic"(%arg0) : (tensor<?x88xf16>) -> tensor<?x88xf16>
}
func @sqrt(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.Sqrt"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: sqrt
-// CHECK: %0 = "tfl.sqrt"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.sqrt"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @square(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.Square"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: square
-// CHECK: %0 = "tfl.square"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.square"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @neg(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.Neg"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: neg
-// CHECK: %0 = "tfl.neg"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.neg"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @log(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.Log"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: log
-// CHECK: %0 = "tfl.log"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.log"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @log_softmax(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.LogSoftmax"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: log_softmax
-// CHECK: %0 = "tfl.log_softmax"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.log_softmax"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @zeros_like(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
%0 = "tf.ZerosLike"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
return %0 : tensor<8x16xf32>
// CHECK-LABEL: zeros_like
-// CHECK: %0 = "tfl.zeros_like"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.zeros_like"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @divRelu(%arg0: tensor<1xi32>, %arg1: tensor<1xi32>) -> tensor<1xi32> {
@@ -254,11 +254,11 @@
return %5: tensor<1xi32>
// CHECK-LABEL: divRelu
-// CHECK: %0 = tfl.div %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
+// CHECK: tfl.div %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
// CHECK: %1 = tfl.div %arg0, %0 {fused_activation_function = "RELU"} : tensor<1xi32>
// CHECK: %2 = "tfl.relu"(%arg0) : (tensor<1xi32>) -> tensor<1xi32>
// CHECK: %3 = tfl.div %2, %1 {fused_activation_function = "RELU6"} : tensor<1xi32>
-// CHECK: return %3 : tensor<1xi32>
+// CHECK: return
}
func @squaredDifferenceRelu(tensor<1xi32>, tensor<1xi32>) -> tensor<1xi32> {
@@ -268,9 +268,9 @@
return %1: tensor<1xi32>
// CHECK-LABEL: squaredDifferenceRelu
-// CHECK: %0 = tfl.squared_difference %arg0, %arg1 : tensor<1xi32>
+// CHECK: tfl.squared_difference %arg0, %arg1 : tensor<1xi32>
// CHECK: %1 = "tfl.relu6"(%0) : (tensor<1xi32>) -> tensor<1xi32>
-// CHECK: return %1 : tensor<1xi32>
+// CHECK: return
}
func @maxPool2D(%arg0: tensor<1x1x1x16xf32>) -> tensor<1x1x1x16xf32> {
@@ -292,7 +292,7 @@
return %7 : tensor<1x1x1x16xf32>
// CHECK-LABEL: func @maxPool2D
-// CHECK: %0 = "tfl.max_pool_2d"(%arg0) {filter_height = 3 : i32, filter_width = 6 : i32, fused_activation_function = "NONE", padding = "VALID", stride_h = 3 : i32, stride_w = 1 : i32} : (tensor<1x1x1x16xf32>) -> tensor<1x1x1x16xf32>
+// CHECK: "tfl.max_pool_2d"(%arg0) {filter_height = 3 : i32, filter_width = 6 : i32, fused_activation_function = "NONE", padding = "VALID", stride_h = 3 : i32, stride_w = 1 : i32} : (tensor<1x1x1x16xf32>) -> tensor<1x1x1x16xf32>
// CHECK: %1 = "tf.MaxPool"(%arg0)
// CHECK: %2 = "tf.MaxPool"(%arg0)
// CHECK: %3 = "tf.MaxPool"(%arg0)
@@ -303,7 +303,7 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL:abs
-// CHECK: %0 = "tfl.abs"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.abs"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @any(%arg0: tensor<2x2xi1>, %arg1: tensor<i32>) -> tensor<i1> {
@@ -311,7 +311,7 @@
return %0 : tensor<i1>
// CHECK-LABEL:any
-// CHECK: %0 = "tfl.reduce_any"(%arg0, %arg1) {keep_dims = false} : (tensor<2x2xi1>, tensor<i32>) -> tensor<i1>
+// CHECK: "tfl.reduce_any"(%arg0, %arg1) {keep_dims = false} : (tensor<2x2xi1>, tensor<i32>) -> tensor<i1>
}
func @ceil(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
@@ -319,8 +319,8 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL: ceil
-// CHECK: %0 = "tfl.ceil"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
-// CHECK: return %0 : tensor<8x16xf32>
+// CHECK: "tfl.ceil"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: return
}
func @cos(%arg0: tensor<f32>) -> tensor<f32> {
@@ -328,7 +328,7 @@
return %0 : tensor<f32>
// CHECK-LABEL:cos
-// CHECK: %0 = "tfl.cos"(%arg0) : (tensor<f32>) -> tensor<f32>
+// CHECK: "tfl.cos"(%arg0) : (tensor<f32>) -> tensor<f32>
}
func @elu(%arg0: tensor<11x16xf32>) -> tensor<11x16xf32> {
@@ -336,7 +336,7 @@
return %0 : tensor<11x16xf32>
// CHECK-LABEL:elu
-// CHECK: %0 = "tfl.elu"(%arg0) : (tensor<11x16xf32>) -> tensor<11x16xf32>
+// CHECK: "tfl.elu"(%arg0) : (tensor<11x16xf32>) -> tensor<11x16xf32>
}
func @expandDims(%arg0: tensor<2x2xf32>, %arg1: tensor<i32>) -> tensor<1x2x2xf32> {
@@ -344,7 +344,7 @@
return %0 : tensor<1x2x2xf32>
// CHECK-LABEL:expandDims
-// CHECK: %0 = "tfl.expand_dims"(%arg0, %arg1) : (tensor<2x2xf32>, tensor<i32>) -> tensor<1x2x2xf32>
+// CHECK: "tfl.expand_dims"(%arg0, %arg1) : (tensor<2x2xf32>, tensor<i32>) -> tensor<1x2x2xf32>
}
func @squeezeDefault(%arg0: tensor<1x2x2xf32>) -> tensor<2x2xf32> {
@@ -352,7 +352,7 @@
return %0 : tensor<2x2xf32>
// CHECK-LABEL:squeezeDefault
-// CHECK: %0 = "tfl.squeeze"(%arg0) {squeeze_dims = []} : (tensor<1x2x2xf32>) -> tensor<2x2xf32>
+// CHECK: "tfl.squeeze"(%arg0) {squeeze_dims = []} : (tensor<1x2x2xf32>) -> tensor<2x2xf32>
}
func @squeezeSingleAxis(%arg0: tensor<2x1x2xf32>) -> tensor<2x2xf32> {
@@ -360,7 +360,7 @@
return %0 : tensor<2x2xf32>
// CHECK-LABEL:squeezeSingleAxis
-// CHECK: %0 = "tfl.squeeze"(%arg0) {squeeze_dims = [1]} : (tensor<2x1x2xf32>) -> tensor<2x2xf32>
+// CHECK: "tfl.squeeze"(%arg0) {squeeze_dims = [1]} : (tensor<2x1x2xf32>) -> tensor<2x2xf32>
}
func @squeezeTwoAxes(%arg0: tensor<1x2x1x2xf32>) -> tensor<2x2xf32> {
@@ -368,7 +368,7 @@
return %0 : tensor<2x2xf32>
// CHECK-LABEL:squeezeTwoAxes
-// CHECK: %0 = "tfl.squeeze"(%arg0) {squeeze_dims = [0, 2]} : (tensor<1x2x1x2xf32>) -> tensor<2x2xf32>
+// CHECK: "tfl.squeeze"(%arg0) {squeeze_dims = [0, 2]} : (tensor<1x2x1x2xf32>) -> tensor<2x2xf32>
}
func @gatherScalarIndices(%arg0 : tensor<3x2xf32>, %arg1 : tensor<i32>) -> tensor<2xf32> {
@@ -376,7 +376,7 @@
return %0 : tensor<2xf32>
// CHECK-LABEL:gatherScalarIndices
-// CHECK: %0 = "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<3x2xf32>, tensor<i32>) -> tensor<2xf32>
+// CHECK: "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<3x2xf32>, tensor<i32>) -> tensor<2xf32>
}
func @gatherVectorIndices(%arg0 : tensor<2xf32>, %arg1 : tensor<3xi32>) -> tensor<3xf32> {
@@ -384,7 +384,7 @@
return %0 : tensor<3xf32>
// CHECK-LABEL:gatherVectorIndices
-// CHECK: %0 = "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<2xf32>, tensor<3xi32>) -> tensor<3xf32>
+// CHECK: "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<2xf32>, tensor<3xi32>) -> tensor<3xf32>
}
func @gatherHigherRankIndices(%arg0 : tensor<2x3x6xf32>, %arg1 : tensor<4x5xi32>) -> tensor<4x5x3x6xf32> {
@@ -392,7 +392,7 @@
return %0 : tensor<4x5x3x6xf32>
// CHECK-LABEL:gatherHigherRankIndices
-// CHECK: %0 = "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<2x3x6xf32>, tensor<4x5xi32>) -> tensor<4x5x3x6xf32>
+// CHECK: "tfl.gather"(%arg0, %arg1) {axis = 0 : i32} : (tensor<2x3x6xf32>, tensor<4x5xi32>) -> tensor<4x5x3x6xf32>
}
func @gatherNdVectorIndices(%arg0 : tensor<3x2x2xf32>, %arg1 : tensor<2xi32>) -> tensor<2xf32> {
@@ -400,7 +400,7 @@
return %0 : tensor<2xf32>
// CHECK-LABEL:gatherNdVectorIndices
-// CHECK: %0 = "tfl.gather_nd"(%arg0, %arg1) : (tensor<3x2x2xf32>, tensor<2xi32>) -> tensor<2xf32>
+// CHECK: "tfl.gather_nd"(%arg0, %arg1) : (tensor<3x2x2xf32>, tensor<2xi32>) -> tensor<2xf32>
}
func @gatherNdHigherRankIndices(%arg0 : tensor<4x3x2xf32>, %arg1 : tensor<2x2xi32>) -> tensor<2x2xf32> {
@@ -408,7 +408,7 @@
return %0 : tensor<2x2xf32>
// CHECK-LABEL:gatherNdHigherRankIndices
-// CHECK: %0 = "tfl.gather_nd"(%arg0, %arg1) : (tensor<4x3x2xf32>, tensor<2x2xi32>) -> tensor<2x2xf32>
+// CHECK: "tfl.gather_nd"(%arg0, %arg1) : (tensor<4x3x2xf32>, tensor<2x2xi32>) -> tensor<2x2xf32>
}
func @gatherV2VectorIndices(%arg0 : tensor<1x2x20xf32>, %arg1 : tensor<3x5xi32>) -> tensor<1x3x5x20xf32> {
@@ -417,7 +417,7 @@
return %1 : tensor<1x3x5x20xf32>
// CHECK-LABEL:gatherV2VectorIndices
-// CHECK: %0 = "tfl.gather"(%arg0, %arg1) {axis = 1 : i32} : (tensor<1x2x20xf32>, tensor<3x5xi32>) -> tensor<1x3x5x20xf32>
+// CHECK: "tfl.gather"(%arg0, %arg1) {axis = 1 : i32} : (tensor<1x2x20xf32>, tensor<3x5xi32>) -> tensor<1x3x5x20xf32>
}
func @gatherV2VectorIndicesNegAxis(%arg0 : tensor<1x2x20xf32>, %arg1 : tensor<3x5xi32>) -> tensor<1x2x3x5xf32> {
@@ -426,7 +426,7 @@
return %1 : tensor<1x2x3x5xf32>
// CHECK-LABEL:gatherV2VectorIndices
-// CHECK: %0 = "tfl.gather"(%arg0, %arg1) {axis = -1 : i32} : (tensor<1x2x20xf32>, tensor<3x5xi32>) -> tensor<1x2x3x5xf32>
+// CHECK: "tfl.gather"(%arg0, %arg1) {axis = -1 : i32} : (tensor<1x2x20xf32>, tensor<3x5xi32>) -> tensor<1x2x3x5xf32>
}
func @gatherV2NonZeroBatchDims(%arg0 : tensor<1x2x20xf32>, %arg1 : tensor<3x5xi32>) -> tensor<1x2x3x5xf32> {
@@ -443,8 +443,8 @@
return %0 : tensor<8x16xi1>
// CHECK-LABEL: greater
-// CHECK: %0 = "tfl.greater"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
-// CHECK: return %0 : tensor<8x16xi1>
+// CHECK: "tfl.greater"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
+// CHECK: return
}
func @greater_equal(%arg0: tensor<8x16xf32>, %arg1: tensor<8x16xf32>) -> tensor<8x16xi1> {
@@ -452,8 +452,8 @@
return %0 : tensor<8x16xi1>
// CHECK-LABEL: greater_equal
-// CHECK: %0 = "tfl.greater_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
-// CHECK: return %0 : tensor<8x16xi1>
+// CHECK: "tfl.greater_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
+// CHECK: return
}
//TODO(b/136498739): Add failure test for non-broadcastable types, since currently
@@ -463,8 +463,8 @@
return %0 : tensor<8x16xi1>
// CHECK-LABEL: less_equal
-// CHECK: %0 = "tfl.less_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
-// CHECK: return %0 : tensor<8x16xi1>
+// CHECK: "tfl.less_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
+// CHECK: return
}
func @rank(%arg0: tensor<*xf32>) -> tensor<1xi32> {
@@ -472,7 +472,7 @@
return %0 : tensor<1xi32>
// CHECK-LABEL:rank
-// CHECK: %0 = "tfl.rank"(%arg0) : (tensor<*xf32>) -> tensor<1xi32>
+// CHECK: "tfl.rank"(%arg0) : (tensor<*xf32>) -> tensor<1xi32>
}
func @floor(%arg0: tensor<8x16xf32>) -> tensor<8x16xf32> {
@@ -480,8 +480,8 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL: floor
-// CHECK: %0 = "tfl.floor"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
-// CHECK: return %0 : tensor<8x16xf32>
+// CHECK: "tfl.floor"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: return
}
func @floor_div(tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xf32> {
@@ -490,8 +490,8 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL: floor_div
-// CHECK: %0 = tfl.floor_div %arg0, %arg1 : tensor<8x16xf32>
-// CHECK: return %0 : tensor<8x16xf32>
+// CHECK: tfl.floor_div %arg0, %arg1 : tensor<8x16xf32>
+// CHECK: return
}
func @not_equal(%arg0: tensor<8x16xf32>, %arg1: tensor<8x16xf32>) -> tensor<8x16xi1> {
@@ -499,8 +499,8 @@
return %0 : tensor<8x16xi1>
// CHECK-LABEL: not_equal
-// CHECK: %0 = "tfl.not_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
-// CHECK: return %0 : tensor<8x16xi1>
+// CHECK: "tfl.not_equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
+// CHECK: return
}
func @select(%arg0: tensor<8xi1>, %arg1: tensor<8xf32>, %arg2: tensor<8xf32>) -> tensor<8xf32> {
@@ -508,8 +508,8 @@
return %0: tensor<8xf32>
// CHECK-LABEL: select
-// CHECK: %0 = "tfl.select"(%arg0, %arg1, %arg2)
-// CHECK: return %0 : tensor<8xf32>
+// CHECK: "tfl.select"(%arg0, %arg1, %arg2)
+// CHECK: return
}
func @select_multidim(%arg0: tensor<8xi1>, %arg1: tensor<8x3xf32>, %arg2: tensor<8x3xf32>) -> tensor<8x3xf32> {
@@ -517,8 +517,8 @@
return %0: tensor<8x3xf32>
// CHECK-LABEL: select_multidim
-// CHECK: %0 = "tfl.select"(%arg0, %arg1, %arg2)
-// CHECK: return %0 : tensor<8x3xf32>
+// CHECK: "tfl.select"(%arg0, %arg1, %arg2)
+// CHECK: return
}
func @select_v2(%arg0: tensor<8xi1>, %arg1: tensor<8xf32>, %arg2: tensor<8xf32>) -> tensor<8xf32> {
@@ -526,8 +526,8 @@
return %0: tensor<8xf32>
// CHECK-LABEL: select_v2
-// CHECK: %0 = "tfl.select"(%arg0, %arg1, %arg2)
-// CHECK: return %0 : tensor<8xf32>
+// CHECK: "tfl.select"(%arg0, %arg1, %arg2)
+// CHECK: return
}
func @select_v2_multidim(%arg0: tensor<8xi1>, %arg1: tensor<8x3xf32>, %arg2: tensor<8x3xf32>) -> tensor<8x3xf32> {
@@ -535,8 +535,8 @@
return %0: tensor<8x3xf32>
// CHECK-LABEL: select_v2_multidim
-// CHECK: %0 = "tfl.select"(%arg0, %arg1, %arg2)
-// CHECK: return %0 : tensor<8x3xf32>
+// CHECK: "tfl.select"(%arg0, %arg1, %arg2)
+// CHECK: return
}
func @sin(%arg0: tensor<f32>) -> tensor<f32> {
@@ -544,7 +544,7 @@
return %0 : tensor<f32>
// CHECK-LABEL:sin
-// CHECK: %0 = "tfl.sin"(%arg0) : (tensor<f32>) -> tensor<f32>
+// CHECK: "tfl.sin"(%arg0) : (tensor<f32>) -> tensor<f32>
}
func @topk(%arg0: tensor<8xf32>, %arg1: tensor<i32>) -> (tensor<?xf32>, tensor<?xi32>) {
@@ -552,8 +552,8 @@
return %0, %1: tensor<?xf32>, tensor<?xi32>
// CHECK-LABEL: topk
-// CHECK: %0:2 = "tfl.topk_v2"(%arg0, %arg1)
-// CHECK: return %0
+// CHECK: "tfl.topk_v2"(%arg0, %arg1)
+// CHECK: return
}
func @topk_2(%arg0: tensor<8xf32>) -> (tensor<2xf32>, tensor<2xi32>) {
@@ -562,8 +562,8 @@
return %1#0, %1#1: tensor<2xf32>, tensor<2xi32>
// CHECK-LABEL: topk_2
-// CHECK: %0:2 = "tfl.topk_v2"(%arg0, %cst)
-// CHECK: return %0
+// CHECK: "tfl.topk_v2"(%arg0, %cst)
+// CHECK: return
}
func @topk_3(%arg0: tensor<?x8xf32>) -> (tensor<?x2xf32>, tensor<?x2xi32>) {
@@ -572,8 +572,8 @@
return %1#0, %1#1: tensor<?x2xf32>, tensor<?x2xi32>
// CHECK-LABEL: topk_3
-// CHECK: %0:2 = "tfl.topk_v2"(%arg0, %cst) : (tensor<?x8xf32>, tensor<i32>) -> (tensor<?x2xf32>, tensor<?x2xi32>)
-// CHECK: return %0
+// CHECK: "tfl.topk_v2"(%arg0, %cst) : (tensor<?x8xf32>, tensor<i32>) -> (tensor<?x2xf32>, tensor<?x2xi32>)
+// CHECK: return
}
func @topk_4(%arg0: tensor<1x2x3x4xf32>) -> (tensor<1x2x3x2xf32>, tensor<1x2x3x2xi32>) {
@@ -582,8 +582,8 @@
return %1#0, %1#1: tensor<1x2x3x2xf32>, tensor<1x2x3x2xi32>
// CHECK-LABEL: topk_4
-// CHECK: %0:2 = "tfl.topk_v2"(%arg0, %cst)
-// CHECK: return %0
+// CHECK: "tfl.topk_v2"(%arg0, %cst)
+// CHECK: return
}
func @topk_5(%arg0: tensor<*xf32>) -> (tensor<*xf32>, tensor<*xi32>) {
@@ -592,8 +592,8 @@
return %1#0, %1#1: tensor<*xf32>, tensor<*xi32>
// CHECK-LABEL: topk_5
-// CHECK: %0:2 = "tfl.topk_v2"(%arg0, %cst)
-// CHECK: return %0
+// CHECK: "tfl.topk_v2"(%arg0, %cst)
+// CHECK: return
}
func @logicalAnd(%arg0: tensor<8xi1>, %arg1: tensor<8xi1>) -> tensor<8xi1> {
@@ -601,15 +601,15 @@
return %0: tensor<8xi1>
// CHECK-LABEL: logicalAnd
-// CHECK: %0 = tfl.logical_and %arg0, %arg1 : tensor<8xi1>
-// CHECK: return %0 : tensor<8xi1>
+// CHECK: tfl.logical_and %arg0, %arg1 : tensor<8xi1>
+// CHECK: return
}
func @logicalNot(%arg0: tensor<8xi1>) -> tensor<8xi1> {
%0 = "tf.LogicalNot"(%arg0) : (tensor<8xi1>) -> tensor<8xi1>
return %0 : tensor<8xi1>
// CHECK-LABEL: logicalNot
-// CHECK: %0 = "tfl.logical_not"(%arg0) : (tensor<8xi1>) -> tensor<8xi1>
+// CHECK: "tfl.logical_not"(%arg0) : (tensor<8xi1>) -> tensor<8xi1>
}
func @logicalOr(%arg0: tensor<8xi1>, %arg1: tensor<8xi1>) -> tensor<8xi1> {
@@ -617,8 +617,8 @@
return %0: tensor<8xi1>
// CHECK-LABEL: logicalOr
-// CHECK: %0 = tfl.logical_or %arg0, %arg1 : tensor<8xi1>
-// CHECK: return %0 : tensor<8xi1>
+// CHECK: tfl.logical_or %arg0, %arg1 : tensor<8xi1>
+// CHECK: return
}
func @addV2(%arg0: tensor<1xi32>, %arg1: tensor<1xi32>) -> tensor<1xi32> {
@@ -626,7 +626,7 @@
return %0 : tensor<1xi32>
// CHECK-LABEL: addV2
-// CHECK: %0 = tfl.add %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
+// CHECK: tfl.add %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<1xi32>
}
func @addN(%arg0: tensor<2x3xi32>, %arg1: tensor<2x3xi32>, %arg2: tensor<2x3xi32>) -> tensor<2x3xi32> {
@@ -634,8 +634,8 @@
return %0 : tensor<2x3xi32>
// CHECK-LABEL: addN
-// CHECK: %0 = "tfl.add_n"(%arg0, %arg1, %arg2) : (tensor<2x3xi32>, tensor<2x3xi32>, tensor<2x3xi32>) -> tensor<2x3xi32>
-// CHECK: return %0 : tensor<2x3xi32>
+// CHECK: "tfl.add_n"(%arg0, %arg1, %arg2) : (tensor<2x3xi32>, tensor<2x3xi32>, tensor<2x3xi32>) -> tensor<2x3xi32>
+// CHECK: return
}
func @reverse_v2(%arg0: tensor<1x2x3x4xf32>, %arg1: tensor<1xi32>) -> tensor<1x2x3x4xf32> {
@@ -643,8 +643,8 @@
return %0 : tensor<1x2x3x4xf32>
// CHECK-LABEL:reverse_v2
-// CHECK: %0 = "tfl.reverse_v2"(%arg0, %arg1) : (tensor<1x2x3x4xf32>, tensor<1xi32>) -> tensor<1x2x3x4xf32>
-// CHECK: return %0 : tensor<1x2x3x4xf32>
+// CHECK: "tfl.reverse_v2"(%arg0, %arg1) : (tensor<1x2x3x4xf32>, tensor<1xi32>) -> tensor<1x2x3x4xf32>
+// CHECK: return
}
func @matrix_diag(%arg0: tensor<8x16xf32>) -> tensor<8x16x16xf32> {
@@ -652,7 +652,7 @@
return %0 : tensor<8x16x16xf32>
// CHECK-LABEL:matrix_diag
-// CHECK: %0 = "tfl.matrix_diag"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16x16xf32>
+// CHECK: "tfl.matrix_diag"(%arg0) : (tensor<8x16xf32>) -> tensor<8x16x16xf32>
}
func @matrix_diag_v2_no_match(%arg0: tensor<8x16xf32>) -> tensor<8x16x16xf32> {
@@ -726,7 +726,7 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL:maximum
-// CHECK: %0 = "tfl.maximum"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.maximum"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @minimum(%arg0: tensor<8x16xf32>, %arg1: tensor<8x16xf32>) -> tensor<8x16xf32> {
@@ -734,7 +734,7 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL:minimum
-// CHECK: %0 = "tfl.minimum"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xf32>
+// CHECK: "tfl.minimum"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xf32>
}
func @realDiv(%arg0: tensor<8x16xf32>, %arg1: tensor<8x16xf32>) -> tensor<8x16xf32> {
@@ -742,7 +742,7 @@
return %0 : tensor<8x16xf32>
// CHECK-LABEL: realDiv
-// CHECK: %0 = tfl.div %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<8x16xf32>
+// CHECK: tfl.div %arg0, %arg1 {fused_activation_function = "NONE"} : tensor<8x16xf32>
}
func @equal(%arg0: tensor<8x16xf32>, %arg1: tensor<8x16xf32>) -> tensor<8x16xi1> {
@@ -750,8 +750,8 @@
return %0 : tensor<8x16xi1>
// CHECK-LABEL: equal
-// CHECK: %0 = "tfl.equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
-// CHECK: return %0 : tensor<8x16xi1>
+// CHECK: "tfl.equal"(%arg0, %arg1) : (tensor<8x16xf32>, tensor<8x16xf32>) -> tensor<8x16xi1>
+// CHECK: return
}
func @pad(tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<? x f32> {
@@ -760,8 +760,8 @@
return %0#0 : tensor<? x f32>
// CHECK-LABEL: pad
- // CHECK: %0 = "tfl.pad"(%arg0, %arg1) : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
- // CHECK: return %0 : tensor<?xf32>
+ // CHECK: "tfl.pad"(%arg0, %arg1) : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
+ // CHECK: return
}
func @pow(%arg0: tensor<2x1x3xf32>, %arg1: tensor<2x1x1xf32>) -> tensor<2x1x3xf32> {
@@ -770,7 +770,7 @@
// CHECK-LABEL: pow
// CHECK: %[[pow:.*]] = "tfl.pow"(%arg0, %arg1) : (tensor<2x1x3xf32>, tensor<2x1x1xf32>) -> tensor<2x1x3xf32>
- // CHECK: return %[[pow]] : tensor<2x1x3xf32>
+ // CHECK: return
}
func @tile(tensor<2x3xf32>, tensor<2xi32>) -> tensor<2x6xf32> {
@@ -780,8 +780,8 @@
return %0 : tensor<2x6xf32>
// CHECK-LABEL: tile
- // CHECK: %0 = "tfl.tile"(%arg0, %cst) : (tensor<2x3xf32>, tensor<2xi32>) -> tensor<2x6xf32>
- // CHECK: return %0 : tensor<2x6xf32>
+ // CHECK: "tfl.tile"(%arg0, %cst) : (tensor<2x3xf32>, tensor<2xi32>) -> tensor<2x6xf32>
+ // CHECK: return
}
func @padv2(tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<? x f32> {
@@ -791,8 +791,8 @@
return %0#0 : tensor<? x f32>
// CHECK-LABEL: padv2
- // CHECK: %0 = "tfl.padv2"(%arg0, %arg1, %cst) : (tensor<2x1x3xf32>, tensor<3x2xi32>, tensor<f32>) -> tensor<?xf32>
- // CHECK: return %0 : tensor<?xf32>
+ // CHECK: "tfl.padv2"(%arg0, %arg1, %cst) : (tensor<2x1x3xf32>, tensor<3x2xi32>, tensor<f32>) -> tensor<?xf32>
+ // CHECK: return
}
func @pack2Tensors(%arg0: tensor<2xi32>, %arg1: tensor<2xi32>) -> tensor<2x2xi32> {
@@ -800,7 +800,7 @@
return %0 : tensor<2x2xi32>
// CHECK-LABEL: pack2Tensors
-// CHECK: %0 = "tfl.pack"(%arg0, %arg1) {axis = 0 : i32, values_count = 2 : i32} : (tensor<2xi32>, tensor<2xi32>) -> tensor<2x2xi32>
+// CHECK: "tfl.pack"(%arg0, %arg1) {axis = 0 : i32, values_count = 2 : i32} : (tensor<2xi32>, tensor<2xi32>) -> tensor<2x2xi32>
}
func @pack3Tensors(%arg0: tensor<2xi32>, %arg1: tensor<2xi32>, %arg2 : tensor<2xi32>) -> tensor<2x3xi32> {
@@ -808,7 +808,7 @@
return %0 : tensor<2x3xi32>
// CHECK-LABEL: pack3Tensors
-// CHECK: %0 = "tfl.pack"(%arg0, %arg1, %arg2) {axis = 1 : i32, values_count = 3 : i32} : (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>) -> tensor<2x3xi32>
+// CHECK: "tfl.pack"(%arg0, %arg1, %arg2) {axis = 1 : i32, values_count = 3 : i32} : (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>) -> tensor<2x3xi32>
}
func @packNegAxis(%arg0: tensor<2xi32>, %arg1: tensor<2xi32>, %arg2 : tensor<2xi32>) -> tensor<2x3xi32> {
@@ -816,7 +816,7 @@
return %0 : tensor<2x3xi32>
// CHECK-LABEL: packNegAxis
-// CHECK: %0 = "tfl.pack"(%arg0, %arg1, %arg2) {axis = -1 : i32, values_count = 3 : i32} : (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>) -> tensor<2x3xi32>
+// CHECK: "tfl.pack"(%arg0, %arg1, %arg2) {axis = -1 : i32, values_count = 3 : i32} : (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>) -> tensor<2x3xi32>
}
func @unpack2Tensors(%arg0: tensor<2x2xi32>) -> tensor<2xi32> {
@@ -824,7 +824,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: unpack2Tensors
-// CHECK: %0:2 = "tfl.unpack"(%arg0) {axis = 0 : i32, num = 2 : i32} : (tensor<2x2xi32>) -> (tensor<2xi32>, tensor<2xi32>)
+// CHECK: "tfl.unpack"(%arg0) {axis = 0 : i32, num = 2 : i32} : (tensor<2x2xi32>) -> (tensor<2xi32>, tensor<2xi32>)
}
func @unpack3Tensors(%arg0: tensor<2x3xi32>) -> tensor<2xi32> {
@@ -832,7 +832,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: unpack3Tensors
-// CHECK: %0:3 = "tfl.unpack"(%arg0) {axis = 1 : i32, num = 3 : i32} : (tensor<2x3xi32>) -> (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>)
+// CHECK: "tfl.unpack"(%arg0) {axis = 1 : i32, num = 3 : i32} : (tensor<2x3xi32>) -> (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>)
}
func @unpackNegAxis(%arg0: tensor<2x3xi32>) -> tensor<2xi32> {
@@ -840,7 +840,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: unpackNegAxis
-// CHECK: %0:3 = "tfl.unpack"(%arg0) {axis = -1 : i32, num = 3 : i32} : (tensor<2x3xi32>) -> (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>)
+// CHECK: "tfl.unpack"(%arg0) {axis = -1 : i32, num = 3 : i32} : (tensor<2x3xi32>) -> (tensor<2xi32>, tensor<2xi32>, tensor<2xi32>)
}
func @mean(%arg0: tensor<2x2xf32>, %arg1: tensor<1xi32>) -> tensor<1x2xf32> {
@@ -848,7 +848,7 @@
return %0 : tensor<1x2xf32>
// CHECK-LABEL: mean
-// CHECK: %0 = "tfl.mean"(%arg0, %arg1) {keep_dims = false} : (tensor<2x2xf32>, tensor<1xi32>) -> tensor<1x2xf32>
+// CHECK: "tfl.mean"(%arg0, %arg1) {keep_dims = false} : (tensor<2x2xf32>, tensor<1xi32>) -> tensor<1x2xf32>
}
func @mean_true(%arg0: tensor<2x2xf32>, %arg1: tensor<1xi32>) -> tensor<1x2xf32> {
@@ -856,7 +856,7 @@
return %0 : tensor<1x2xf32>
// CHECK-LABEL: mean_true
-// CHECK: %0 = "tfl.mean"(%arg0, %arg1) {keep_dims = true} : (tensor<2x2xf32>, tensor<1xi32>) -> tensor<1x2xf32>
+// CHECK: "tfl.mean"(%arg0, %arg1) {keep_dims = true} : (tensor<2x2xf32>, tensor<1xi32>) -> tensor<1x2xf32>
}
func @sum(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -864,7 +864,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: sum
- // CHECK: %0 = "tfl.sum"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.sum"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @sum_true(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -872,7 +872,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: sum_true
- // CHECK: %0 = "tfl.sum"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.sum"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_min(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -880,7 +880,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_min
- // CHECK: %0 = "tfl.reduce_min"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_min"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_min_true(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -888,7 +888,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_min_true
- // CHECK: %0 = "tfl.reduce_min"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_min"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_max(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -896,7 +896,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_max
- // CHECK: %0 = "tfl.reduce_max"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_max"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_max_true(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -904,7 +904,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_max_true
- // CHECK: %0 = "tfl.reduce_max"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_max"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_prod(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -912,7 +912,7 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_prod
- // CHECK: %0 = "tfl.reduce_prod"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_prod"(%arg0, %arg1) {keep_dims = false} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @reduce_prod_true(%arg0: tensor<8x16x16xf32>, %arg1: tensor<2xi32>) -> tensor<?xf32> {
@@ -920,21 +920,21 @@
return %0 : tensor<?xf32>
// CHECK-LABEL: reduce_prod_true
- // CHECK: %0 = "tfl.reduce_prod"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.reduce_prod"(%arg0, %arg1) {keep_dims = true} : (tensor<8x16x16xf32>, tensor<2xi32>) -> tensor<?xf32>
}
func @batch_to_space_nd(%arg0: tensor<4x2x2x3xf32>, %arg1: tensor<2xi32>, %arg2: tensor<2x2xi32>) -> tensor<?xf32> {
%0 = "tf.BatchToSpaceND"(%arg0, %arg1, %arg2) : (tensor<4x2x2x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
return %0 : tensor<?xf32>
// CHECK-LABEL: batch_to_space_nd
- // CHECK: %0 = "tfl.batch_to_space_nd"(%arg0, %arg1, %arg2) : (tensor<4x2x2x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.batch_to_space_nd"(%arg0, %arg1, %arg2) : (tensor<4x2x2x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
}
func @space_to_batch_nd(%arg0: tensor<1x4x4x3xf32>, %arg1: tensor<2xi32>, %arg2: tensor<2x2xi32>) -> tensor<?xf32> {
%0 = "tf.SpaceToBatchND"(%arg0, %arg1, %arg2) : (tensor<1x4x4x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
return %0 : tensor<?xf32>
// CHECK-LABEL: space_to_batch_nd
- // CHECK: %0 = "tfl.space_to_batch_nd"(%arg0, %arg1, %arg2) : (tensor<1x4x4x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
+ // CHECK: "tfl.space_to_batch_nd"(%arg0, %arg1, %arg2) : (tensor<1x4x4x3xf32>, tensor<2xi32>, tensor<2x2xi32>) -> tensor<?xf32>
}
func @split(%arg0: tensor<i32>, %arg1: tensor<1x4x3x3xf32>) -> tensor<1x4x3xf32> {
@@ -942,7 +942,7 @@
return %0#0 : tensor<1x4x3xf32>
// CHECK-LABEL: split
- // CHECK: %0:3 = "tfl.split"(%arg0, %arg1) {num_splits = 3 : i32} : (tensor<i32>, tensor<1x4x3x3xf32>) -> (tensor<1x4x3xf32>, tensor<1x4x3xf32>, tensor<1x4x3xf32>)
+ // CHECK: "tfl.split"(%arg0, %arg1) {num_splits = 3 : i32} : (tensor<i32>, tensor<1x4x3x3xf32>) -> (tensor<1x4x3xf32>, tensor<1x4x3xf32>, tensor<1x4x3xf32>)
}
func @splitv(%arg0: tensor<1x4x3x3xf32>, %arg1: tensor<2xi32>, %arg2: tensor<i32>) -> tensor<1x4x2x3xf32> {
@@ -950,7 +950,7 @@
return %0#0 : tensor<1x4x2x3xf32>
// CHECK-LABEL: splitv
- // CHECK: %0:2 = "tfl.split_v"(%arg0, %arg1, %arg2) {num_splits = 2 : i32} : (tensor<1x4x3x3xf32>, tensor<2xi32>, tensor<i32>) -> (tensor<1x4x2x3xf32>, tensor<1x4x1x3xf32>)
+ // CHECK: "tfl.split_v"(%arg0, %arg1, %arg2) {num_splits = 2 : i32} : (tensor<1x4x3x3xf32>, tensor<2xi32>, tensor<i32>) -> (tensor<1x4x2x3xf32>, tensor<1x4x1x3xf32>)
}
func @matmul_transposed(%arg0: tensor<40x37xf32>, %arg1: tensor<40x37xf32>) -> tensor<40x40xf32> {
@@ -958,7 +958,7 @@
(tensor<40x37xf32>, tensor<40x37xf32>) -> tensor<40x40xf32>
return %0 : tensor<40x40xf32>
// CHECK-LABEL: matmul_transposed
-// CHECK: %0 = "tfl.fully_connected"(%arg0, %arg1, %cst) {fused_activation_function = "NONE", keep_num_dims = false, weights_format = "DEFAULT"} : (tensor<40x37xf32>, tensor<40x37xf32>, none) -> tensor<40x40xf32>
+// CHECK: "tfl.fully_connected"(%arg0, %arg1, %cst) {fused_activation_function = "NONE", keep_num_dims = false, weights_format = "DEFAULT"} : (tensor<40x37xf32>, tensor<40x37xf32>, none) -> tensor<40x40xf32>
}
func @concat2Tensors(%arg0: tensor<2x1xi32>, %arg1: tensor<2x1xi32>) -> tensor<2x2xi32> {
@@ -967,7 +967,7 @@
return %1 : tensor<2x2xi32>
// CHECK-LABEL: concat2Tensors
-// CHECK: %0 = "tfl.concatenation"(%arg0, %arg1) {axis = 1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x2xi32>
+// CHECK: "tfl.concatenation"(%arg0, %arg1) {axis = 1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x2xi32>
}
func @concat3Tensors(%arg0: tensor<2x1xi32>, %arg1: tensor<2x1xi32>, %arg2: tensor<2x1xi32>) -> tensor<2x3xi32> {
@@ -976,7 +976,7 @@
return %1 : tensor<2x3xi32>
// CHECK-LABEL: concat3Tensors
-// CHECK: %0 = "tfl.concatenation"(%arg0, %arg1, %arg2) {axis = -1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x3xi32>
+// CHECK: "tfl.concatenation"(%arg0, %arg1, %arg2) {axis = -1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x3xi32>
}
func @concatv2With3Tensors(%arg0: tensor<2x1xi32>, %arg1: tensor<2x1xi32>, %arg2: tensor<2x1xi32>) -> tensor<2x3xi32> {
@@ -985,7 +985,7 @@
return %1 : tensor<2x3xi32>
// CHECK-LABEL: concatv2With3Tensors
-// CHECK: %0 = "tfl.concatenation"(%arg0, %arg1, %arg2) {axis = -1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x3xi32>
+// CHECK: "tfl.concatenation"(%arg0, %arg1, %arg2) {axis = -1 : i32, fused_activation_function = "NONE"} : (tensor<2x1xi32>, tensor<2x1xi32>, tensor<2x1xi32>) -> tensor<2x3xi32>
}
func @resize_with_bilinear(%arg0: tensor<1x100x100x3xf32>, %arg1: tensor<4xi32>) -> tensor<?xf32> {
@@ -1037,8 +1037,8 @@
return %0#0 : tensor<? x f32>
// CHECK-LABEL: mirror_pad
- // CHECK: %0 = "tfl.mirror_pad"(%arg0, %arg1) {mode = "SYMMETRIC"} : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
- // CHECK: return %0 : tensor<?xf32>
+ // CHECK: "tfl.mirror_pad"(%arg0, %arg1) {mode = "SYMMETRIC"} : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
+ // CHECK: return
}
func @mirror_pad_reflect(tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<? x f32> {
@@ -1047,8 +1047,8 @@
return %0#0 : tensor<? x f32>
// CHECK-LABEL: mirror_pad_reflect
- // CHECK: %0 = "tfl.mirror_pad"(%arg0, %arg1) {mode = "REFLECT"} : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
- // CHECK: return %0 : tensor<?xf32>
+ // CHECK: "tfl.mirror_pad"(%arg0, %arg1) {mode = "REFLECT"} : (tensor<2x1x3xf32>, tensor<3x2xi32>) -> tensor<?xf32>
+ // CHECK: return
}
func @Tanh(%arg0: tensor<1xf32>) -> tensor<1xf32> {
@@ -1072,8 +1072,7 @@
return %0, %1 : tensor<?xf32> , tensor<?xi32>
// CHECK-LABEL: unique
- // CHECK: %0:2 = "tfl.unique"(%arg0) : (tensor<5xf32>) -> (tensor<?xf32>, tensor<?xi32>)
- // CHECK: %0
+ // CHECK: "tfl.unique"(%arg0) : (tensor<5xf32>) -> (tensor<?xf32>, tensor<?xi32>)
}
func @unique64(%arg0: tensor<5xf32>) -> (tensor<?xf32>, tensor<?xi64>) {
@@ -1081,8 +1080,7 @@
return %0, %1 : tensor<?xf32> , tensor<?xi64>
// CHECK-LABEL: unique64
- // CHECK: %0:2 = "tfl.unique"(%arg0) : (tensor<5xf32>) -> (tensor<?xf32>, tensor<?xi64>)
- // CHECK: %0
+ // CHECK: "tfl.unique"(%arg0) : (tensor<5xf32>) -> (tensor<?xf32>, tensor<?xi64>)
}
func @ReverseSequence(%arg0: tensor<2x3xf32>, %arg1: tensor<2xi32>) -> tensor<2x3xf32> {
@@ -1098,7 +1096,7 @@
return %0: tensor<2x3x4x5xf32>
// CHECK-LABEL: LRN
- // CHECK: %0 = "tfl.local_response_normalization"(%arg0) {alpha = 1.000000e+00 : f32, beta = 5.000000e-01 : f32, bias = 1.000000e+00 : f32, radius = 5 : i32} : (tensor<2x3x4x5xf32>) -> tensor<2x3x4x5xf32>
+ // CHECK: "tfl.local_response_normalization"(%arg0) {alpha = 1.000000e+00 : f32, beta = 5.000000e-01 : f32, bias = 1.000000e+00 : f32, radius = 5 : i32} : (tensor<2x3x4x5xf32>) -> tensor<2x3x4x5xf32>
// CHECK: return %0 : tensor<2x3x4x5xf32>
}
@@ -1115,7 +1113,7 @@
return %0 : tensor<i32>
// CHECK-LABEL: argmax
-// CHECK: %0 = "tfl.arg_max"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i32>
+// CHECK: "tfl.arg_max"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i32>
}
func @argmax64(%arg0: tensor<3xi32>, %arg1: tensor<i32>) -> tensor<i64> {
@@ -1123,7 +1121,7 @@
return %0 : tensor<i64>
// CHECK-LABEL: argmax64
-// CHECK: %0 = "tfl.arg_max"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i64>
+// CHECK: "tfl.arg_max"(%arg0, %arg1) : (tensor<3xi32>, tensor<i32>) -> tensor<i64>
}
func @space_to_depth(%arg0: tensor<1x2x2x1xf32>) -> tensor<?xf32> {
@@ -1216,7 +1214,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: non_max_suppression_v4
- // CHECK: %0:2 = "tfl.non_max_suppression_v4"(%arg0, %arg1, %arg2, %arg3, %arg4) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<i32>)
+ // CHECK: "tfl.non_max_suppression_v4"(%arg0, %arg1, %arg2, %arg3, %arg4) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<i32>)
}
func @non_max_suppression_v4_no_pad(%arg0: tensor<3x4xf32>, %arg1: tensor<3xf32>, %arg2: tensor<i32>, %arg3: tensor<f32>, %arg4: tensor<f32>) -> tensor<2xi32> {
@@ -1224,7 +1222,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: non_max_suppression_v4_no_pad
- // CHECK: %0:2 = "tfl.non_max_suppression_v4"(%arg0, %arg1, %arg2, %arg3, %arg4) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<i32>)
+ // CHECK: "tfl.non_max_suppression_v4"(%arg0, %arg1, %arg2, %arg3, %arg4) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<i32>)
}
func @non_max_suppression_v5(%arg0: tensor<3x4xf32>, %arg1: tensor<3xf32>, %arg2: tensor<i32>, %arg3: tensor<f32>, %arg4: tensor<f32>, %arg5: tensor<f32>) -> tensor<2xi32> {
@@ -1232,7 +1230,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: non_max_suppression_v5
- // CHECK: %0:3 = "tfl.non_max_suppression_v5"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<2xf32>, tensor<i32>)
+ // CHECK: "tfl.non_max_suppression_v5"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<2xf32>, tensor<i32>)
}
func @non_max_suppression_v5_no_pad(%arg0: tensor<3x4xf32>, %arg1: tensor<3xf32>, %arg2: tensor<i32>, %arg3: tensor<f32>, %arg4: tensor<f32>, %arg5: tensor<f32>) -> tensor<2xi32> {
@@ -1240,7 +1238,7 @@
return %0#0 : tensor<2xi32>
// CHECK-LABEL: non_max_suppression_v5_no_pad
- // CHECK: %0:3 = "tfl.non_max_suppression_v5"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<2xf32>, tensor<i32>)
+ // CHECK: "tfl.non_max_suppression_v5"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5) : (tensor<3x4xf32>, tensor<3xf32>, tensor<i32>, tensor<f32>, tensor<f32>, tensor<f32>) -> (tensor<2xi32>, tensor<2xf32>, tensor<i32>)
}
func @conv2d_backprop_input(%arg0: tensor<4xi32>, %arg1: tensor<3x3x1x32xf32>, %arg2: tensor<15x14x14x32xf32>) -> tensor<15x28x28x1xf32> {
diff --git a/tensorflow/compiler/mlir/lite/tests/ops.mlir b/tensorflow/compiler/mlir/lite/tests/ops.mlir
index 84a2af9..b77d89f 100644
--- a/tensorflow/compiler/mlir/lite/tests/ops.mlir
+++ b/tensorflow/compiler/mlir/lite/tests/ops.mlir
@@ -167,7 +167,7 @@
// test invalid Square input
func @testSquareWithWrongInputType(tensor<? x i32>) -> tensor<? x i32> {
^bb0(%arg0: tensor<? x i32>):
- // expected-error @+1 {{tfl.square' op operand #0 must be tensor of floating-point or QI8 type or QUI8 type values}}
+ // expected-error @+1 {{tfl.square' op operand #0 must be tensor of floating-point values}}
%0 = "tfl.square"(%arg0): (tensor<? x i32>) -> tensor<? x i32>
return %0#0 : tensor<? x i32>
}
@@ -190,12 +190,6 @@
return %0 : tensor<? x f32>
}
-func @testQuantizedSquare(tensor<? x !quant.uniform<u8:f32, 0.1>>) -> tensor<? x !quant.uniform<u8:f32, 0.1>> {
-^bb0(%arg0: tensor<? x !quant.uniform<u8:f32, 0.1>>):
- %0 = "tfl.square"(%arg0): (tensor<? x !quant.uniform<u8:f32, 0.1>>) -> tensor<? x !quant.uniform<u8:f32, 0.1>>
- return %0 : tensor<? x !quant.uniform<u8:f32, 0.1>>
-}
-
func @testQuantizedResizeNearestNeighbor(tensor<? x !quant.uniform<u8:f32, 0.1>>, tensor<? x i32>) -> tensor<? x !quant.uniform<u8:f32, 0.1>> {
^bb0(%arg0: tensor<? x !quant.uniform<u8:f32, 0.1>>, %arg1: tensor<? x i32>):
%0 = "tfl.resize_nearest_neighbor"(%arg0, %arg1) { align_corners = false } : (tensor<? x !quant.uniform<u8:f32, 0.1>>, tensor<? x i32>) -> tensor<? x !quant.uniform<u8:f32, 0.1>>
@@ -580,15 +574,6 @@
// -----
-// test invalid none type applied to a tensor type arg
-func @testUnidirectionalSequenceLstmWithInvalidNoneType(%arg0: tensor<? x f32>, %arg1: tensor<? x f32>, %arg2: none, %arg3: tensor<? x f32>, %arg4: tensor<? x f32>, %arg5: tensor<? x f32>, %arg6: tensor<? x f32>, %arg7: tensor<? x f32>, %arg8: tensor<? x f32>, %arg9: tensor<? x f32>, %arg10: tensor<? x f32>, %arg11: tensor<? x f32>, %arg12: tensor<? x f32>, %arg13: tensor<? x f32>, %arg14: tensor<? x f32>, %arg15: tensor<? x f32>, %arg16: tensor<? x f32>, %arg17: tensor<? x f32>, %arg18: tensor<? x f32>, %arg19: tensor<? x f32>, %arg20: tensor<? x f32>, %arg21: tensor<? x f32>, %arg22: tensor<? x f32>, %arg23: tensor<? x f32>) -> tensor<? x f32> {
- // expected-error @+1 {{'tfl.unidirectional_sequence_lstm' op operand #2 must be tensor of 32-bit float or 8-bit integer values}}
- %0 = "tfl.unidirectional_sequence_lstm"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5, %arg6, %arg7, %arg8, %arg9, %arg10, %arg11, %arg12, %arg13, %arg14, %arg15, %arg16, %arg17, %arg18, %arg19, %arg20, %arg21, %arg22, %arg23) {fused_activation_function = "NONE", time_major = false} : (tensor<?xf32>, tensor<? x f32>, none, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>) -> tensor<?xf32>
- return %0 : tensor<?xf32>
-}
-
-// -----
-
// test violation of projection weight and projection bias pred op trait
func @testUnidirectionalSequenceLstmWithInvalidNoneType(%arg0: tensor<? x f32>, %arg1: tensor<? x f32>, %arg2: tensor<? x f32>, %arg3: tensor<? x f32>, %arg4: tensor<? x f32>, %arg5: tensor<? x f32>, %arg6: tensor<? x f32>, %arg7: tensor<? x f32>, %arg8: tensor<? x f32>, %arg9: tensor<? x f32>, %arg10: tensor<? x f32>, %arg11: tensor<? x f32>, %arg12: tensor<? x f32>, %arg13: tensor<? x f32>, %arg14: tensor<? x f32>, %arg15: tensor<? x f32>, %arg16: none, %arg17: tensor<? x f32>, %arg18: tensor<? x f32>, %arg19: tensor<? x f32>, %arg20: tensor<? x f32>, %arg21: tensor<? x f32>, %arg22: tensor<? x f32>, %arg23: tensor<? x f32>) -> tensor<? x f32> {
// expected-error @+1 {{'tfl.unidirectional_sequence_lstm' op failed to verify that either projection weight must be specified or both projection weight and projection bias must not be specified}}
@@ -634,15 +619,6 @@
// -----
-// test invalid none type applied to a tensor type arg
-func @testLstmWithInvalidNoneType(%arg0: tensor<? x f32>, %arg1: tensor<? x f32>, %arg2: none, %arg3: tensor<? x f32>, %arg4: tensor<? x f32>, %arg5: tensor<? x f32>, %arg6: tensor<? x f32>, %arg7: tensor<? x f32>, %arg8: tensor<? x f32>, %arg9: tensor<? x f32>, %arg10: tensor<? x f32>, %arg11: tensor<? x f32>, %arg12: tensor<? x f32>, %arg13: tensor<? x f32>, %arg14: tensor<? x f32>, %arg15: tensor<? x f32>, %arg16: tensor<? x f32>, %arg17: tensor<? x f32>, %arg18: tensor<? x f32>, %arg19: tensor<? x f32>, %arg20: tensor<? x f32>, %arg21: tensor<? x f32>, %arg22: tensor<? x f32>, %arg23: tensor<? x f32>) -> tensor<? x f32> {
- // expected-error @+1 {{'tfl.lstm' op operand #2 must be tensor of 32-bit float or 8-bit integer values}}
- %0 = "tfl.lstm"(%arg0, %arg1, %arg2, %arg3, %arg4, %arg5, %arg6, %arg7, %arg8, %arg9, %arg10, %arg11, %arg12, %arg13, %arg14, %arg15, %arg16, %arg17, %arg18, %arg19, %arg20, %arg21, %arg22, %arg23) ({}) {fused_activation_function = "NONE"} : (tensor<?xf32>, tensor<? x f32>, none, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>, tensor<?xf32>) -> tensor<?xf32>
- return %0 : tensor<?xf32>
-}
-
-// -----
-
// test violation of projection weight and projection bias pred op trait
func @testLstmWithInvalidNoneType(%arg0: tensor<? x f32>, %arg1: tensor<? x f32>, %arg2: tensor<? x f32>, %arg3: tensor<? x f32>, %arg4: tensor<? x f32>, %arg5: tensor<? x f32>, %arg6: tensor<? x f32>, %arg7: tensor<? x f32>, %arg8: tensor<? x f32>, %arg9: tensor<? x f32>, %arg10: tensor<? x f32>, %arg11: tensor<? x f32>, %arg12: tensor<? x f32>, %arg13: tensor<? x f32>, %arg14: tensor<? x f32>, %arg15: tensor<? x f32>, %arg16: none, %arg17: tensor<? x f32>, %arg18: tensor<? x f32>, %arg19: tensor<? x f32>, %arg20: tensor<? x f32>, %arg21: tensor<? x f32>, %arg22: tensor<? x f32>, %arg23: tensor<? x f32>) -> tensor<? x f32> {
// expected-error @+1 {{'tfl.lstm' op failed to verify that either projection weight must be specified or both projection weight and projection bias must not be specified}}
@@ -1031,14 +1007,6 @@
// -----
-func @testConcatInvalidStorageType(%arg0: tensor<1x2x!quant.uniform<i9:f32, 0.1:128>>, %arg1: tensor<1x2x!quant.uniform<i8:f32, 0.1:128>>) -> tensor<2x2x!quant.uniform<i8:f32, 0.1:128>> {
- // expected-error @+1 {{'tfl.concatenation' op operand #0 must be tensor of 32-bit float or 64-bit integer or 32-bit integer or 16-bit integer or 8-bit integer or QI8 type or QUI8 type or TFLite uint8 type values}}
- %0 = "tfl.concatenation"(%arg0, %arg1) {axis = 0 : i32, fused_activation_function = "NONE"} : (tensor<1x2x!quant.uniform<i9:f32, 0.1:128>>, tensor<1x2x!quant.uniform<i8:f32, 0.1:128>>) -> tensor<2x2x!quant.uniform<i8:f32, 0.1:128>>
- return %0 : tensor<2x2x!quant.uniform<i8:f32, 0.1:128>>
-}
-
-// -----
-
func @testConcatInvalidAxis(%arg0: tensor<1x2xi32>, %arg1: tensor<1x2xi32>) -> tensor<2x2xi32> {
// expected-error @+1 {{'tfl.concatenation' op concatenation dimension must be in [-rank, rank)}}
%0 = "tfl.concatenation"(%arg0, %arg1) {axis = 2 : i32, fused_activation_function = "NONE"} : (tensor<1x2xi32>, tensor<1x2xi32>) -> tensor<2x2xi32>
@@ -1385,14 +1353,6 @@
// -----
-func @testEmbeddingLookupInvalidResultType(%arg0 : tensor<?xi32>, %arg1 : tensor<?xf32>) -> tensor<?xi32> {
- // expected-error @+1 {{'tfl.embedding_lookup' op result #0 must be tensor of 32-bit float or 8-bit integer or TFLite uint8 type values}}
- %0 = "tfl.embedding_lookup"(%arg0, %arg1) : (tensor<?xi32>,tensor<?xf32>) -> tensor<?xi32>
- return %0 : tensor<?xi32>
-}
-
-// -----
-
func @testEmbeddingLookupValueAndResultElementTypeTraitFailed(%arg0 : tensor<?xi32>, %arg1 : tensor<?xi8>) -> tensor<?xf32> {
// expected-error @+1 {{'tfl.embedding_lookup' op failed to verify that value and output must have same element type}}
%0 = "tfl.embedding_lookup"(%arg0, %arg1) : (tensor<?xi32>,tensor<?xi8>) -> tensor<?xf32>
@@ -1417,13 +1377,6 @@
// -----
-func @testSvdfUnsupportedType(%arg0: tensor<? x i32>, %arg1: tensor<? x i32>, %arg2: tensor<? x i32>, %arg3: tensor<? x i32>, %arg4: tensor<? x i32>) -> tensor<? x f32> {
- // expected-error @+1 {{'tfl.svdf' op operand #0 must be tensor of 32-bit float or 8-bit integer values}}
- %0 = "tfl.svdf"(%arg0, %arg1, %arg2, %arg3, %arg4) {fused_activation_function = "NONE", rank = 2 : i32} : (tensor<?xi32>, tensor<?xi32>, tensor<?xi32>, tensor<?xi32>, tensor<?xi32>) -> tensor<?xf32>
- return %0 : tensor<?xf32>
-}
-// -----
-
// CHECK-LABEL: testDepthToSpace
func @testDepthToSpaceF32(%arg0: tensor<1x1x1x4xf32>) -> tensor<1x2x2x1xf32> {
// CHECK: %[[ARG:.*]]: tensor<1x1x1x4xf32>
diff --git a/tensorflow/compiler/mlir/lite/tests/prepare-tf.mlir b/tensorflow/compiler/mlir/lite/tests/prepare-tf.mlir
index ccd3cca..5793c84 100644
--- a/tensorflow/compiler/mlir/lite/tests/prepare-tf.mlir
+++ b/tensorflow/compiler/mlir/lite/tests/prepare-tf.mlir
@@ -80,8 +80,8 @@
// offset - mean * scale * rsqrt(variance + epsilon)
// CHECK: %[[ADD2:.*]] = "tf.Add"(%[[MUL2]], %[[SUB]])
-// CHECK: %[[BATCHNORM1:.*]]:5 = "tf.FusedBatchNorm"(%[[ADD2]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
-// CHECK: {{.*}} = "tf.FusedBatchNorm"(%[[BATCHNORM1]]#0, %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
+// CHECK: %[[BATCHNORM1_a:[^,]+]], {{.*}} = "tf.FusedBatchNorm"(%[[ADD2]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
+// CHECK: "tf.FusedBatchNorm"(%[[BATCHNORM1_a]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
}
func @fusedBatchNormV3(tensor<8x8x8x8xf32>, tensor<8xf32>, tensor<8xf32>, tensor<8xf32>, tensor<8xf32>) -> (tensor<8x8x8x8xf32>, tensor<8xf32>) {
@@ -113,8 +113,8 @@
// offset - mean * scale * rsqrt(variance + epsilon)
// CHECK: %[[ADD2:.*]] = "tf.Add"(%[[MUL2]], %[[SUB]])
-// CHECK: %[[BATCHNORM1:.*]]:6 = "tf.FusedBatchNormV3"(%[[ADD2]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
-// CHECK: %[[BATCHNORM2:.*]]:6 = "tf.FusedBatchNormV3"(%[[BATCHNORM1]]#0, %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
+// CHECK: %[[BATCHNORM1_a:[^,]+]], {{.*}} = "tf.FusedBatchNormV3"(%[[ADD2]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
+// CHECK: "tf.FusedBatchNormV3"(%[[BATCHNORM1_a]], %[[ARG1]], %[[ARG2]], %[[ARG3]], %[[ARG4]])
}
// CHECK-LABEL: fakeQuantPerChannelForActivation
diff --git a/tensorflow/compiler/mlir/lite/tf_tfl_passes.cc b/tensorflow/compiler/mlir/lite/tf_tfl_passes.cc
index b330b72..58ff9ce 100644
--- a/tensorflow/compiler/mlir/lite/tf_tfl_passes.cc
+++ b/tensorflow/compiler/mlir/lite/tf_tfl_passes.cc
@@ -106,6 +106,10 @@
pass_manager->addNestedPass<mlir::FuncOp>(mlir::createCanonicalizerPass());
pass_manager->addNestedPass<mlir::FuncOp>(mlir::createCSEPass());
+ if (pass_config.inline_functions) {
+ pass_manager->addPass(mlir::createInlinerPass());
+ }
+
// The below passes only make sense if Builtin TFLite ops are enabled
// for emission.
if (pass_config.emit_builtin_tflite_ops) {
diff --git a/tensorflow/compiler/mlir/lite/tf_tfl_translate.cc b/tensorflow/compiler/mlir/lite/tf_tfl_translate.cc
index 535380a..a6c29f8 100644
--- a/tensorflow/compiler/mlir/lite/tf_tfl_translate.cc
+++ b/tensorflow/compiler/mlir/lite/tf_tfl_translate.cc
@@ -179,6 +179,7 @@
mlir::TFL::PassConfig pass_config(quant_specs);
pass_config.emit_builtin_tflite_ops = emit_builtin_tflite_ops;
pass_config.lower_tensor_list_ops = lower_tensor_list_ops;
+ pass_config.inline_functions = inline_functions;
tensorflow::AddTFToTFLConversionPasses(pass_config, &pm);
diff --git a/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.cc b/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.cc
index b7de1ac..b7fa624 100644
--- a/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.cc
+++ b/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.cc
@@ -76,3 +76,8 @@
llvm::cl::desc("<stats file>"),
llvm::cl::value_desc("filename"),
llvm::cl::init(""));
+
+// NOLINTNEXTLINE
+opt<bool> inline_functions(
+ "inline", llvm::cl::desc("Inline function calls within the main function "
+ "before legalization to TFLite."));
diff --git a/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h b/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h
index b556982..faa7486 100644
--- a/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h
+++ b/tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h
@@ -35,6 +35,7 @@
extern llvm::cl::opt<bool> use_splatted_constant;
extern llvm::cl::opt<bool> input_mlir;
extern llvm::cl::opt<bool> output_mlir;
+extern llvm::cl::opt<bool> inline_functions;
extern llvm::cl::list<std::string> custom_opdefs;
extern llvm::cl::opt<bool> emit_quant_adaptor_ops;
extern llvm::cl::opt<std::string> quant_stats_file_name;
diff --git a/tensorflow/compiler/mlir/lite/transforms/lower_static_tensor_list.cc b/tensorflow/compiler/mlir/lite/transforms/lower_static_tensor_list.cc
index b355fc7..bf0e716 100644
--- a/tensorflow/compiler/mlir/lite/transforms/lower_static_tensor_list.cc
+++ b/tensorflow/compiler/mlir/lite/transforms/lower_static_tensor_list.cc
@@ -299,9 +299,9 @@
new_element_shape_values.push_back(dim_value);
}
- auto attr = DenseIntElementsAttr::get<int32_t>(
- element_shape->getType().cast<ShapedType>(),
- new_element_shape_values);
+ auto attr =
+ DenseIntElementsAttr::get(element_shape->getType().cast<ShapedType>(),
+ new_element_shape_values);
auto new_element_shape = rewriter.create<ConstantOp>(
op.getLoc(), element_shape->getType(), attr);
element_shape = new_element_shape;
diff --git a/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h b/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h
index bf4257d..8df3ecb 100644
--- a/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h
+++ b/tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h
@@ -26,7 +26,7 @@
#include "mlir/IR/Builders.h" // TF:local_config_mlir
#include "mlir/IR/Dialect.h" // TF:local_config_mlir
#include "mlir/IR/Matchers.h" // TF:local_config_mlir
-#include "mlir/IR/OpDefinition.h" // TF:local_config_mlir
+#include "mlir/IR/OpImplementation.h" // TF:local_config_mlir
#include "mlir/IR/StandardTypes.h" // TF:local_config_mlir
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_types.h"
diff --git a/tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td b/tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td
index 7cd076b..1c4a8a7 100644
--- a/tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td
+++ b/tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td
@@ -2199,6 +2199,33 @@
let hasCanonicalizer = 1;
}
+def TF_IsFiniteOp : TF_Op<"IsFinite", [NoSideEffect, SameOperandsAndResultShape]> {
+ let summary = "Returns which elements of x are finite.";
+
+ let description = [{
+@compatibility(numpy)
+Equivalent to np.isfinite
+@end_compatibility
+
+Example:
+
+```python
+x = tf.constant([5.0, 4.8, 6.8, np.inf, np.nan])
+tf.math.is_finite(x) ==> [True, True, True, False, False]
+```
+ }];
+
+ let arguments = (ins
+ TF_FpTensor:$x
+ );
+
+ let results = (outs
+ I1Tensor:$y
+ );
+
+ TF_DerivedOperandTypeAttr T = TF_DerivedOperandTypeAttr<0>;
+}
+
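Complementing the example in the description, a small sketch of the raw-op form; the attribute `T` is derived from the operand's element type rather than passed explicitly:

```python
import numpy as np
import tensorflow as tf

x = tf.constant([5.0, 4.8, np.inf, np.nan])
# The result is a bool tensor with the same shape as x
# (per the SameOperandsAndResultShape trait above).
print(tf.raw_ops.IsFinite(x=x))  # [ True  True False False]
```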
def TF_L2LossOp : TF_Op<"L2Loss", [NoSideEffect]> {
let summary = "L2 Loss.";
@@ -2441,6 +2468,68 @@
);
}
+def TF_LookupTableFindV2Op : TF_Op<"LookupTableFindV2", []> {
+ let summary = "Looks up keys in a table, outputs the corresponding values.";
+
+ let description = [{
+The tensor `keys` must be of the same type as the keys of the table.
+The output `values` is of the type of the table values.
+
+The scalar `default_value` is the value output for keys not present in the
+table. It must also be of the same type as the table values.
+ }];
+
+ let arguments = (ins
+ TF_ResourceTensor:$table_handle,
+ TF_Tensor:$keys,
+ TF_Tensor:$default_value
+ );
+
+ let results = (outs
+ TF_Tensor:$values
+ );
+
+ TF_DerivedOperandTypeAttr Tin = TF_DerivedOperandTypeAttr<1>;
+ TF_DerivedOperandTypeAttr Tout = TF_DerivedOperandTypeAttr<2>;
+}
+
+def TF_LookupTableImportV2Op : TF_Op<"LookupTableImportV2", []> {
+ let summary = [{
+Replaces the contents of the table with the specified keys and values.
+ }];
+
+ let description = [{
+The tensor `keys` must be of the same type as the keys of the table.
+The tensor `values` must be of the type of the table values.
+ }];
+
+ let arguments = (ins
+ TF_ResourceTensor:$table_handle,
+ TF_Tensor:$keys,
+ TF_Tensor:$values
+ );
+
+ let results = (outs);
+
+ TF_DerivedOperandTypeAttr Tin = TF_DerivedOperandTypeAttr<1>;
+ TF_DerivedOperandTypeAttr Tout = TF_DerivedOperandTypeAttr<2>;
+}
+
+def TF_LookupTableSizeV2Op : TF_Op<"LookupTableSizeV2", []> {
+ let summary = "Computes the number of elements in the given table.";
+
+ let description = [{
+ }];
+
+ let arguments = (ins
+ TF_ResourceTensor:$table_handle
+ );
+
+ let results = (outs
+ I64Tensor:$size
+ );
+}
+
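Taken together, these three ops back the `tf.lookup` hash-table API; a minimal sketch of the round trip, with illustrative keys and values:

```python
import tensorflow as tf

keys = tf.constant(["a", "b"])
values = tf.constant([1, 2], dtype=tf.int64)

# KeyValueTensorInitializer populates the table (LookupTableImportV2-style);
# lookup() maps to LookupTableFindV2 and size() to LookupTableSizeV2.
table = tf.lookup.StaticHashTable(
    tf.lookup.KeyValueTensorInitializer(keys, values),
    default_value=-1)  # returned for keys absent from the table

print(table.lookup(tf.constant(["a", "c"])))  # [ 1 -1]
print(table.size())                           # 2
```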
def TF_MatMulOp : TF_Op<"MatMul", [NoSideEffect]> {
let summary = [{
Multiply the matrix "a" by the matrix "b".
diff --git a/tensorflow/compiler/mlir/tensorflow/ir/tf_op_base.td b/tensorflow/compiler/mlir/tensorflow/ir/tf_op_base.td
index e324b70..33adf8a 100644
--- a/tensorflow/compiler/mlir/tensorflow/ir/tf_op_base.td
+++ b/tensorflow/compiler/mlir/tensorflow/ir/tf_op_base.td
@@ -263,6 +263,30 @@
"mlir::ResultElementTypeIterator(values.end())};"
>;
+// A derived attribute that returns the shape of the first result type.
+def TF_DerivedResultShapeAttr : DerivedAttr<"ShapedType",
+ "return (*getOperation()->result_type_begin()).cast<ShapedType>();">;
+
+// A derived attribute that returns the element type of the tensor held by a
+// named resource-type operand or result.
+class TF_DerivedOperandOrResultHandleTypeAttr<string name> : DerivedTypeAttr<
+ "auto resource_type =\n"
+ " mlir::getElementTypeOrSelf(this->" # name # "())\n"
+ " .cast<TF::ResourceType>();\n"
+ "assert(!resource_type.getSubtypes().empty() && \"unknown type\");\n"
+ "return mlir::getElementTypeOrSelf(*resource_type.getSubtypes().begin());">;
+
+// A derived attribute that returns the shape of the tensor held by a named
+// resource-type operand or result.
+class TF_DerivedOperandOrResultHandleShapeAttr<string name> : DerivedAttr<
+ "ShapedType",
+ "auto resource_type =\n"
+ " mlir::getElementTypeOrSelf(this->" # name # "())\n"
+ " .cast<TF::ResourceType>();\n"
+ "assert(!resource_type.getSubtypes().empty() && \"unknown shape\");\n"
+ "return resource_type.getSubtypes().begin()->cast<ShapedType>();">;
+
def TF_IntTypeAttr : TypeAttrBase<"IntegerType", "integer type"> {
let returnType = "Type";
}
diff --git a/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h b/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h
index 7aa7f67..47004e4 100644
--- a/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h
+++ b/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h
@@ -26,7 +26,7 @@
#include "mlir/IR/Dialect.h" // TF:local_config_mlir
#include "mlir/IR/Matchers.h" // TF:local_config_mlir
#include "mlir/IR/Module.h" // TF:local_config_mlir
-#include "mlir/IR/OpDefinition.h" // TF:local_config_mlir
+#include "mlir/IR/OpImplementation.h" // TF:local_config_mlir
#include "mlir/IR/StandardTypes.h" // TF:local_config_mlir
#include "mlir/IR/TypeUtilities.h" // TF:local_config_mlir
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_traits.h"
diff --git a/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.td b/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.td
index df42edb..b22c16f 100644
--- a/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.td
+++ b/tensorflow/compiler/mlir/tensorflow/ir/tf_ops.td
@@ -33,10 +33,6 @@
include "mlir/Analysis/CallInterfaces.td"
#endif // MLIR_CALLINTERFACES
-// A derived attribute that returns the shape of the first result type.
-def TF_DerivedResultShapeAttr : DerivedAttr<"ShapedType",
- "return (*getOperation()->result_type_begin()).cast<ShapedType>();">;
-
class TF_TensorListInitOp<string mnemonic> : TF_Op<mnemonic, [NoSideEffect]> {
let results = (outs
TF_VariantTensor:$handle
@@ -404,4 +400,35 @@
let results = (outs);
}
+
+def TF_VarHandleOp : TF_Op<"VarHandleOp", []> {
+ let summary = "Creates a handle to a Variable resource from its name.";
+
+ let description = [{
+container: the container this variable is placed in.
+shared_name: the name by which this variable is referred to.
+dtype and shape: attributes representing the data type and shape held in the
+ variable.
+
+Example:
+ resource_variable_ops.var_handle_op(
+ dtype=dtypes.int32, shape=[8, 16], container="foo", shared_name="bar")
+ returns a handle for a variable with name "bar" in container "foo", and the
+ variable holds a tensor of shape [8, 16] and dtype int32.
+ }];
+
+ let arguments = (ins
+ DefaultValuedAttr<StrAttr, "">:$container,
+ DefaultValuedAttr<StrAttr, "">:$shared_name
+ );
+
+ let results = (outs
+ TF_ResourceTensor:$resource
+ );
+
+ TF_DerivedOperandOrResultHandleTypeAttr dtype =
+ TF_DerivedOperandOrResultHandleTypeAttr<"resource">;
+ TF_DerivedOperandOrResultHandleShapeAttr shape =
+ TF_DerivedOperandOrResultHandleShapeAttr<"resource">;
+}
#endif // TF_OPS
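The example in the VarHandleOp description corresponds to the raw op roughly as follows (a small sketch, eager mode assumed):

```python
import tensorflow as tf

# dtype and shape end up in the handle's resource subtype, which is what the
# derived handle type/shape attributes above read back out.
handle = tf.raw_ops.VarHandleOp(
    dtype=tf.int32, shape=[8, 16], container="foo", shared_name="bar")
tf.raw_ops.AssignVariableOp(resource=handle,
                            value=tf.zeros([8, 16], tf.int32))
print(tf.raw_ops.ReadVariableOp(resource=handle, dtype=tf.int32).shape)
# (8, 16)
```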
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/breakup-islands.mlir b/tensorflow/compiler/mlir/tensorflow/tests/breakup-islands.mlir
index c6a23fb..67c3982 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/breakup-islands.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/breakup-islands.mlir
@@ -17,11 +17,11 @@
// CHECK-LABEL: func @multiple_return
// CHECK: %[[GRAPH:.*]]:2 = tf_executor.graph {
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Add"(%[[ADD1]]#0, %arg1)
-// CHECK: tf_executor.fetch %[[ADD1]]#0, %[[ADD2]]#0 :
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Add"(%[[ADD1]], %arg1)
+// CHECK: tf_executor.fetch %[[ADD1]], %[[ADD2]] :
// CHECK: }
-// CHECK: return %[[GRAPH]]#0, %[[GRAPH]]#1
+// CHECK: return %[[GRAPH]]#0, %[[GRAPH]]#1
// CHECK: }
func @multiple_islands(%arg0: tensor<*xi32>, %arg1: tensor<i32>) -> (tensor<*xi32>, tensor<*xi32>) {
@@ -48,13 +48,13 @@
// CHECK-LABEL: func @multiple_islands
// CHECK: %[[GRAPH:.*]]:2 = tf_executor.graph {
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Add"(%[[ADD1]]#0, %arg1)
-// CHECK: %[[SUB1:.*]]:2 = tf_executor.island(%[[ADD2]]#1) wraps "tf.Sub"(%arg0, %arg1)
-// CHECK: %[[MUL:.*]]:2 = tf_executor.island(%[[SUB1]]#1) wraps "tf.Mul"(%[[SUB1]]#0, %arg1)
-// CHECK: %[[SUB2:.*]]:2 = tf_executor.island(%[[ADD2]]#1, %[[MUL]]#1) wraps "tf.Sub"(%[[ADD1]]#0, %[[SUB1]]#0)
-// CHECK: %[[PRINT:.*]]:2 = tf_executor.island(%[[SUB2]]#1) wraps "tf.Print"(%[[SUB2]]#0) {message = "sub result"}
-// CHECK: tf_executor.fetch %[[ADD2]]#0, %[[MUL]]#0, %[[PRINT]]#1 :
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Add"(%[[ADD1]], %arg1)
+// CHECK: %[[SUB1:.*]], %[[SUB1_control:.*]] = tf_executor.island(%[[ADD2_control]]) wraps "tf.Sub"(%arg0, %arg1)
+// CHECK: %[[MUL:.*]], %[[MUL_control:.*]] = tf_executor.island(%[[SUB1_control]]) wraps "tf.Mul"(%[[SUB1]], %arg1)
+// CHECK: %[[SUB2:.*]], %[[SUB2_control:.*]] = tf_executor.island(%[[ADD2_control]], %[[MUL_control]]) wraps "tf.Sub"(%[[ADD1]], %[[SUB1]])
+// CHECK: %[[PRINT:.*]], %[[PRINT_control:.*]] = tf_executor.island(%[[SUB2_control]]) wraps "tf.Print"(%[[SUB2]]) {message = "sub result"}
+// CHECK: tf_executor.fetch %[[ADD2]], %[[MUL]], %[[PRINT_control]] :
// CHECK: }
// CHECK: return %[[GRAPH]]#0, %[[GRAPH]]#1
@@ -73,10 +73,10 @@
// CHECK-LABEL: func @dangling_print
// CHECK: %[[GRAPH:.*]]:2 = tf_executor.graph {
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Add"(%1#0, %arg1)
-// CHECK: %[[PRINT:.*]]:2 = tf_executor.island(%[[ADD2]]#1) wraps "tf.Print"(%2#0) {message = "add result"}
-// CHECK: tf_executor.fetch %[[ADD1]]#0, %[[ADD2]]#0, %[[PRINT]]#1 :
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Add"(%[[ADD1]], %arg1)
+// CHECK: %[[PRINT:.*]], %[[PRINT_control:.*]] = tf_executor.island(%[[ADD2_control]]) wraps "tf.Print"(%[[ADD2]]) {message = "add result"}
+// CHECK: tf_executor.fetch %[[ADD1]], %[[ADD2]], %[[PRINT_control]] :
// CHECK: }
// CHECK: return %[[GRAPH]]#0, %[[GRAPH]]#1
@@ -102,14 +102,14 @@
// CHECK-LABEL: func @switch_and_merge(%arg0: tensor<*xi32>, %arg1: tensor<i32>) -> (tensor<*xi32>, tensor<i32>) {
// CHECK: %[[GRAPH:.*]]:2 = tf_executor.graph {
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-// CHECK: %[[LESS:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Less"(%arg1, %arg1)
-// CHECK: %[[PRINT1:.*]]:2 = tf_executor.island(%[[LESS]]#1) wraps "tf.Print"(%[[ADD1]]#0) {message = "add result 1"}
-// CHECK: %[[SWITCH:.*]]:3 = tf_executor.Switch %[[ADD1]]#0, %[[LESS]]#0, %[[PRINT1]]#1
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island wraps "tf.Add"(%[[SWITCH]]#0, %arg1)
-// CHECK: %[[PRINT2:.*]]:2 = tf_executor.island(%[[ADD2]]#1) wraps "tf.Print"(%[[ADD2]]#0) {message = "add result 2"}
-// CHECK: %[[MERGE:.*]]:3 = tf_executor.Merge %[[ADD2]]#0, %[[SWITCH]]#1, %[[PRINT2]]#1
-// CHECK: tf_executor.fetch %[[MERGE]]#0, %[[MERGE]]#1
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+// CHECK: %[[LESS:.*]], %[[LESS_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Less"(%arg1, %arg1)
+// CHECK: %[[PRINT1:.*]], %[[PRINT1_control:.*]] = tf_executor.island(%[[LESS_control]]) wraps "tf.Print"(%[[ADD1]]) {message = "add result 1"}
+// CHECK: %[[SWITCH_false:.*]], %[[SWITCH_true:.*]], {{.*}} = tf_executor.Switch %[[ADD1]], %[[LESS]], %[[PRINT1_control]]
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island wraps "tf.Add"(%[[SWITCH_false]], %arg1)
+// CHECK: %[[PRINT2:.*]], %[[PRINT2_control:.*]] = tf_executor.island(%[[ADD2_control]]) wraps "tf.Print"(%[[ADD2]]) {message = "add result 2"}
+// CHECK: %[[MERGE:.*]], %[[MERGE_index:.*]], %{{.*}} = tf_executor.Merge %[[ADD2]], %[[SWITCH_true]], %[[PRINT2_control]]
+// CHECK: tf_executor.fetch %[[MERGE]], %[[MERGE_index]]
// CHECK: }
// CHECK: return %[[GRAPH]]#0, %[[GRAPH]]#1
@@ -128,10 +128,10 @@
// CHECK-LABEL: func @control_flow_plumbing
// CHECK: %[[GRAPH:.*]] = tf_executor.graph {
-// CHECK: %[[PRINT:.*]]:2 = tf_executor.island wraps "tf.Print"(%arg0) {message = "Random Print"}
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island(%[[PRINT]]#1) wraps "tf.Add"(%arg0, %arg1)
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Add"(%2#0, %arg1)
-// CHECK: tf_executor.fetch %[[ADD2]]#0 : tensor<*xi32>
+// CHECK: %[[PRINT:.*]], %[[PRINT_control:.*]] = tf_executor.island wraps "tf.Print"(%arg0) {message = "Random Print"}
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island(%[[PRINT_control]]) wraps "tf.Add"(%arg0, %arg1)
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Add"(%[[ADD1]], %arg1)
+// CHECK: tf_executor.fetch %[[ADD2]] : tensor<*xi32>
// CHECK: }
// CHECK: return %[[GRAPH]] : tensor<*xi32>
@@ -149,7 +149,7 @@
// CHECK-LABEL: func @fetching_arg
// CHECK: tf_executor.graph {
-// CHECK: %[[ADD1:.*]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg0)
-// CHECK: %[[ADD2:.*]]:2 = tf_executor.island(%[[ADD1]]#1) wraps "tf.Add"(%[[ADD1]]#0, %arg0)
-// CHECK: tf_executor.fetch %[[ADD2]]#1 : !tf_executor.control
+// CHECK: %[[ADD1:.*]], %[[ADD1_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg0)
+// CHECK: %[[ADD2:.*]], %[[ADD2_control:.*]] = tf_executor.island(%[[ADD1_control]]) wraps "tf.Add"(%[[ADD1]], %arg0)
+// CHECK: tf_executor.fetch %[[ADD2_control]] : !tf_executor.control
// CHECK: }
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/cluster_formation.mlir b/tensorflow/compiler/mlir/tensorflow/tests/cluster_formation.mlir
index 0356070..8a5375d 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/cluster_formation.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/cluster_formation.mlir
@@ -99,7 +99,7 @@
// CHECK-SAME: (%[[ARG_0:[a-z0-9]*]]: tensor<?xi32>)
func @argliveinotherislands(%arg0: tensor<?xi32>) -> tensor<?xi32> {
%0 = tf_executor.graph {
- // CHECK: %[[OTHER_ISLAND_OUTPUT:[0-9]*]]:2 = tf_executor.island wraps "tf.D"
+ // CHECK: %[[OTHER_ISLAND_OUTPUT:[a-z0-9]*]], %{{.*}} = tf_executor.island wraps "tf.D"
%1:2 = tf_executor.island wraps "tf.D"(%arg0) : (tensor<?xi32>) -> tensor<?xi32>
%2:2 = tf_executor.island {
@@ -107,7 +107,7 @@
// CHECK: %[[A_OUTPUT:[0-9]*]] = "tf.A"(%[[ARG_0]]) : (tensor<?xi32>) -> tensor<?xi32>
%3 = "tf.A"(%arg0) {device = "tpu0"} : (tensor<?xi32>) -> tensor<?xi32>
- // CHECK: %[[B_OUTPUT:[0-9]*]] = "tf.B"(%[[A_OUTPUT]], %[[OTHER_ISLAND_OUTPUT]]#0) : (tensor<?xi32>, tensor<?xi32>) -> tensor<?xi32>
+ // CHECK: %[[B_OUTPUT:[0-9]*]] = "tf.B"(%[[A_OUTPUT]], %[[OTHER_ISLAND_OUTPUT]]) : (tensor<?xi32>, tensor<?xi32>) -> tensor<?xi32>
%4 = "tf.B"(%3, %1#0) {device = "tpu0"} : (tensor<?xi32>, tensor<?xi32>) -> tensor<?xi32>
// CHECK: tf_device.return %[[B_OUTPUT]]
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/cluster_outlining.mlir b/tensorflow/compiler/mlir/tensorflow/tests/cluster_outlining.mlir
index 519bea5..5f11ad2 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/cluster_outlining.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/cluster_outlining.mlir
@@ -91,12 +91,12 @@
func @multiplelaunches(%arg0: tensor<?xi32>) -> tensor<?xi32> {
%0 = tf_executor.graph {
%1:2 = tf_executor.island wraps
- // CHECK: %[[A_OUTPUT:[0-9]*]]:2 = {{.*}} "tf_device.launch_func"() {device = "tpu0", func = @tpu0_func}
+ // CHECK: %[[A_OUTPUT:[a-z0-9]*]], %{{.*}} = {{.*}} "tf_device.launch_func"() {device = "tpu0", func = @tpu0_func}
"tf_device.launch"() ( {
%3 = "tf.A"() : () -> tensor<?xi32>
tf_device.return %3 : tensor<?xi32>
}) {device = "tpu0"} : () -> tensor<?xi32>
- // CHECK: tf_executor.fetch %[[A_OUTPUT]]#0
+ // CHECK: tf_executor.fetch %[[A_OUTPUT]]
tf_executor.fetch %1#0 : tensor<?xi32>
}
return %0 : tensor<?xi32>
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/control_to_executor_dialect.mlir b/tensorflow/compiler/mlir/tensorflow/tests/control_to_executor_dialect.mlir
index 25adff9..1d735e0 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/control_to_executor_dialect.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/control_to_executor_dialect.mlir
@@ -11,9 +11,9 @@
}
// CHECK-NEXT: %[[GRAPH:[0-9]*]] = tf_executor.graph {
-// CHECK-NEXT: %[[IDENTITY:[0-9]*]]:2 = tf_executor.island wraps "tf.Identity"(%[[ARG0]]) : (tensor<*xf32>) -> tensor<*xf32>
-// CHECK-NEXT: %[[ADD:[0-9]*]]:2 = tf_executor.island(%[[IDENTITY]]#1) wraps "tf.Add"(%[[ARG0]], %[[ARG0]]) : (tensor<*xf32>, tensor<*xf32>) -> tensor<*xf32>
-// CHECK-NEXT: tf_executor.fetch %[[ADD]]#0 : tensor<*xf32>
+// CHECK-NEXT: %[[IDENTITY:.*]], %[[IDENTITY_control:.*]] = tf_executor.island wraps "tf.Identity"(%[[ARG0]]) : (tensor<*xf32>) -> tensor<*xf32>
+// CHECK-NEXT: %[[ADD:.*]], %[[ADD_control:.*]] = tf_executor.island(%[[IDENTITY_control]]) wraps "tf.Add"(%[[ARG0]], %[[ARG0]]) : (tensor<*xf32>, tensor<*xf32>) -> tensor<*xf32>
+// CHECK-NEXT: tf_executor.fetch %[[ADD]] : tensor<*xf32>
// CHECK-NEXT: }
// CHECK-NEXT: return %[[GRAPH]] : tensor<*xf32>
@@ -39,21 +39,21 @@
}
// CHECK-NEXT: tf_executor.graph {
-// CHECK-NEXT: %[[CONST:[0-9]*]]:2 = tf_executor.island wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "Const", value = dense<1> : tensor<i32>} : () -> tensor<i32>
-// CHECK-NEXT: %[[ENTER:[0-9]*]]:2 = tf_executor.Enter %[[CONST]]#0 frame "while/while_context" : (tensor<i32>) -> (tensor<*xi32>, !tf_executor.control) {T = "tfdtype$DT_INT32", device = "", name = "while/Enter"}
-// CHECK-NEXT: %[[NOOP:[0-9]*]] = tf_executor.island wraps "tf.NoOp"() {device = "", name = "cluster/pivot"} : () -> ()
-// CHECK-NEXT: %[[NEXTIT_SRC:[0-9]*]]:3 = tf_executor.NextIteration.Source : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", id = 0 : i64, name = "while/NextIteration"}
-// CHECK-NEXT: %[[MERGE:[0-9]*]]:3 = tf_executor.Merge %[[NEXTIT_SRC]]#0, %[[ENTER]]#0 : tensor<*xi32> {N = 2 : i64, T = "tfdtype$DT_INT32", device = "", name = "while/Merge"}
-// CHECK-NEXT: %[[CONST_LESS:[0-9]*]]:2 = tf_executor.island(%[[MERGE]]#2) wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "while/Less/y", value = dense<2> : tensor<i32>} : () -> tensor<i32>
-// CHECK-NEXT: %[[LESS:[0-9]*]]:2 = tf_executor.island wraps "tf.Less"(%[[MERGE]]#0, %[[CONST_LESS]]#0) {T = "tfdtype$DT_INT32", device = "", name = "while/Less"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi1>
-// CHECK-NEXT: %[[COND:[0-9]*]]:2 = tf_executor.LoopCond %[[LESS:[0-9]*]]#0 : (tensor<*xi1>) -> (tensor<i1>, !tf_executor.control) {device = "", name = "while/LoopCond"}
-// CHECK-NEXT: %[[SWITCH:[0-9]*]]:3 = tf_executor.Switch %[[MERGE]]#0, %[[COND]]#0 : tensor<*xi32> {T = "tfdtype$DT_INT32", _class = ["loc = @while/Merge"], device = "", name = "while/Switch"}
-// CHECK-NEXT: %[[EXIT:[0-9]*]]:2 = tf_executor.Exit %[[SWITCH]]#0 : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", name = "while/Exit"}
-// CHECK-NEXT: %[[IDENTITY:[0-9]*]]:2 = tf_executor.island wraps "tf.Identity"(%[[SWITCH]]#1) {T = "tfdtype$DT_INT32", device = "", name = "while/Identity"} : (tensor<*xi32>) -> tensor<*xi32>
-// CHECK-NEXT: %[[CONST_ADD:[0-9]*]]:2 = tf_executor.island(%[[IDENTITY]]#1) wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "while/Add/y", value = dense<3> : tensor<i32>} : () -> tensor<i32>
-// CHECK-NEXT: %[[ADD:[0-9]*]]:2 = tf_executor.island wraps "tf.Add"(%[[IDENTITY]]#0, %[[CONST_ADD]]#0) {T = "tfdtype$DT_INT32", device = "", name = "while/Add"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
-// CHECK-NEXT: %[[CT:[0-9]*]] = tf_executor.ControlTrigger %[[NOOP]], %[[ADD]]#1, %[[EXIT]]#1 {_tpu_replicate = "cluster", device = "", name = "gradients/while/mul_2_Da30D05wlPU_grad/SymbolicGradient/b_sync"}
-// CHECK-NEXT: tf_executor.NextIteration.Sink [%[[NEXTIT_SRC]]#1] %[[ADD]]#0, %[[CT]] : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", id = 0 : i64, name = "while/NextIteration"}
+// CHECK-NEXT: %[[CONST:.*]], %[[CONST_control:.*]] = tf_executor.island wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "Const", value = dense<1> : tensor<i32>} : () -> tensor<i32>
+// CHECK-NEXT: %[[ENTER:.*]], %[[ENTER_control:.*]] = tf_executor.Enter %[[CONST]] frame "while/while_context" : (tensor<i32>) -> (tensor<*xi32>, !tf_executor.control) {T = "tfdtype$DT_INT32", device = "", name = "while/Enter"}
+// CHECK-NEXT: %[[NOOP:[a-z_0-9 ]*]] = tf_executor.island wraps "tf.NoOp"() {device = "", name = "cluster/pivot"} : () -> ()
+// CHECK-NEXT: %[[NEXTIT_SRC:.*]], %[[NEXTIT_SRC_token:.*]], %{{.*}} = tf_executor.NextIteration.Source : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", id = 0 : i64, name = "while/NextIteration"}
+// CHECK-NEXT: %[[MERGE:.*]], %[[MERGE_index:.*]], %[[MERGE_control:.*]] = tf_executor.Merge %[[NEXTIT_SRC]], %[[ENTER]] : tensor<*xi32> {N = 2 : i64, T = "tfdtype$DT_INT32", device = "", name = "while/Merge"}
+// CHECK-NEXT: %[[CONST_LESS:.*]], %[[CONST_LESS_control:.*]] = tf_executor.island(%[[MERGE_control]]) wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "while/Less/y", value = dense<2> : tensor<i32>} : () -> tensor<i32>
+// CHECK-NEXT: %[[LESS:.*]], %[[LESS_control:.*]] = tf_executor.island wraps "tf.Less"(%[[MERGE]], %[[CONST_LESS]]) {T = "tfdtype$DT_INT32", device = "", name = "while/Less"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi1>
+// CHECK-NEXT: %[[COND:.*]], %[[COND_control:.*]] = tf_executor.LoopCond %[[LESS]] : (tensor<*xi1>) -> (tensor<i1>, !tf_executor.control) {device = "", name = "while/LoopCond"}
+// CHECK-NEXT: %[[SWITCH_false:.*]], %[[SWITCH_true:.*]], %[[SWITCH_control:.*]] = tf_executor.Switch %[[MERGE]], %[[COND]] : tensor<*xi32> {T = "tfdtype$DT_INT32", _class = ["loc = @while/Merge"], device = "", name = "while/Switch"}
+// CHECK-NEXT: %[[EXIT:.*]], %[[EXIT_control:.*]] = tf_executor.Exit %[[SWITCH_false]] : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", name = "while/Exit"}
+// CHECK-NEXT: %[[IDENTITY:.*]], %[[IDENTITY_control:.*]] = tf_executor.island wraps "tf.Identity"(%[[SWITCH_true]]) {T = "tfdtype$DT_INT32", device = "", name = "while/Identity"} : (tensor<*xi32>) -> tensor<*xi32>
+// CHECK-NEXT: %[[CONST_ADD:.*]], %[[CONST_ADD_control:.*]] = tf_executor.island(%[[IDENTITY_control]]) wraps "tf.Const"() {device = "", dtype = "tfdtype$DT_INT32", name = "while/Add/y", value = dense<3> : tensor<i32>} : () -> tensor<i32>
+// CHECK-NEXT: %[[ADD:.*]], %[[ADD_control:.*]] = tf_executor.island wraps "tf.Add"(%[[IDENTITY]], %[[CONST_ADD]]) {T = "tfdtype$DT_INT32", device = "", name = "while/Add"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
+// CHECK-NEXT: %[[CT:[0-9]*]] = tf_executor.ControlTrigger %[[NOOP]], %[[ADD_control]], %[[EXIT_control]] {_tpu_replicate = "cluster", device = "", name = "gradients/while/mul_2_Da30D05wlPU_grad/SymbolicGradient/b_sync"}
+// CHECK-NEXT: tf_executor.NextIteration.Sink [%[[NEXTIT_SRC_token]]] %[[ADD]], %[[CT]] : tensor<*xi32> {T = "tfdtype$DT_INT32", device = "", id = 0 : i64, name = "while/NextIteration"}
// CHECK-NEXT: tf_executor.fetch
// CHECK-NEXT: }
// CHECK-NEXT: return
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/executor_canonicalize.mlir b/tensorflow/compiler/mlir/tensorflow/tests/executor_canonicalize.mlir
index 770fc37..029a1e7 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/executor_canonicalize.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/executor_canonicalize.mlir
@@ -93,12 +93,12 @@
}
// CHECK-NEXT: %[[GRAPH:[0-9]*]]:3 = tf_executor.graph {
-// CHECK-NEXT: %[[ISLAND_0:[0-9]*]]:4 = tf_executor.island {
+// CHECK-NEXT: %[[ISLAND_0:.*]]:3, %{{.*}} = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[OP_A]])
// CHECK-NEXT: %[[OP_C:[0-9]*]] = "tf.opC"(%[[OP_B]])
// CHECK-NEXT: tf_executor.yield %[[OP_A]], %[[OP_C]], %[[OP_B]] : tensor<i1>, tensor<i1>, tensor<i1>
-// CHECK: %[[ISLAND_1:[0-9]*]]:3 = tf_executor.island {
+// CHECK: %[[ISLAND_1:.*]]:2, %{{.*}} = tf_executor.island {
// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_E:[0-9]*]] = "tf.opE"(%[[OP_D]])
// CHECK-NEXT: tf_executor.yield %[[OP_E]], %[[OP_D]] : tensor<i1>, tensor<i1>
@@ -124,13 +124,13 @@
}
// CHECK-NEXT: %[[GRAPH:[0-9]*]]:3 = tf_executor.graph {
-// CHECK-NEXT: %[[ISLAND:[0-9]*]]:4 = tf_executor.island {
+// CHECK-NEXT: %[[ISLAND:.*]]:3, %{{.*}} = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[OP_A]])
// CHECK-NEXT: %[[OP_C:[0-9]*]] = "tf.opC"(%[[OP_B]])
// CHECK-NEXT: tf_executor.yield %[[OP_A]], %[[OP_C]], %[[OP_B]] : tensor<i1>, tensor<i1>, tensor<i1>
-// CHECK: %[[LOOP_COND:[0-9]*]]:2 = tf_executor.LoopCond %[[ISLAND]]#0
-// CHECK-NEXT: tf_executor.fetch %[[ISLAND]]#1, %[[ISLAND]]#0, %[[LOOP_COND]]#0 : tensor<i1>, tensor<i1>, tensor<i1>
+// CHECK: %[[LOOP_COND:.*]], %[[LOOP_COND_control:.*]] = tf_executor.LoopCond %[[ISLAND]]#0
+// CHECK-NEXT: tf_executor.fetch %[[ISLAND]]#1, %[[ISLAND]]#0, %[[LOOP_COND]] : tensor<i1>, tensor<i1>, tensor<i1>
// CHECK: return %[[GRAPH]]#2, %[[GRAPH]]#1 : tensor<i1>, tensor<i1>
@@ -285,7 +285,7 @@
return
}
-// CHECK: %[[ISLAND_0:[0-9]*]] = tf_executor.island
+// CHECK: %[[ISLAND_0:.*]] = tf_executor.island
// CHECK-NEXT: "tf.opA"
// CHECK: tf_executor.island(%[[ISLAND_0]])
// CHECK-NEXT: "tf.opB"
@@ -313,7 +313,7 @@
return
}
-// CHECK: %[[ISLAND_1:[0-9]*]] = tf_executor.island
+// CHECK: %[[ISLAND_1:.*]] = tf_executor.island
// CHECK-NEXT: "tf.opA"
// CHECK: tf_executor.island(%[[ISLAND_1]])
// CHECK-NEXT: "tf.opB"
@@ -368,7 +368,7 @@
return
}
-// CHECK: %[[ISLAND_0:[0-9]*]] = tf_executor.island
+// CHECK: %[[ISLAND_0:.*]] = tf_executor.island
// CHECK-NEXT: "tf.opA"
// CHECK: tf_executor.island(%[[ISLAND_0]])
// CHECK-NEXT: "tf.opB"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/executor_island_coarsening.mlir b/tensorflow/compiler/mlir/tensorflow/tests/executor_island_coarsening.mlir
index 478f734..509a496 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/executor_island_coarsening.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/executor_island_coarsening.mlir
@@ -19,11 +19,11 @@
return %0 : tensor<f32>
}
-// CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK-NEXT: "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"
// CHECK-NEXT: tf_executor.yield %[[OP_B]] : tensor<f32>
-// CHECK: tf_executor.fetch %[[ISLAND]]#0 : tensor<f32>
+// CHECK: tf_executor.fetch %[[ISLAND]] : tensor<f32>
// Test that islands linked by a data dependency are merged.
@@ -44,11 +44,11 @@
return %0 : tensor<i1>
}
-// CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[OP_A]])
// CHECK-NEXT: tf_executor.yield %[[OP_B]] : tensor<i1>
-// CHECK: tf_executor.fetch %[[ISLAND]]#0 : tensor<i1>
+// CHECK: tf_executor.fetch %[[ISLAND]] : tensor<i1>
// Test empty/trivial islands are merged.
@@ -86,7 +86,7 @@
return %0#0, %0#1 : tensor<i1>, tensor<i1>
}
-// CHECK: %[[ISLAND:[0-9]*]]:3 = tf_executor.island
+// CHECK: %[[ISLAND:.*]]:2, %{{.*}} = tf_executor.island
// CHECK-NEXT: "tf.opA"(%[[ARG_1]], %[[ARG_0]])
// CHECK: tf_executor.fetch %[[ISLAND]]#0, %[[ISLAND]]#1 : tensor<i1>, tensor<i1>
@@ -103,7 +103,7 @@
return %0#0, %0#1 : tensor<i1>, tensor<i1>
}
-// CHECK: %[[ISLAND:[0-9]*]]:3 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]]:2, %{{.*}} = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[ARG_1]])
// CHECK-NEXT: tf_executor.yield %[[OP_A]], %[[OP_B]] : tensor<i1>, tensor<i1>
@@ -130,13 +130,13 @@
return %0 : tensor<i32>
}
-// CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]], %[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[OP_A]], %[[ARG_0]])
// CHECK-NEXT: %[[OP_C:[0-9]*]] = "tf.opC"(%[[OP_B]], %[[ARG_1]])
// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[OP_C]], %[[ARG_0]])
// CHECK-NEXT: tf_executor.yield %[[OP_D]] : tensor<i32>
-// CHECK: tf_executor.fetch %[[ISLAND]]#0 : tensor<i32>
+// CHECK: tf_executor.fetch %[[ISLAND]] : tensor<i32>
// Test merging multiple islands with multiple inner ops preserves order.
@@ -164,7 +164,7 @@
return %0 : tensor<i32>
}
-// CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]], %[[ARG_0]])
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"(%[[OP_A]], %[[ARG_0]])
// CHECK-NEXT: %[[OP_C:[0-9]*]] = "tf.opC"(%[[OP_B]], %[[ARG_1]])
@@ -172,7 +172,7 @@
// CHECK-NEXT: %[[OP_E:[0-9]*]] = "tf.opE"(%[[OP_D]], %[[OP_B]])
// CHECK-NEXT: %[[OP_F:[0-9]*]] = "tf.opF"(%[[OP_E]], %[[OP_E]])
// CHECK-NEXT: tf_executor.yield %[[OP_F]] : tensor<i32>
-// CHECK: tf_executor.fetch %[[ISLAND]]#0 : tensor<i32>
+// CHECK: tf_executor.fetch %[[ISLAND]] : tensor<i32>
// Test if islands can be merged when non dependent islands are interleaved.
@@ -191,7 +191,7 @@
return %0#0, %0#1 : tensor<i32>, tensor<i32>
}
-// CHECK: %[[ISLAND_0:[0-9]*]]:3 = tf_executor.island {
+// CHECK: %[[ISLAND_0:.*]]:2, %{{.*}} = tf_executor.island {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-NEXT: %[[OP_C:[0-9]*]] = "tf.opC"(%[[OP_A]])
// CHECK-NEXT: %{{[0-9]*}} = "tf.opE"(%[[ARG_0]])
@@ -226,25 +226,25 @@
return
}
-// CHECK: %[[ISLAND_0:[0-9]*]]:2 = tf_executor.island wraps "tf.opA"
-// CHECK: %[[ENTER:[0-9]*]]:2 = tf_executor.Enter %[[ISLAND_0]]#0
-// CHECK-NEXT: %[[ISLAND_1:[0-9]*]] = tf_executor.island wraps "tf.opB"()
-// CHECK: %[[NEXTIT_SRC:[0-9]*]]:3 = tf_executor.NextIteration.Source
-// CHECK-NEXT: %[[MERGE:[0-9]*]]:3 = tf_executor.Merge %[[NEXTIT_SRC]]#0, %[[ENTER]]#0
-// CHECK-NEXT: %[[ISLAND_2:[0-9]*]]:2 = tf_executor.island(%[[MERGE]]#2) {
+// CHECK: %[[ISLAND_0:.*]], %[[ISLAND_0_control:.*]] = tf_executor.island wraps "tf.opA"
+// CHECK: %[[ENTER:.*]], %[[ENTER_control:.*]] = tf_executor.Enter %[[ISLAND_0]]
+// CHECK-NEXT: %[[ISLAND_1:.*]] = tf_executor.island wraps "tf.opB"()
+// CHECK: %[[NEXTIT_SRC:.*]], %[[NEXTIT_SRC_token:.*]], %[[NEXTIT_SRC_control:.*]] = tf_executor.NextIteration.Source
+// CHECK-NEXT: %[[MERGE:.*]], %[[MERGE_index:.*]], %[[MERGE_control:.*]] = tf_executor.Merge %[[NEXTIT_SRC]], %[[ENTER]]
+// CHECK-NEXT: %[[ISLAND_2:.*]], %[[ISLAND_2_control:.*]] = tf_executor.island(%[[MERGE_control]]) {
// CHECK-NEXT: %[[OP_C:.*]] = "tf.opC"
-// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[MERGE]]#0, %[[OP_C]])
+// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[MERGE]], %[[OP_C]])
// CHECK-NEXT: tf_executor.yield %[[OP_D]] : tensor<*xi1>
-// CHECK: %[[COND:[0-9]*]]:2 = tf_executor.LoopCond %[[ISLAND_2:[0-9]*]]#0
-// CHECK-NEXT: %[[SWITCH:[0-9]*]]:3 = tf_executor.Switch %[[MERGE]]#0, %[[COND]]#0
-// CHECK-NEXT: %[[EXIT:[0-9]*]]:2 = tf_executor.Exit %[[SWITCH]]#0
-// CHECK-NEXT: %[[ISLAND_3:[0-9]*]]:2 = tf_executor.island {
-// CHECK-NEXT: %[[OP_E:[0-9]*]] = "tf.opE"(%[[SWITCH]]#1)
+// CHECK: %[[COND:.*]], %[[COND_control:.*]] = tf_executor.LoopCond %[[ISLAND_2]]
+// CHECK-NEXT: %[[SWITCH_false:.*]], %[[SWITCH_true:.*]], %[[SWITCH_control:.*]] = tf_executor.Switch %[[MERGE]], %[[COND]]
+// CHECK-NEXT: %[[EXIT:.*]], %[[EXIT_control:.*]] = tf_executor.Exit %[[SWITCH_false]]
+// CHECK-NEXT: %[[ISLAND_3:.*]], %[[ISLAND_3_control:.*]] = tf_executor.island {
+// CHECK-NEXT: %[[OP_E:[0-9]*]] = "tf.opE"(%[[SWITCH_true]])
// CHECK-NEXT: %[[OP_F:.*]] = "tf.opF"
// CHECK-NEXT: %[[OP_G:[0-9]*]] = "tf.opG"(%[[OP_E]], %[[OP_F]])
// CHECK-NEXT: tf_executor.yield %[[OP_G]] : tensor<*xi32>
-// CHECK: %[[CT:[0-9]*]] = tf_executor.ControlTrigger %[[ISLAND_1]], %[[ISLAND_3]]#1, %[[EXIT]]#1
-// CHECK-NEXT: tf_executor.NextIteration.Sink [%[[NEXTIT_SRC]]#1] %[[ISLAND_3]]#0, %[[CT]]
+// CHECK: %[[CT:.*]] = tf_executor.ControlTrigger %[[ISLAND_1]], %[[ISLAND_3_control]], %[[EXIT_control]]
+// CHECK-NEXT: tf_executor.NextIteration.Sink [%[[NEXTIT_SRC_token]]] %[[ISLAND_3]], %[[CT]]
// Test no merging took place as cycle would be formed otherwise.
@@ -262,11 +262,11 @@
return
}
-// CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island wraps "tf.opA"
-// CHECK: %[[CT:[0-9]*]] = tf_executor.ControlTrigger %[[ISLAND]]#1
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island wraps "tf.opA"
+// CHECK: %[[CT:.*]] = tf_executor.ControlTrigger %[[ISLAND_control]]
// CHECK-NEXT: tf_executor.island(%[[CT]]) {
// CHECK-NEXT: %[[OP_B:[0-9]*]] = "tf.opB"
-// CHECK-NEXT: tf_executor.yield %[[ISLAND]]#0, %[[OP_B]] : tensor<1xf32>, tensor<1xf32>
+// CHECK-NEXT: tf_executor.yield %[[ISLAND]], %[[OP_B]] : tensor<1xf32>, tensor<1xf32>
// Test that the island was merged into its result.
@@ -316,9 +316,9 @@
// CHECK-NEXT: [[OP_A:[0-9]*]] = "tf.opA"
// CHECK-NEXT: [[INNER_GRAPH:[0-9]*]] = tf_executor.graph {
// CHECK-NEXT: [[CT:[0-9]*]] = tf_executor.ControlTrigger
-// CHECK-NEXT: [[ISLAND_1:[0-9]*]]:2 = tf_executor.island(%[[CT]])
+// CHECK-NEXT: %[[ISLAND_1:.*]], %{{.*}} = tf_executor.island(%[[CT]])
// CHECK-NEXT: "tf.opB"(%[[OP_A]])
-// CHECK: tf_executor.fetch %[[ISLAND_1]]#0 : tensor<1xf32>
+// CHECK: tf_executor.fetch %[[ISLAND_1]] : tensor<1xf32>
// CHECK: tf_executor.yield
@@ -344,12 +344,12 @@
// CHECK: tf_executor.island wraps "tf.opA"
// CHECK: tf_executor.island
// CHECK: tf_executor.graph {
-// CHECK-NEXT: [[ISLAND_1:[0-9]*]]:2 = tf_executor.island {
+// CHECK-NEXT: %[[ISLAND_1:.*]], %{{.*}} = tf_executor.island {
// CHECK-NEXT: "tf.opB"
// CHECK-NEXT: [[OP_C:[0-9]*]] = "tf.opC"
// CHECK-NEXT: [[OP_D:[0-9]*]] = "tf.opD"(%[[OP_C]])
// CHECK-NEXT: tf_executor.yield %[[OP_D]] : tensor<1xf32>
-// CHECK: tf_executor.fetch %[[ISLAND_1]]#0 : tensor<1xf32>
+// CHECK: tf_executor.fetch %[[ISLAND_1]] : tensor<1xf32>
// Test merging islands with control island operands and island results only if
@@ -370,7 +370,7 @@
return
}
-// CHECK: %[[ISLAND:[0-9]*]] = tf_executor.island
+// CHECK: %[[ISLAND:.*]] = tf_executor.island
// CHECK: tf_executor.ControlTrigger %[[ISLAND]]
// CHECK: %[[CT:[0-9]*]] = tf_executor.ControlTrigger
// CHECK: tf_executor.island(%[[ISLAND]], %[[CT]])
@@ -405,11 +405,11 @@
}
// CHECK: tf_executor.graph
-// CHECK-NEXT: %[[ISLAND:[0-9]*]]:2 = tf_executor.island {
+// CHECK-NEXT: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK-NEXT: "tf.Const"
// CHECK-NEXT: tf_executor.yield
// CHECK-NEXT: }
-// CHECK-NEXT: tf_executor.fetch %[[ISLAND]]#0, %[[ISLAND]]#1
+// CHECK-NEXT: tf_executor.fetch %[[ISLAND]], %[[ISLAND_control]]
// Check that we merge two islands with independent fetches when both are
// control fetches.
@@ -423,7 +423,7 @@
return
}
-// CHECK: %[[ISLAND:[0-9]*]] = tf_executor.island
+// CHECK: %[[ISLAND:.*]] = tf_executor.island
// CHECK-NOT: tf_executor.island
// CHECK: tf_executor.fetch %[[ISLAND]]
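
The hunks above all track one printer change: multi-result executor ops are now printed with named results (%outputs, %control = ...) rather than a single numbered value plus projections (%0:2 with %0#0 / %0#1), so the FileCheck captures move from %[[ISLAND:[0-9]*]]:2 and %[[ISLAND]]#0 to %[[ISLAND:.*]], %[[ISLAND_control:.*]] and plain %[[ISLAND]]. A rough Python sketch of the capture-and-reuse mechanic these patterns rely on (a toy model, not FileCheck itself):

import re

bindings = {}

def filecheck_line(pattern, line):
    # [[NAME:regex]] defines a capture; a later [[NAME]] must match
    # the previously captured text verbatim.
    regex = re.sub(r"\[\[(\w+):(.*?)\]\]", r"(?P<\1>\2)", pattern)
    regex = re.sub(r"\[\[(\w+)\]\]",
                   lambda m: re.escape(bindings[m.group(1)]), regex)
    m = re.search(regex, line)
    assert m is not None, (pattern, line)
    bindings.update(m.groupdict())

filecheck_line(r"%[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island",
               '%outputs, %control = tf_executor.island wraps "tf.opA"')
filecheck_line(r"tf_executor.fetch %[[ISLAND]]",
               "tf_executor.fetch %outputs : tensor<i32>")
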
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graph_pruning.mlir b/tensorflow/compiler/mlir/tensorflow/tests/graph_pruning.mlir
index bd10512..8585790 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graph_pruning.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graph_pruning.mlir
@@ -84,3 +84,86 @@
return %0 : i32
}
+// Check that NextIteration.Source/Sink ops and associated ops are deleted when
+// the associated loop is unreachable.
+// CHECK-LABEL: func @unreachable_loop
+func @unreachable_loop() {
+// CHECK: tf_executor.graph
+// CHECK-NEXT: tf_executor.fetch
+ tf_executor.graph {
+ %0:3 = tf_executor.NextIteration.Source : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %1:2 = tf_executor.island wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<0> : tensor<i32>} : () -> tensor<i32>
+ %2:2 = tf_executor.Enter %1#0 frame "while/while_context" : (tensor<i32>) -> (tensor<*xi32>, !tf_executor.control) {T = "tfdtype$DT_INT32"}
+ %3:3 = tf_executor.Merge %2#0, %0#0 : tensor<*xi32> {N = 2 : i64, T = "tfdtype$DT_INT32"}
+ %4:2 = tf_executor.island(%3#2) wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<10> : tensor<i32>} : () -> tensor<i32>
+ %5:2 = tf_executor.island wraps "tf.Less"(%3#0, %4#0) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi1>
+ %6:2 = tf_executor.LoopCond %5#0 : (tensor<*xi1>) -> (tensor<i1>, !tf_executor.control) {}
+ %7:3 = tf_executor.Switch %3#0, %6#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %8:2 = tf_executor.Exit %7#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %9:2 = tf_executor.island wraps "tf.Identity"(%7#1) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>) -> tensor<*xi32>
+ %10:2 = tf_executor.island(%9#1) wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<1> : tensor<i32>} : () -> tensor<i32>
+ %11:2 = tf_executor.island wraps "tf.Add"(%9#0, %10#0) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
+ tf_executor.NextIteration.Sink [%0#1] %11#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ tf_executor.fetch
+ }
+ return
+}
+
+// Check that NextIteration.Sink and associated ops are not deleted when the
+// associated loop is reachable.
+// CHECK-LABEL: func @reachable_loop
+func @reachable_loop() {
+// CHECK: tf_executor.NextIteration.Source
+// CHECK: "tf.Const"
+// CHECK: tf_executor.Enter
+// CHECK: tf_executor.Merge
+// CHECK: "tf.Const"
+// CHECK: "tf.Less"
+// CHECK: tf_executor.LoopCond
+// CHECK: tf_executor.Switch
+// CHECK: %[[EXIT:.*]], %{{.*}} = tf_executor.Exit
+// CHECK: "tf.Identity"
+// CHECK: "tf.Const"
+// CHECK: "tf.Add"
+// CHECK: tf_executor.NextIteration.Sink
+// CHECK: tf_executor.fetch %[[EXIT]]
+ %0 = tf_executor.graph {
+ %0:3 = tf_executor.NextIteration.Source : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %1:2 = tf_executor.island wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<0> : tensor<i32>} : () -> tensor<i32>
+ %2:2 = tf_executor.Enter %1#0 frame "while/while_context" : (tensor<i32>) -> (tensor<*xi32>, !tf_executor.control) {T = "tfdtype$DT_INT32"}
+ %3:3 = tf_executor.Merge %2#0, %0#0 : tensor<*xi32> {N = 2 : i64, T = "tfdtype$DT_INT32"}
+ %4:2 = tf_executor.island(%3#2) wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<10> : tensor<i32>} : () -> tensor<i32>
+ %5:2 = tf_executor.island wraps "tf.Less"(%3#0, %4#0) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi1>
+ %6:2 = tf_executor.LoopCond %5#0 : (tensor<*xi1>) -> (tensor<i1>, !tf_executor.control) {}
+ %7:3 = tf_executor.Switch %3#0, %6#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %8:2 = tf_executor.Exit %7#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ %9:2 = tf_executor.island wraps "tf.Identity"(%7#1) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>) -> tensor<*xi32>
+ %10:2 = tf_executor.island(%9#1) wraps "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<1> : tensor<i32>} : () -> tensor<i32>
+ %11:2 = tf_executor.island wraps "tf.Add"(%9#0, %10#0) {T = "tfdtype$DT_INT32"} : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
+ tf_executor.NextIteration.Sink [%0#1] %11#0 : tensor<*xi32> {T = "tfdtype$DT_INT32"}
+ tf_executor.fetch %8#0 : tensor<*xi32>
+ }
+ return
+}
+
+// Check that ops leading to a fetch via a control edge are not removed.
+// CHECK-LABEL: func @control_fetch
+func @control_fetch(%arg0 : i32) {
+// CHECK: tf_executor.island
+// CHECK: tf_executor.island
+// CHECK: tf_executor.island
+ tf_executor.graph {
+ %0 = tf_executor.island {
+ tf_executor.yield
+ }
+ %1:2 = tf_executor.island {
+ tf_executor.yield %arg0 : i32
+ }
+ %2 = tf_executor.island(%0) {
+ %a = "op.A"(%1#0) : (i32) -> i32
+ tf_executor.yield
+ }
+ tf_executor.fetch %2 : !tf_executor.control
+ }
+ return
+}
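
The three tests added above pin down the pruning rule: reachability is computed backwards from the fetches, and a NextIteration.Source that survives must also keep its paired Sink alive so the loop's back-edge is preserved, while a loop nothing fetches disappears entirely. A minimal sketch of that walk under assumed semantics (the actual pass is C++ and also follows control operands):

from collections import deque

def prune(operands, fetches, sink_of_source):
    # operands: op name -> names of its data/control inputs.
    # sink_of_source: NextIteration.Source name -> paired Sink name.
    keep, work = set(), deque(fetches)
    while work:
        op = work.popleft()
        if op in keep:
            continue
        keep.add(op)
        work.extend(operands[op])
        if op in sink_of_source:
            work.append(sink_of_source[op])  # keep the loop back-edge
    return keep

# @unreachable_loop: nothing is fetched, so the whole loop vanishes.
assert prune({"src": [], "sink": ["add"], "add": ["src"]},
             fetches=[], sink_of_source={"src": "sink"}) == set()
# @reachable_loop: fetching the Exit keeps the NextIteration pair too.
kept = prune({"src": [], "sink": ["add"], "add": ["src"], "exit": ["src"]},
             fetches=["exit"], sink_of_source={"src": "sink"})
assert kept == {"exit", "src", "add", "sink"}
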
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/add.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/add.pbtxt
index d1cdb12..0b9a8ac 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/add.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/add.pbtxt
@@ -40,15 +40,15 @@
# CHECK: func @main(%arg0: tensor<10xi32>, %arg1: tensor<10xi32>) -> tensor<10xi32>
# CHECK: attributes {tf.entry_function = {inputs = "input0,input1", outputs = "Add"}} {
-# CHECK: %[[add:[0-9]+]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-# CHECK: fetch %[[add]]#0
+# CHECK: %[[add:.*]], %[[add_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+# CHECK: fetch %[[add]]
# SOME: func @main(%arg0: tensor<10xi32>, %arg1: tensor<10xi32>) -> tensor<10xi32>
# SOME: attributes {tf.entry_function = {inputs = "input0,input1", outputs = "Add"}} {
-# SOME: %[[add:[0-9]+]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-# SOME: fetch %[[add]]#0
+# SOME: %[[add:.*]], %[[add_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+# SOME: fetch %[[add]]
# NONE: func @main(%arg0: tensor<10xi32>, %arg1: tensor<10xi32>) -> tensor<10xi32>
# NONE: attributes {tf.entry_function = {inputs = "input0,input1", outputs = "Add"}} {
-# NONE: %[[add:[0-9]+]]:2 = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
-# NONE: fetch %[[add]]#0
+# NONE: %[[add:.*]], %[[add_control:.*]] = tf_executor.island wraps "tf.Add"(%arg0, %arg1)
+# NONE: fetch %[[add]]
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/feed-control-dep.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/feed-control-dep.pbtxt
index 258d205..b5439c5 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/feed-control-dep.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/feed-control-dep.pbtxt
@@ -60,9 +60,9 @@
}
# CHECK: func @main(%[[ARG_0:[a-z0-9]+]]: tensor<f32>) -> tensor<f32>
-# CHECK-NEXT: tf.entry_function = {inputs = "input", outputs = "output_node"}
+# CHECK: tf.entry_function = {inputs = "input", outputs = "output_node"}
# CHECK: %[[GRAPH:[0-9]+]] = tf_executor.graph
-# CHECK: %[[CONST:[0-9]+]]:2 = tf_executor.island wraps "tf.Const"()
-# CHECK: %[[OUTPUT:[0-9]+]]:2 = tf_executor.island wraps "tf.Identity"(%[[CONST]]#0)
-# CHECK: tf_executor.fetch %[[OUTPUT]]#0
+# CHECK: %[[CONST:.*]], %[[CONST_control:.*]] = tf_executor.island wraps "tf.Const"()
+# CHECK: %[[OUTPUT:.*]], %[[OUTPUT_control:.*]] = tf_executor.island wraps "tf.Identity"(%[[CONST]])
+# CHECK: tf_executor.fetch %[[OUTPUT]]
# CHECK: return %[[GRAPH]]
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-as-function.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-as-function.pbtxt
index 409e198..3444f3e 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-as-function.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-as-function.pbtxt
@@ -6,11 +6,11 @@
# CHECK: func @main(%arg0: tensor<*x!tf.resource>, %arg1: tensor<*x!tf.resource<tensor<3x3x1x32xf32>>>, %arg2: tensor<*xf32>, %arg3: tensor<2x4x6x8xi32>) -> (tensor<f32>, tensor<f32>)
# CHECK: attributes {tf.entry_function = {inputs = "args_0,args_1,args_2,args_3", outputs = "rets_0,rets_1"}} {
-# CHECK: %[[ISLAND_0:[0-9]]]:2 = tf_executor.island wraps "tf.Const"
-# CHECK: %[[ISLAND_1:[0-9]]]:2 = tf_executor.island wraps "tf.Identity"(%[[ISLAND_0]]#0)
-# CHECK: %[[ISLAND_2:[0-9]]]:2 = tf_executor.island wraps "tf.StatefulPartitionedCall"
+# CHECK: %[[ISLAND_0:.*]], %[[ISLAND_0_control:.*]] = tf_executor.island wraps "tf.Const"
+# CHECK: %[[ISLAND_1:.*]], %[[ISLAND_1_control:.*]] = tf_executor.island wraps "tf.Identity"(%[[ISLAND_0]])
+# CHECK: %[[ISLAND_2:.*]], %[[ISLAND_2_control:.*]] = tf_executor.island wraps "tf.StatefulPartitionedCall"
# CHECK-SAME: f = @[[FUNC:[a-z0-9]*]]
-# CHECK: tf_executor.fetch %[[ISLAND_1]]#0, %[[ISLAND_2]]#0 : tensor<f32>, tensor<f32>
+# CHECK: tf_executor.fetch %[[ISLAND_1]], %[[ISLAND_2]] : tensor<f32>, tensor<f32>
# CHECK: func @[[FUNC]](%arg0: tensor<*xf32>, %arg1: tensor<*x!tf.resource>) -> tensor<*xf32>
node {
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-diff-island.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-diff-island.pbtxt
index fa095a1..2c65237 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-diff-island.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-diff-island.pbtxt
@@ -5,10 +5,10 @@
# FetchOp.
# Match the island containing the "tf.Neg", capture the output
-# CHECK: %[[ISLAND_0:[0-9]*]]:2 = tf_executor.island wraps "tf.Neg"
+# CHECK: %[[ISLAND_0:[a-z_0-9]*]], %[[ISLAND_0_control:[a-z_0-9]*]] = tf_executor.island wraps "tf.Neg"
# Check that the tf.Neg control is passed to the fetch
-# CHECK: tf_executor.fetch {{.*}} %[[ISLAND_0]]#1 : tensor<*xf32>, !tf_executor.control
+# CHECK: tf_executor.fetch {{.*}} %[[ISLAND_0_control]] : tensor<*xf32>, !tf_executor.control
node {
name: "const"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-same-island.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-same-island.pbtxt
index dbb1d14..7b4804c 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-same-island.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-function-control-ret-same-island.pbtxt
@@ -5,10 +5,10 @@
# FetchOp.
# Match the island containing the "tf.Neg", capture the output
-# CHECK: %[[ISLAND:[0-9]*]]:2 = tf_executor.island wraps "tf.Neg"
+# CHECK: %[[ISLAND:[a-z_0-9]*]], %[[ISLAND_control:[a-z_0-9]*]] = tf_executor.island wraps "tf.Neg"
# Check that the tf.Neg data output and control are passed to the fetch
-# CHECK: tf_executor.fetch %[[ISLAND]]#0, %[[ISLAND]]#1 : tensor<*xf32>, !tf_executor.control
+# CHECK: tf_executor.fetch %[[ISLAND]], %[[ISLAND_control]] : tensor<*xf32>, !tf_executor.control
node {
name: "const"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-gradient-def.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-gradient-def.pbtxt
index b7179ae..5ab948e 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-gradient-def.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-gradient-def.pbtxt
@@ -5,7 +5,7 @@
# added to its list of function attributes.
# CHECK: func @foo0(
-# CHECK-NEXT: tf.gradient = @foo_grad
+# CHECK: tf.gradient = @foo_grad
node {
name: "Const"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-scalar-input.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-scalar-input.pbtxt
index d162d79..bb4701e 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-scalar-input.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-scalar-input.pbtxt
@@ -3,10 +3,10 @@
# Verify that we correctly match the input/output when they are scalars.
# CHECK: func @main(%arg0: tensor<f32> {tf.device = "/device:CPU:0"}) -> (tensor<f32>, tensor<f32>)
-# CHECK-NEXT: attributes {tf.entry_function = {inputs = "input", outputs = "out:1,out"}} {
+# CHECK: attributes {tf.entry_function = {inputs = "input", outputs = "out:1,out"}} {
# CHECK: tf.Relu
-# CHECK: %[[IDENTITY:[0-9]+]]:3 = tf_executor.island wraps "tf.IdentityN"
+# CHECK: %[[IDENTITY:[a-z_0-9]+]]:2, {{.*}} = tf_executor.island wraps "tf.IdentityN"
# CHECK: fetch %[[IDENTITY]]#1, %[[IDENTITY]]#0 : tensor<f32>, tensor<f32>
node {
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-while-loop.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-while-loop.pbtxt
index 4ada2f6..16cdde9 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-while-loop.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-while-loop.pbtxt
@@ -4,10 +4,10 @@
# to break the cycle.
# CHECK-LABEL: func @main()
-# CHECK: %[[NEXTITERATION:[0-9]+]]:3 = tf_executor.NextIteration.Source
-# CHECK: tf_executor.Merge {{.*}} %[[NEXTITERATION]]#0
+# CHECK: %[[NEXTITERATION:[a-z0-9]+]], %[[NEXTITERATION_token:[a-z0-9]+]], {{.*}} = tf_executor.NextIteration.Source
+# CHECK: tf_executor.Merge {{.*}} %[[NEXTITERATION]]
-# CHECK: tf_executor.NextIteration.Sink [%[[NEXTITERATION]]#1]
+# CHECK: tf_executor.NextIteration.Sink [%[[NEXTITERATION_token]]]
node {
name: "Const"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/multiple-use-next-iteration.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/multiple-use-next-iteration.pbtxt
index 09a900e..8199484 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/multiple-use-next-iteration.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/multiple-use-next-iteration.pbtxt
@@ -4,9 +4,9 @@
# Imported.
# CHECK-LABEL: func @main()
-# CHECK: %[[NEXTITERATION:[0-9]+]]:3 = tf_executor.NextIteration.Source
-# CHECK: tf_executor.Merge {{.*}}, %[[NEXTITERATION]]#0
-# CHECK: tf_executor.Merge {{.*}}, %[[NEXTITERATION]]#0
+# CHECK: %[[NEXTITERATION:[a-z_0-9]+]], {{.*}} = tf_executor.NextIteration.Source
+# CHECK: tf_executor.Merge {{.*}}, %[[NEXTITERATION]]
+# CHECK: tf_executor.Merge {{.*}}, %[[NEXTITERATION]]
node {
name: "Const"
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/stateful-attribute.pbtxt b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/stateful-attribute.pbtxt
index 54877e8..cb4b00f 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/stateful-attribute.pbtxt
+++ b/tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/stateful-attribute.pbtxt
@@ -87,5 +87,5 @@
# Find the callee and verify it has the stateful attribute set.
# CHECK: func @[[FUNCTION_FOO]]
-# CHECK-NEXT: attributes
+# CHECK-SAME: attributes
# CHECK-SAME: tf.signature.is_stateful
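
Several hunks in this change (feed-control-dep, graph-gradient-def, graph-scalar-input, this file, and basic.py below) relax CHECK-NEXT to CHECK or CHECK-SAME: the importer no longer guarantees that function attributes print on the line immediately after the signature, and here they print on the same line. A toy model of the three directives (illustrative only, not FileCheck):

def run_checks(lines, directives):
    row, col = 0, 0
    for kind, needle in directives:
        if kind == "CHECK-SAME":           # same line, after last match
            col = lines[row].index(needle, col) + len(needle)
        elif kind == "CHECK-NEXT":         # exactly the next line
            row, col = row + 1, 0
            col = lines[row].index(needle) + len(needle)
        else:                              # CHECK: anywhere later
            while needle not in lines[row][col:]:
                row, col = row + 1, 0
            col = lines[row].index(needle, col) + len(needle)
    return True

# Attributes printed on the signature line: CHECK-NEXT would throw
# here, CHECK-SAME matches.
lines = ["func @foo() attributes {tf.signature.is_stateful} {", "..."]
assert run_checks(lines, [("CHECK", "func @foo"),
                          ("CHECK-SAME", "attributes"),
                          ("CHECK-SAME", "tf.signature.is_stateful")])
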
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/isolate-placer.mlir b/tensorflow/compiler/mlir/tensorflow/tests/isolate-placer.mlir
index 59a6ff1..d94fcb0 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/isolate-placer.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/isolate-placer.mlir
@@ -1,8 +1,8 @@
// RUN: tf-opt %s --run-tf-graph-optimization --graph-passes=IsolatePlacerInspectionRequiredOpsPass | FileCheck %s
func @main() {
- %0:2 = "_tf.VarHandleOp"() {dtype = "tfdtype$DT_FLOAT", shape = "tfshape$"} : () -> (tensor<!tf.resource>, !_tf.control)
- %1:2 = "_tf.StatefulPartitionedCall"(%0#0) {Tin = ["tfdtype$DT_RESOURCE"], Tout = ["tfdtype$DT_RESOURCE"], config = "", config_proto = "", executor_type = "", f = @foo} : (tensor<!tf.resource>) -> (tensor<!tf.resource>, !_tf.control) loc("call_foo")
+ %0:2 = "_tf.VarHandleOp"() {container = "c", shared_name = "n"} : () -> (tensor<!tf.resource<tensor<8xf32>>>, !_tf.control)
+ %1:2 = "_tf.StatefulPartitionedCall"(%0#0) {Tin = ["tfdtype$DT_RESOURCE"], Tout = ["tfdtype$DT_RESOURCE"], config = "", config_proto = "", executor_type = "", f = @foo} : (tensor<!tf.resource<tensor<8xf32>>>) -> (tensor<!tf.resource<tensor<8xf32>>>, !_tf.control) loc("call_foo")
return
}
@@ -13,16 +13,16 @@
// The IsolatePlacerInspectionRequiredOpsPass adds Identities for each input/output of function-calling ops.
// Capture the result of the input to the function call.
-// CHECK: [[VARIABLE_REG:%[0-9]*]]:2 = tf_executor.island wraps "tf.VarHandleOp"()
+// CHECK: [[VARIABLE_REG:%.*]], [[VARIABLE_REG_control:%.*]] = tf_executor.island wraps "tf.VarHandleOp"()
// Test for the presence of an Identity op between the input and the function call.
-// CHECK: [[IDENTITY_REG:%[0-9]*]]:2 = tf_executor.island wraps "tf.Identity"([[VARIABLE_REG]]#0)
+// CHECK: [[IDENTITY_REG:%.*]], [[IDENTITY_REG_control:%.*]] = tf_executor.island wraps "tf.Identity"([[VARIABLE_REG]])
-// CHECK: [[CALL_RESULT_REG:%[0-9]*]]:2 = tf_executor.island wraps "tf.StatefulPartitionedCall"([[IDENTITY_REG]]#0)
+// CHECK: [[CALL_RESULT_REG:%.*]], [[CALL_RESULT_REG_control:%.*]] = tf_executor.island wraps "tf.StatefulPartitionedCall"([[IDENTITY_REG]])
// CHECK-SAME: f = @[[FUNCTION:[a-zA-Z0-9_]*]]
// Match the inserted Identity op for call output.
-// CHECK: "tf.Identity"([[CALL_RESULT_REG]]#0)
+// CHECK: "tf.Identity"([[CALL_RESULT_REG]])
// Match the function name
// CHECK: func @[[FUNCTION]]
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/lower_tf.mlir b/tensorflow/compiler/mlir/tensorflow/tests/lower_tf.mlir
index c830b4a..c1418fb 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/lower_tf.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/lower_tf.mlir
@@ -207,3 +207,20 @@
return %0 : tensor<*xcomplex<f32>>
}
+
+// CHECK-LABEL: func @ZerosLike_unranked
+func @ZerosLike_unranked(%arg0: tensor<*xi32>) -> tensor<*xi32> {
+ // CHECK: %[[ZERO:.*]] = "tf.Const"() {value = dense<0> : tensor<i32>} : () -> tensor<i32>
+ // CHECK: %[[SHAPE:.*]] = "tf.Shape"(%arg0) : (tensor<*xi32>) -> tensor<?xi64>
+ // CHECK: "tf.BroadcastTo"(%[[ZERO]], %[[SHAPE]]) : (tensor<i32>, tensor<?xi64>) -> tensor<*xi32>
+
+ %0 = "tf.ZerosLike"(%arg0) : (tensor<*xi32>) -> tensor<*xi32>
+ return %0 : tensor<*xi32>
+}
+
+// CHECK-LABEL: func @ZerosLike_variant
+func @ZerosLike_variant(%arg0: tensor<!tf.variant<tensor<2xi32>>>) -> tensor<!tf.variant<tensor<2xi32>>> {
+ // CHECK: tf.ZerosLike
+ %0 = "tf.ZerosLike"(%arg0) : (tensor<!tf.variant<tensor<2xi32>>>) -> tensor<!tf.variant<tensor<2xi32>>>
+ return %0 : tensor<!tf.variant<tensor<2xi32>>>
+}
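
The unranked test above checks the lowering ZerosLike(x) -> BroadcastTo(Const(0), Shape(x)), which works even when x's rank is unknown at compile time; variant element types can't be materialized as a dense zero constant, so @ZerosLike_variant verifies the op is left untouched. A numpy stand-in for the lowered pattern (a sketch, not the TF kernel):

import numpy as np

def zeros_like_via_broadcast(x):
    zero = np.zeros((), dtype=x.dtype)          # tf.Const, scalar 0
    shape = np.array(x.shape, dtype=np.int64)   # tf.Shape
    return np.broadcast_to(zero, shape)         # tf.BroadcastTo

x = np.arange(6, dtype=np.int32).reshape(2, 3)
assert (zeros_like_via_broadcast(x) == np.zeros_like(x)).all()
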
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/replicate_to_island.mlir b/tensorflow/compiler/mlir/tensorflow/tests/replicate_to_island.mlir
index 7b7902f..1dcf29d 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/replicate_to_island.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/replicate_to_island.mlir
@@ -20,9 +20,9 @@
// CHECK: %[[CT_0:[0-9]*]] = tf_executor.ControlTrigger
// CHECK: %[[CT_1:[0-9]*]] = tf_executor.ControlTrigger
-// CHECK: %[[ISLAND_0:[0-9]*]] = tf_executor.island(%[[CT_0]], %[[CT_1]])
-// CHECK: %[[ISLAND_1:[0-9]*]] = tf_executor.island(%[[CT_0]], %[[CT_1]])
-// CHECK: %[[ISLAND_2:[0-9]*]] = tf_executor.island(%[[ISLAND_0]], %[[ISLAND_1]])
+// CHECK: %[[ISLAND_0:[a-z_0-9]*]] = tf_executor.island(%[[CT_0]], %[[CT_1]])
+// CHECK: %[[ISLAND_1:[a-z_0-9]*]] = tf_executor.island(%[[CT_0]], %[[CT_1]])
+// CHECK: %[[ISLAND_2:[a-z_0-9]*]] = tf_executor.island(%[[ISLAND_0]], %[[ISLAND_1]])
// Tests devices are not set if no devices were defined in replicate.
@@ -117,18 +117,18 @@
}
// CHECK: %[[CT:[0-9]*]] = tf_executor.ControlTrigger
-// CHECK: %[[ISLAND_0:[0-9]*]]:3 = tf_executor.island(%[[CT]])
+// CHECK: %[[ISLAND_0:[a-z_0-9]*]]:2, %{{.*}} = tf_executor.island(%[[CT]])
// CHECK: %[[OP_A_0:[0-9]*]] = "tf.opA"(%[[ARG_0]])
// CHECK-SAME: device = "/CPU:0"
// CHECK: %[[OP_B_0:[0-9]*]] = "tf.opB"(%[[OP_A_0]])
// CHECK-SAME: device = "/CPU:0"
// CHECK: tf_executor.yield %[[OP_A_0]], %[[OP_B_0]]
-// CHECK: %[[ISLAND_1:[0-9]*]]:3 = tf_executor.island(%[[CT]])
+// CHECK: %[[ISLAND_1:[a-z_0-9]*]]:2, %[[ISLAND_1_control:[a-z_0-9]*]] = tf_executor.island(%[[CT]])
// CHECK: %[[OP_A_1:[0-9]*]] = "tf.opA"(%[[ARG_1]])
// CHECK-SAME: device = "/GPU:1"
// CHECK: %[[OP_B_1:[0-9]*]] = "tf.opB"(%[[OP_A_1]])
// CHECK-SAME: device = "/GPU:1"
// CHECK: tf_executor.yield %[[OP_A_1]], %[[OP_B_1]]
-// CHECK: %[[ISLAND_2:[0-9]*]]:2 = tf_executor.island(%[[ISLAND_1]]#2)
+// CHECK: %[[ISLAND_2:.*]], %[[ISLAND_2_control:.*]] = tf_executor.island(%[[ISLAND_1_control]])
// CHECK: tf_executor.yield %[[ISLAND_0]]#0
-// CHECK: tf_executor.fetch %[[ISLAND_2]]#0
+// CHECK: tf_executor.fetch %[[ISLAND_2]]
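
The expectations above describe the replicate-to-island expansion: one island per replica, with the replica's device (here /CPU:0 and /GPU:1) stamped onto each op when devices were specified, and no device otherwise. A toy version of that expansion under the same assumptions (not the actual C++ pass):

def expand_replicate(body_ops, n, devices=None):
    islands = []
    for replica in range(n):
        ops = [dict(op) for op in body_ops]
        if devices:                      # devices are optional on replicate
            for op in ops:
                op["device"] = devices[replica]
        islands.append(ops)
    return islands

islands = expand_replicate([{"name": "tf.opA"}, {"name": "tf.opB"}], n=2,
                           devices=["/CPU:0", "/GPU:1"])
assert islands[0][0]["device"] == "/CPU:0"
assert islands[1][1]["device"] == "/GPU:1"
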
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/resource_op_lifting.mlir b/tensorflow/compiler/mlir/tensorflow/tests/resource_op_lifting.mlir
index c150596..8ff72db 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/resource_op_lifting.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/resource_op_lifting.mlir
@@ -6,7 +6,7 @@
func @only_resource_load() -> tensor<*xi32> {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"(%[[RES_HANDLE]]) {dtype = "tfdtype$DT_INT32"}
// CHECK: "tf_device.launch"
@@ -32,7 +32,7 @@
func @only_resource_store() -> tensor<*xi32> {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[LAUNCH_RES:[0-9]*]]:2 = "tf_device.launch"
// CHECK: %[[COMPUTE_RES:[0-9]*]] = "tf.SomeComputation"()
@@ -59,7 +59,7 @@
func @same_resource_load_and_store() -> tensor<*xi32> {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"(%[[RES_HANDLE]]) {dtype = "tfdtype$DT_INT32"}
// CHECK: %[[LAUNCH_RES:[0-9]*]]:2 = "tf_device.launch"
@@ -89,7 +89,7 @@
func @decompose_assign_add_variable_op() -> tensor<*xi32> {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"(%[[RES_HANDLE]]) {dtype = "tfdtype$DT_INT32"}
// CHECK: %[[LAUNCH_RES:[0-9]*]]:2 = "tf_device.launch"
@@ -119,7 +119,7 @@
// CHECK-LABEL: func @decompose_assign_sub_variable_op
func @decompose_assign_sub_variable_op() -> tensor<*xi32> {
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"
// CHECK: %[[ONE:[0-9]*]] = "tf.Const"() {value = dense<1> : tensor<i32>}
@@ -145,7 +145,7 @@
func @decompose_resource_apply_gradient_descent() -> tensor<*xf32> {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %0 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %0 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"(%[[RES_HANDLE]]) {dtype = "tfdtype$DT_FLOAT"}
// CHECK: %[[LAUNCH_RES:[0-9]*]]:2 = "tf_device.launch"
@@ -181,7 +181,7 @@
%0 = "tf_device.launch"() ( {
// CHECK: %[[RES_HANDLE:[0-9]*]] = "tf.VarHandleOp"
- %1 = "tf.VarHandleOp"() : () -> tensor<*x!tf.resource>
+ %1 = "tf.VarHandleOp"() {container = "c", shared_name = "v"} : () -> tensor<*x!tf.resource>
// CHECK: %[[RES_READ_VAL:[0-9]*]] = "tf.ReadVariableOp"(%[[RES_HANDLE]])
%2 = "tf.ReadVariableOp"(%1) {dtype = "tfdtype$DT_INT32"} : (tensor<*x!tf.resource>) -> tensor<*xi32>
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/shape_inference.mlir b/tensorflow/compiler/mlir/tensorflow/tests/shape_inference.mlir
index 0b02ac2..acf236f 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/shape_inference.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/shape_inference.mlir
@@ -34,15 +34,29 @@
return %1 : tensor<*xf32>
}
+// Tests the case where an inference opportunity relies on folding.
+
+// CHECK-LABEL: func @simple_folding
+ func @simple_folding(%arg0: tensor<1x1x1x1xi32>, %arg1: tensor<1x1x1x1xf32>) -> tensor<?x?x?x?xf32> {
+// CHECK: %[[CST:.*]] = "tf.Const"{{.*}} {value = dense<1> : tensor<4xi32>} : () -> tensor<4xi32>
+// CHECK: %[[CONV:.*]] = "tf.Conv2DBackpropInput"(%[[CST]]
+// CHECK-SAME: (tensor<4xi32>, tensor<1x1x1x1xf32>, tensor<1x1x1x1xf32>) -> tensor<1x1x1x1xf32>
+// CHECK: %[[CAST:.*]] = "tf.Cast"(%[[CONV]]) {{.*}} : (tensor<1x1x1x1xf32>) -> tensor<?x?x?x?xf32>
+// CHECK: return %[[CAST]] : tensor<?x?x?x?xf32>
+ %0 = "tf.Shape"(%arg0) : (tensor<1x1x1x1xi32>) -> tensor<4xi32>
+ %1 = "tf.Conv2DBackpropInput"(%0, %arg1, %arg1) {
+ padding = "VALID", strides = [1, 1, 1, 1]
+ } : (tensor<4xi32>, tensor<1x1x1x1xf32>, tensor<1x1x1x1xf32>) -> tensor<?x?x?x?xf32>
+ return %1 : tensor<?x?x?x?xf32>
+ }
+
// Tests the case where an op's shape function returns non-fully-defined shapes.
// CHECK-LABEL: func @op_non_fully_defined_shape_fn
- func @op_non_fully_defined_shape_fn() -> tensor<?xi32> {
- %0 = "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<[]> : tensor<0xi32>} : () -> tensor<0xi32>
- %1 = "tf.Const"() {dtype = "tfdtype$DT_INT32", value = dense<[]> : tensor<0xi32>} : () -> tensor<0xi32>
+ func @op_non_fully_defined_shape_fn(%arg0: tensor<0xi32>, %arg1: tensor<0xi32>) -> tensor<?xi32> {
// CHECK: tf.BroadcastGradientArgs
// CHECK-SAME: (tensor<0xi32>, tensor<0xi32>) -> (tensor<?xi32>, tensor<?xi32>)
- %2:2 = "tf.BroadcastGradientArgs"(%0, %1) {T = "tfdtype$DT_INT32", name = "BroadcastGradientArgs"} : (tensor<0xi32>, tensor<0xi32>) -> (tensor<?xi32>, tensor<?xi32>)
+ %2:2 = "tf.BroadcastGradientArgs"(%arg0, %arg1) {T = "tfdtype$DT_INT32", name = "BroadcastGradientArgs"} : (tensor<0xi32>, tensor<0xi32>) -> (tensor<?xi32>, tensor<?xi32>)
return %2#0 : tensor<?xi32>
}
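
In @simple_folding above, inference only succeeds because folding runs first: tf.Shape of a statically shaped tensor<1x1x1x1xi32> folds to the constant [1, 1, 1, 1], Conv2DBackpropInput's shape function can then report a fixed 1x1x1x1 result, and a tf.Cast back to tensor<?x?x?x?xf32> keeps the function's declared signature intact. A tiny sketch of that fold-then-infer interplay (toy shape functions, assumed behavior):

def fold_shape(static_shape):
    # tf.Shape folds to a constant only when every dim is known.
    return list(static_shape) if None not in static_shape else None

def conv2d_backprop_input_result_shape(input_sizes):
    # The result shape is just the (folded) input_sizes operand.
    return tuple(input_sizes) if input_sizes is not None else None

assert conv2d_backprop_input_result_shape(fold_shape((1, 1, 1, 1))) == (1, 1, 1, 1)
assert conv2d_backprop_input_result_shape(fold_shape((None, 1, 1, 1))) is None
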
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf-functional-to-executor.mlir b/tensorflow/compiler/mlir/tensorflow/tests/tf-functional-to-executor.mlir
index b443b1f..4dad266 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf-functional-to-executor.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf-functional-to-executor.mlir
@@ -9,7 +9,7 @@
// CHECK-LABEL: func @multiple_return
// CHECK-SAME: (%[[ARG0:.*]]: tensor<*xi32>, %[[ARG1:.*]]: tensor<i32>) -> (tensor<*xi32>, tensor<*xi32>) {
// CHECK: %[[GRAPH_RESULT:.*]]:2 = tf_executor.graph {
-// CHECK: %[[ISLAND_RESULT:.*]]:3 = tf_executor.island {
+// CHECK: %[[ISLAND_RESULT:.*]]:2, {{.*}} = tf_executor.island {
// CHECK: %[[ADD1:.*]] = "tf.Add"(%[[ARG0]], %[[ARG1]]) : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
// CHECK: %[[ADD2:.*]] = "tf.Add"(%[[ADD1]], %[[ARG1]]) : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
// CHECK: tf_executor.yield %[[ADD1]], %[[ADD2]] : tensor<*xi32>, tensor<*xi32>
@@ -64,7 +64,7 @@
// CHECK-LABEL: func @graph_and_more
// CHECK: %[[RESULT:.*]] = tf_executor.graph {
-// CHECK: %[[ISLAND:.*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND:.*]], %[[ISLAND_control:.*]] = tf_executor.island {
// CHECK: tf_executor.graph {
// CHECK: %[[ISLAND_INNER:.*]] = tf_executor.island {
// CHECK: tf_executor.yield
@@ -74,6 +74,6 @@
// CHECK: %[[ADD:.*]] = "tf.Add"(%arg0, %arg1) : (tensor<*xi32>, tensor<i32>) -> tensor<*xi32>
// CHECK: tf_executor.yield %[[ADD]] : tensor<*xi32>
// CHECK: }
-// CHECK: tf_executor.fetch %[[ISLAND]]#0 : tensor<*xi32>
+// CHECK: tf_executor.fetch %[[ISLAND]] : tensor<*xi32>
// CHECK: }
// CHECK: return %[[RESULT]] : tensor<*xi32>
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops.mlir b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops.mlir
index 3dea491..03184ff 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops.mlir
@@ -18,7 +18,7 @@
}
-// CHECK-LABEL: func @graph_with_fetch(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @graph_with_fetch(%{{.*}}: tensor<*xf32>)
func @graph_with_fetch(%0: tensor<*xf32>) -> tensor<*xf32> {
%result = tf_executor.graph {
tf_executor.fetch %0 : tensor<*xf32>
@@ -26,19 +26,19 @@
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @graph_with_fetch_attributes(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @graph_with_fetch_attributes(%{{.*}}: tensor<*xf32>)
func @graph_with_fetch_attributes(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%result = tf_executor.graph {
tf_executor.fetch %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
} {attr2 = 32 : i64, tf_executor.attr1 = "value1"}
// CHECK: tf_executor.graph {
-// CHECK-NEXT: tf_executor.fetch %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK-NEXT: tf_executor.fetch %{{.*}} : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
// CHECK-NEXT: } {attr2 = 32 : i64, tf_executor.attr1 = "value1"}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @simpleIsland(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @simpleIsland(%{{.*}}: tensor<*xf32>)
func @simpleIsland(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:2 = tf_executor.island {
@@ -47,20 +47,20 @@
tf_executor.fetch %1#0 : tensor<*xf32>
}
// CHECK: tf_executor.island {
-// CHECK: tf_executor.yield %arg0 : tensor<*xf32>
-// CHECK: tf_executor.fetch %1#0 : tensor<*xf32>
+// CHECK: tf_executor.yield %{{.*}} : tensor<*xf32>
+// CHECK: tf_executor.fetch {{.*}} : tensor<*xf32>
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @simpleIsland_with_attributes(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @simpleIsland_with_attributes(%{{.*}}: tensor<*xf32>)
func @simpleIsland_with_attributes(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:2 = tf_executor.island {
tf_executor.yield %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
} {attr2 = 32 : i64, tf_executor.attr1 = "value1"}
-// CHECK: tf_executor.yield %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.yield %{{.*}} : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
// CHECK-NEXT: } {attr2 = 32 : i64, tf_executor.attr1 = "value1"}
tf_executor.fetch %1#0 : tensor<*xf32>
@@ -68,7 +68,7 @@
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @simpleIsland_with_multiple_control_inputs(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @simpleIsland_with_multiple_control_inputs(%{{.*}}: tensor<*xf32>)
func @simpleIsland_with_multiple_control_inputs(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1 = tf_executor.island {
@@ -82,30 +82,30 @@
}
tf_executor.fetch %3#0 : tensor<*xf32>
}
-// CHECK: %[[ISLAND0:[0-9]*]] = tf_executor.island {
+// CHECK: %[[ISLAND0:[a-z_0-9]*]] = tf_executor.island {
// CHECK-NEXT: tf_executor.yield
-// CHECK: %[[ISLAND1:[0-9]*]] = tf_executor.island {
+// CHECK: %[[ISLAND1:[a-z_0-9]*]] = tf_executor.island {
// CHECK-NEXT: tf_executor.yield
-// CHECK: %[[ISLAND2:[0-9]*]]:2 = tf_executor.island(%[[ISLAND0]], %[[ISLAND1]]) {
-// CHECK: tf_executor.fetch %[[ISLAND2]]#0 : tensor<*xf32>
+// CHECK: %[[ISLAND2:.*]], %[[ISLAND2_control:.*]] = tf_executor.island(%[[ISLAND0]], %[[ISLAND1]]) {
+// CHECK: tf_executor.fetch %[[ISLAND2]] : tensor<*xf32>
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @fetchWithControlDep(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @fetchWithControlDep(%{{.*}}: tensor<*xf32>)
func @fetchWithControlDep(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%result = tf_executor.graph {
%val, %ctl_id = tf_executor.island {
%val = addf %arg0, %arg0 : tensor<*xf32>
tf_executor.yield %arg0 : tensor<*xf32>
}
-// CHECK: tf_executor.fetch %1#0, %1#1 : tensor<*xf32>, !tf_executor.control
+// CHECK: tf_executor.fetch %{{.*}}, %{{.*}} : tensor<*xf32>, !tf_executor.control
tf_executor.fetch %val, %ctl_id : tensor<*xf32>, !tf_executor.control
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @testAddWithControlDependency(%arg0: tensor<*xf32>)
+// CHECK-LABEL: func @testAddWithControlDependency(%{{.*}}: tensor<*xf32>)
func @testAddWithControlDependency(%0: tensor<*xf32>) -> tensor<*xf32> {
%result = tf_executor.graph {
// This identity operation is unused, but the control dependency on the
@@ -116,7 +116,7 @@
}
// The control dependency is held by the operand.
-// CHECK: tf_executor.island(%1#1)
+// CHECK: tf_executor.island(%{{.*}})
%add, %clt_add = tf_executor.island(%ctl_id) {
%add_in= "tf.Add"(%0, %0) : (tensor<*xf32>, tensor<*xf32>) -> (tensor<*xf32>)
tf_executor.yield %add_in : tensor<*xf32>
@@ -127,50 +127,50 @@
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : tensor<*xf32>
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
tf_executor.fetch %true : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_with_broadcast(%arg0: tensor<2xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_with_broadcast(%{{.*}}: tensor<2xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_with_broadcast(%arg0: tensor<2xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<*xf32>, !tf_executor.control)
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<*xf32>, !tf_executor.control)
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<*xf32>, !tf_executor.control)
tf_executor.fetch %true : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_with_broadcast_one_output(%arg0: tensor<2xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_with_broadcast_one_output(%{{.*}}: tensor<2xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_with_broadcast_one_output(%arg0: tensor<2xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : (tensor<2xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
tf_executor.fetch %true : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_with_broadcast_output_only(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_with_broadcast_output_only(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_with_broadcast_output_only(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : (tensor<*xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : (tensor<*xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : (tensor<*xf32>, tensor<i1>) -> (tensor<*xf32>, tensor<2xf32>, !tf_executor.control)
tf_executor.fetch %true : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_with_attributes(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_with_attributes(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_with_attributes(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.fetch %true : tensor<*xf32>
}
@@ -181,10 +181,10 @@
func @switchN(%arg0: tensor<i32>, %arg1: tensor<*xf32>) -> tensor<*xf32> {
%fetches = tf_executor.graph {
-// CHECK: %1:6 = tf_executor.SwitchN %arg1, %arg0 of 5 : tensor<*xf32>
+// CHECK: tf_executor.SwitchN %{{.*}}, %{{.*}} of 5 : tensor<*xf32>
%1:6 = tf_executor.SwitchN %arg1, %arg0 of 5 : tensor<*xf32>
-// CHECK: %2:13 = tf_executor.SwitchN %arg1, %arg0 of 12 (%1#5) : tensor<*xf32>
+// CHECK: tf_executor.SwitchN %{{.*}}, %{{.*}} of 12 (%{{.*}}) : tensor<*xf32>
%2:13 = tf_executor.SwitchN %arg1, %arg0 of 12 (%1#5) : tensor<*xf32>
tf_executor.fetch %2#0 : tensor<*xf32>
@@ -192,13 +192,13 @@
return %fetches : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_merge(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_merge(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_merge(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : tensor<*xf32>
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
-// CHECK: tf_executor.Merge %1#0, %1#1 : tensor<*xf32>
+// CHECK: tf_executor.Merge %{{.*}}, %{{.*}} : tensor<*xf32>
%value, %idx, %ctlMerge = tf_executor.Merge %true, %false : tensor<*xf32>
tf_executor.fetch %value : tensor<*xf32>
@@ -206,13 +206,13 @@
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_merge_with_ctl(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_merge_with_ctl(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_merge_with_ctl(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : tensor<*xf32>
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
-// CHECK: tf_executor.Merge %1#0, %1#1, %1#2, %1#2, %1#2 : tensor<*xf32>
+// CHECK: tf_executor.Merge %{{.*}}, %{{.*}}, %{{.*}}, %{{.*}}, %{{.*}} : tensor<*xf32>
%value, %idx, %ctlMerge = tf_executor.Merge %true, %false, %ctlSwitch, %ctlSwitch, %ctlSwitch : tensor<*xf32>
tf_executor.fetch %value : tensor<*xf32>
@@ -220,13 +220,13 @@
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @switch_merge_with_attributes(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @switch_merge_with_attributes(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @switch_merge_with_attributes(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
+// CHECK: tf_executor.Switch %{{.*}}, %{{.*}} : tensor<*xf32>
%true, %false, %ctlSwitch = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
-// CHECK: tf_executor.Merge %1#0, %1#1 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.Merge %{{.*}}, %{{.*}} : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%value, %idx, %ctlMerge = tf_executor.Merge %true, %false : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.fetch %value : tensor<*xf32>
@@ -315,136 +315,136 @@
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/fra\22me" : tensor<*xf32>
+// CHECK: tf_executor.Enter %{{.*}} frame "some/fra\22me" : tensor<*xf32>
%res:2 = tf_executor.Enter %arg0 frame "some/fra\"me" : tensor<*xf32>
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_broadcast(%arg0: tensor<8xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_broadcast(%{{.*}}: tensor<8xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter_broadcast(%arg0: tensor<8xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/fra\22me" : (tensor<8xf32>) -> (tensor<*xf32>, !tf_executor.control)
+// CHECK: tf_executor.Enter %{{.*}} frame "some/fra\22me" : (tensor<8xf32>) -> (tensor<*xf32>, !tf_executor.control)
%res:2 = tf_executor.Enter %arg0 frame "some/fra\"me" : (tensor<8xf32>) -> (tensor<*xf32>, !tf_executor.control)
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_parallel_iterations(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_parallel_iterations(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter_parallel_iterations(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/frame" parallel_iterations 42 : tensor<*xf32>
+// CHECK: tf_executor.Enter %{{.*}} frame "some/frame" parallel_iterations 42 : tensor<*xf32>
%res:2 = tf_executor.Enter %arg0 frame "some/frame" parallel_iterations 42 : tensor<*xf32>
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_constant(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_constant(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter_constant(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/frame" constant : tensor<*xf32>
+// CHECK: tf_executor.Enter %{{.*}} frame "some/frame" constant : tensor<*xf32>
%res:2 = tf_executor.Enter %arg0 frame "some/frame" constant : tensor<*xf32>
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_parallel_iterations_constant(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_parallel_iterations_constant(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter_parallel_iterations_constant(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/frame" parallel_iterations 42 constant : tensor<*xf32>
+// CHECK: tf_executor.Enter %{{.*}} frame "some/frame" parallel_iterations 42 constant : tensor<*xf32>
%res:2 = tf_executor.Enter %arg0 frame "some/frame" parallel_iterations 42 constant : tensor<*xf32>
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_with_attributes(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_with_attributes(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @enter_with_attributes(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
-// CHECK: tf_executor.Enter %arg0 frame "some/frame" : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.Enter %{{.*}} frame "some/frame" : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%res:2 = tf_executor.Enter %arg0 frame "some/frame" : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %result : tensor<*xf32>
}
-// CHECK-LABEL: func @enter_control(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @enter_control(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @enter_control(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:3 = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
-// CHECK: tf_executor.Enter %arg0, %1#2, %1#2 frame "some/frame" : tensor<*xf32>
+// CHECK: tf_executor.Enter %{{.*}}, %{{.*}}, %{{.*}} frame "some/frame" : tensor<*xf32>
%res:2 = tf_executor.Enter %arg0, %1#2, %1#2 frame "some/frame" : tensor<*xf32>
tf_executor.fetch %res#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @nextiteration(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @nextiteration(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @nextiteration(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:3 = tf_executor.NextIteration.Source : tensor<*xf32>
tf_executor.NextIteration.Sink[%1#1] %1#0 : tensor<*xf32>
-// CHECK: %1:3 = tf_executor.NextIteration.Source : tensor<*xf32>
-// CHECK: tf_executor.NextIteration.Sink [%1#1] %1#0 : tensor<*xf32>
+// CHECK: tf_executor.NextIteration.Source : tensor<*xf32>
+// CHECK: tf_executor.NextIteration.Sink [%{{.*}}] %{{.*}} : tensor<*xf32>
tf_executor.fetch %1#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @nextiteration_with_attributes(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
+// CHECK-LABEL: func @nextiteration_with_attributes(%{{.*}}: tensor<*xf32>, %{{.*}}: i1) -> tensor<*xf32> {
func @nextiteration_with_attributes(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:3 = tf_executor.NextIteration.Source : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.NextIteration.Sink[%1#1] %1#0 : tensor<*xf32> {attr4 = 42 : i64, tf_executor.attr_push = "other_value"}
-// CHECK: %1:3 = tf_executor.NextIteration.Source : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
-// CHECK: tf_executor.NextIteration.Sink [%1#1] %1#0 : tensor<*xf32> {attr4 = 42 : i64, tf_executor.attr_push = "other_value"}
+// CHECK: tf_executor.NextIteration.Source : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.NextIteration.Sink [%{{.*}}] %{{.*}} : tensor<*xf32> {attr4 = 42 : i64, tf_executor.attr_push = "other_value"}
tf_executor.fetch %1#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @nextiteration_control(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
+// CHECK-LABEL: func @nextiteration_control(%{{.*}}: tensor<*xf32>, %{{.*}}: tensor<i1>) -> tensor<*xf32> {
func @nextiteration_control(%arg0: tensor<*xf32>, %arg1: tensor<i1>) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:3 = tf_executor.Switch %arg0, %arg1 : tensor<*xf32>
%2:2 = tf_executor.Enter %arg0, %1#2, %1#2 frame "some/frame" : tensor<*xf32>
%3:3 = tf_executor.NextIteration.Source : tensor<*xf32>
tf_executor.NextIteration.Sink [%3#1] %3#0, %1#2 : tensor<*xf32>
-// CHECK: %3:3 = tf_executor.NextIteration.Source : tensor<*xf32>
-// CHECK: tf_executor.NextIteration.Sink [%3#1] %3#0, %1#2 : tensor<*xf32>
+// CHECK: tf_executor.NextIteration.Source : tensor<*xf32>
+// CHECK: tf_executor.NextIteration.Sink [%{{.*}}] %{{.*}}, %{{.*}} : tensor<*xf32>
tf_executor.fetch %3#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @exit(%arg0: tensor<*xf32>) -> tensor<*xf32> {
+// CHECK-LABEL: func @exit(%{{.*}}: tensor<*xf32>) -> tensor<*xf32> {
func @exit(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%0 = tf_executor.graph {
-// CHECK: %1:2 = tf_executor.Exit %arg0 : tensor<*xf32>
+// CHECK: tf_executor.Exit %{{.*}} : tensor<*xf32>
%1:2 = tf_executor.Exit %arg0 : tensor<*xf32>
tf_executor.fetch %1#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @exit_with_attributes(%arg0: tensor<*xf32>) -> tensor<*xf32> {
+// CHECK-LABEL: func @exit_with_attributes(%{{.*}}: tensor<*xf32>) -> tensor<*xf32> {
func @exit_with_attributes(%arg0: tensor<*xf32>) -> tensor<*xf32> {
%0 = tf_executor.graph {
-// CHECK: %1:2 = tf_executor.Exit %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.Exit %{{.*}} : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%1:2 = tf_executor.Exit %arg0 : tensor<*xf32> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.fetch %1#0 : tensor<*xf32>
}
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @exit_with_control(%arg0: tensor<*xf32>, %arg1: !tf_executor.control) -> tensor<*xf32> {
+// CHECK-LABEL: func @exit_with_control(%{{.*}}: tensor<*xf32>, %{{.*}}: !tf_executor.control) -> tensor<*xf32> {
func @exit_with_control(%arg0: tensor<*xf32>, %arg1: !tf_executor.control) -> tensor<*xf32> {
%0 = tf_executor.graph {
%1:2 = tf_executor.Exit %arg0, %arg1 : tensor<*xf32>
@@ -454,58 +454,58 @@
return %0 : tensor<*xf32>
}
-// CHECK-LABEL: func @control_trigger(%arg0: !tf_executor.control, %arg1: !tf_executor.control) {
+// CHECK-LABEL: func @control_trigger(%{{.*}}: !tf_executor.control, %{{.*}}: !tf_executor.control) {
func @control_trigger(%arg0: !tf_executor.control, %arg1: !tf_executor.control) {
tf_executor.graph {
-// CHECK: tf_executor.ControlTrigger %arg0, %arg1
+// CHECK: tf_executor.ControlTrigger %{{.*}}, %{{.*}}
%0 = tf_executor.ControlTrigger %arg0, %arg1
}
return
}
-// CHECK-LABEL: func @control_trigger_with_attributes(%arg0: !tf_executor.control, %arg1: !tf_executor.control) {
+// CHECK-LABEL: func @control_trigger_with_attributes(%{{.*}}: !tf_executor.control, %{{.*}}: !tf_executor.control) {
func @control_trigger_with_attributes(%arg0: !tf_executor.control, %arg1: !tf_executor.control) {
tf_executor.graph {
-// CHECK: tf_executor.ControlTrigger %arg0, %arg1 {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.ControlTrigger %{{.*}}, %{{.*}} {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%0 = tf_executor.ControlTrigger %arg0, %arg1 {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
}
return
}
-// CHECK-LABEL: func @loop_cond(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
+// CHECK-LABEL: func @loop_cond(%{{.*}}: tensor<i1>, %{{.*}}: !tf_executor.control) -> tensor<i1> {
func @loop_cond(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
%0 = tf_executor.graph {
-// CHECK: tf_executor.LoopCond %arg0 : tensor<i1>
+// CHECK: tf_executor.LoopCond %{{.*}} : tensor<i1>
%1:2 = tf_executor.LoopCond %arg0 : tensor<i1>
tf_executor.fetch %1#0 : tensor<i1>
}
return %0 : tensor<i1>
}
-// CHECK-LABEL: func @loop_cond_with_attributes(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
+// CHECK-LABEL: func @loop_cond_with_attributes(%{{.*}}: tensor<i1>, %{{.*}}: !tf_executor.control) -> tensor<i1> {
func @loop_cond_with_attributes(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
%0 = tf_executor.graph {
-// CHECK: tf_executor.LoopCond %arg0 : tensor<i1> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
+// CHECK: tf_executor.LoopCond %{{.*}} : tensor<i1> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
%1:2 = tf_executor.LoopCond %arg0 : tensor<i1> {attr3 = 32 : i64, tf_executor.attr_fetch = "some_value"}
tf_executor.fetch %1#0 : tensor<i1>
}
return %0 : tensor<i1>
}
-// CHECK-LABEL: func @loop_cond_with_control(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
+// CHECK-LABEL: func @loop_cond_with_control(%{{.*}}: tensor<i1>, %{{.*}}: !tf_executor.control) -> tensor<i1> {
func @loop_cond_with_control(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<i1> {
%0 = tf_executor.graph {
-// CHECK: tf_executor.LoopCond %arg0, %arg1 : tensor<i1>
+// CHECK: tf_executor.LoopCond %{{.*}}, %{{.*}} : tensor<i1>
%1:2 = tf_executor.LoopCond %arg0, %arg1 : tensor<i1>
tf_executor.fetch %1#0 : tensor<i1>
}
return %0 : tensor<i1>
}
-// CHECK-LABEL: func @loop_cond_with_control_broadcast(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<*xi1> {
+// CHECK-LABEL: func @loop_cond_with_control_broadcast(%{{.*}}: tensor<i1>, %{{.*}}: !tf_executor.control) -> tensor<*xi1> {
func @loop_cond_with_control_broadcast(%arg0: tensor<i1>, %arg1: !tf_executor.control) -> tensor<*xi1> {
%0 = tf_executor.graph {
-// CHECK: tf_executor.LoopCond %arg0, %arg1 : (tensor<i1>, !tf_executor.control) -> (tensor<*xi1>, !tf_executor.control)
+// CHECK: tf_executor.LoopCond %{{.*}}, %{{.*}} : (tensor<i1>, !tf_executor.control) -> (tensor<*xi1>, !tf_executor.control)
%1:2 = tf_executor.LoopCond %arg0, %arg1 : (tensor<i1>, !tf_executor.control) -> (tensor<*xi1>, !tf_executor.control)
tf_executor.fetch %1#0 : tensor<*xi1>
}
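
Every change in this file swaps printer-assigned SSA names (%arg0, %1#0) in CHECK lines for %{{.*}} wildcards: the names are an artifact of the printer, which is free to choose different ones (e.g. %outputs), so only the structure around them should be load-bearing. A quick illustration with a hypothetical normalizer showing two spellings of the same IR collapsing to one pattern:

import re

def anonymize(ir_line):
    # Replace any SSA value name (including #N projections) with the
    # wildcard the tests now use.
    return re.sub(r"%[\w#]+", "%{{.*}}", ir_line)

a = anonymize("tf_executor.fetch %1#0 : tensor<*xf32>")
b = anonymize("tf_executor.fetch %outputs : tensor<*xf32>")
assert a == b == "tf_executor.fetch %{{.*}} : tensor<*xf32>"
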
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_invalid.mlir b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_invalid.mlir
index 1570c0c..022d181 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_invalid.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_invalid.mlir
@@ -344,18 +344,6 @@
// -----
-// Check that the predicate must be a tensor.
-func @invalidswitch(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
- %result = tf_executor.graph {
- %true, %false, %ctlSwitch = "tf_executor.Switch"(%arg0, %arg1) : (tensor<*xf32>, i1) -> (tensor<*xf32>, tensor<*xf32>, !tf_executor.control)
-// expected-error@-1 {{'tf_executor.Switch' op operand #1 must be tensor of 1-bit integer values}}
- tf_executor.fetch %true : tensor<*xf32>
- }
- return %result : tensor<*xf32>
-}
-
-// -----
-
// Check that a switch's second argument must be a valid predicate (i1).
func @invalid_switch(%arg0: tensor<*xf32>, %arg1: i1) -> tensor<*xf32> {
%result = tf_executor.graph {
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_location_roundtrip.mlir b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_location_roundtrip.mlir
index 4fea155..82e4205 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_location_roundtrip.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_executor_ops_location_roundtrip.mlir
@@ -17,15 +17,15 @@
// When parsing it back, we should recover all 3 locations (the
// tf_executor.island, tf.Identity, and tf_executor.yield).
-// CHECK-LABEL: func @island_one_op_all_locs_same(%arg0: tensor<f32>) -> tensor<f32> {
-// CHECK-NEXT: %0 = "tf_executor.graph"() ( {
-// CHECK-NEXT: %1:2 = "tf_executor.island"() ( {
-// CHECK-NEXT: %2 = "tf.Identity"(%arg0) : (tensor<f32>) -> tensor<f32> loc("identity@some_function")
-// CHECK-NEXT: "tf_executor.yield"(%2) : (tensor<f32>) -> () loc("identity@some_function")
+// CHECK-LABEL: func @island_one_op_all_locs_same(%{{.*}}: tensor<f32>) -> tensor<f32> {
+// CHECK-NEXT: "tf_executor.graph"() ( {
+// CHECK-NEXT: "tf_executor.island"() ( {
+// CHECK-NEXT: "tf.Identity"(%{{.*}}) : (tensor<f32>) -> tensor<f32> loc("identity@some_function")
+// CHECK-NEXT: "tf_executor.yield"(%{{.*}}) : (tensor<f32>) -> () loc("identity@some_function")
// CHECK-NEXT: }) : () -> (tensor<f32>, !tf_executor.control) loc("identity@some_function")
-// CHECK-NEXT: "tf_executor.fetch"(%1#0) : (tensor<f32>) -> () loc(unknown)
+// CHECK-NEXT: "tf_executor.fetch"(%{{.*}}) : (tensor<f32>) -> () loc(unknown)
// CHECK-NEXT: }) : () -> tensor<f32> loc(unknown)
-// CHECK-NEXT: "std.return"(%0) : (tensor<f32>) -> () loc(unknown)
+// CHECK-NEXT: "std.return"(%{{.*}}) : (tensor<f32>) -> () loc(unknown)
// CHECK-NEXT: } loc(unknown)
func @island_one_op_all_locs_same(%arg0: tensor<f32>) -> tensor<f32> {
@@ -44,15 +44,15 @@
// it is incorrect to use that syntax if the island, wrapped op, and yield
// don't have identical locations.
-// CHECK-LABEL: func @island_one_op_all_locs_NOT_same(%arg0: tensor<f32>) -> tensor<f32> {
-// CHECK-NEXT: %0 = "tf_executor.graph"() ( {
-// CHECK-NEXT: %1:2 = "tf_executor.island"() ( {
-// CHECK-NEXT: %2 = "tf.Identity"(%arg0) : (tensor<f32>) -> tensor<f32> loc("identity@some_function")
-// CHECK-NEXT: "tf_executor.yield"(%2) : (tensor<f32>) -> () loc("identity@some_function")
+// CHECK-LABEL: func @island_one_op_all_locs_NOT_same(%{{.*}}: tensor<f32>) -> tensor<f32> {
+// CHECK-NEXT: "tf_executor.graph"() ( {
+// CHECK-NEXT: "tf_executor.island"() ( {
+// CHECK-NEXT: "tf.Identity"(%{{.*}}) : (tensor<f32>) -> tensor<f32> loc("identity@some_function")
+// CHECK-NEXT: "tf_executor.yield"(%{{.*}}) : (tensor<f32>) -> () loc("identity@some_function")
// CHECK-NEXT: }) : () -> (tensor<f32>, !tf_executor.control) loc("NOT_identity@some_function")
-// CHECK-NEXT: "tf_executor.fetch"(%1#0) : (tensor<f32>) -> () loc(unknown)
+// CHECK-NEXT: "tf_executor.fetch"(%{{.*}}) : (tensor<f32>) -> () loc(unknown)
// CHECK-NEXT: }) : () -> tensor<f32> loc(unknown)
-// CHECK-NEXT: "std.return"(%0) : (tensor<f32>) -> () loc(unknown)
+// CHECK-NEXT: "std.return"(%{{.*}}) : (tensor<f32>) -> () loc(unknown)
// CHECK-NEXT: } loc(unknown)
func @island_one_op_all_locs_NOT_same(%arg0: tensor<f32>) -> tensor<f32> {
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py
index fce0981..0465f9d 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py
@@ -38,7 +38,7 @@
# CHECK-SAME: %arg1: tensor<*x!tf.resource> {tf_saved_model.bound_input = @[[VAR]]},
# CHECK-SAME: %arg2: tensor<f32> {tf_saved_model.bound_input = @[[CONST]]}) -> (
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = []})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def some_function(self, x):
return x + self.v42 + self.c43
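
The CHECK-NEXT to CHECK-SAME switches in these tf_saved_model tests follow from FileCheck directive semantics: CHECK-NEXT must match on the line immediately after the previous match, while CHECK-SAME continues matching on the same line as the previous match. The function's `attributes {...}` clause is now expected on the same line as the signature, so CHECK-SAME is the directive that still matches; a couple of tests later in this patch (keras.py and shapes_for_variables.py) instead relax to a plain CHECK, which matches on any later line.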
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/call_to_exported.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/call_to_exported.py
index 609dc89..8e9e197 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/call_to_exported.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/call_to_exported.py
@@ -50,7 +50,7 @@
# CHECK-SAME: ) -> (
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes{{.*}}tf_saved_model.exported_names = ["callee"]
+ # CHECK-SAME: attributes{{.*}}tf_saved_model.exported_names = ["callee"]
# CHECK: "tf.StatefulPartitionedCall"{{.*}}f = @[[CALLEE_INTERNAL:[a-zA-Z_0-9]+]]
#
# CHECK: func {{@[a-zA-Z_0-9]+}}(
@@ -59,7 +59,7 @@
# CHECK-SAME: ) -> (
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<*xf32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes{{.*}}tf_saved_model.exported_names = ["caller"]
+ # CHECK-SAME: attributes{{.*}}tf_saved_model.exported_names = ["caller"]
# CHECK: "tf.StatefulPartitionedCall"{{.*}}f = @[[CALLEE_INTERNAL]]
#
# CHECK: func @[[CALLEE_INTERNAL]]
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/keras.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/keras.py
index 2e724c3..a95909b 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/keras.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/keras.py
@@ -40,7 +40,7 @@
self.model = mnist_model()
# CHECK: func {{@[a-zA-Z_0-9]+}}(%arg0: tensor<1x28x28x1xf32> {tf_saved_model.index_path = [0]}
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["my_predict"]
+ # CHECK: attributes {{.*}} tf_saved_model.exported_names = ["my_predict"]
@tf.function(input_signature=[
tf.TensorSpec([1, 28, 28, 1], tf.float32),
])
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py
index 8cdedad..2a72c9b 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py
@@ -36,7 +36,7 @@
# sense and will be superseded by MLIR->MLIR shape inference tests.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}(%arg0: tensor<f32> {{.*}}) -> (tensor<f32> {{.*}})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def some_function(self, x):
return x
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_variables.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_variables.py
index 7e804e7..3729043 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_variables.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_variables.py
@@ -36,7 +36,7 @@
# will be superseded by MLIR->MLIR shape inference tests.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}({{.*}}) -> (tensor<f32> {{.*}})
- # CHECK-NEXT: tf_saved_model.exported_names = ["some_function"]
+ # CHECK: tf_saved_model.exported_names = ["some_function"]
def __init__(self):
super(TestModule, self).__init__()
self.my_variable = tf.Variable(42.)
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_input.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_input.py
index cb44ecf..095fddb 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_input.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_input.py
@@ -38,7 +38,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: %arg1: tensor<2xf32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0000_function_arity"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0000_function_arity"]
@tf.function(input_signature=[
tf.TensorSpec([1], tf.float32),
tf.TensorSpec([2], tf.float32)
@@ -51,7 +51,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<f32> {tf_saved_model.index_path = [0, 0]},
# CHECK-SAME: %arg1: tensor<f32> {tf_saved_model.index_path = [0, 1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0001_list_2_elements"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0001_list_2_elements"]
@tf.function(input_signature=[[
tf.TensorSpec([], tf.float32),
tf.TensorSpec([], tf.float32),
@@ -65,7 +65,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<1xf32> {tf_saved_model.index_path = [0, "x"]},
# CHECK-SAME: %arg1: tensor<2xf32> {tf_saved_model.index_path = [0, "y"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0002_dict_2_keys"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0002_dict_2_keys"]
@tf.function(input_signature=[{
'x': tf.TensorSpec([1], tf.float32),
'y': tf.TensorSpec([2], tf.float32),
@@ -79,7 +79,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<1xf32> {tf_saved_model.index_path = [0, "x"]},
# CHECK-SAME: %arg1: tensor<2xf32> {tf_saved_model.index_path = [0, "y"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0003_dict_2_keys_out_of_order"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0003_dict_2_keys_out_of_order"]
@tf.function(input_signature=[{
'y': tf.TensorSpec([2], tf.float32),
'x': tf.TensorSpec([1], tf.float32),
@@ -96,7 +96,7 @@
# CHECK-SAME: %arg3: tensor<4xf32> {tf_saved_model.index_path = [0, "x"]},
# CHECK-SAME: %arg4: tensor<5xf32> {tf_saved_model.index_path = [0, "y"]},
# CHECK-SAME: %arg5: tensor<6xf32> {tf_saved_model.index_path = [0, "z"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0004_dict_many_keys"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0004_dict_many_keys"]
@tf.function(input_signature=[{
'x': tf.TensorSpec([4], tf.float32),
'y': tf.TensorSpec([5], tf.float32),
@@ -115,7 +115,7 @@
# CHECK-SAME: %arg0: tensor<1xf32> {tf_saved_model.index_path = [0, "x", 0]},
# CHECK-SAME: %arg1: tensor<2xf32> {tf_saved_model.index_path = [0, "x", 1]},
# CHECK-SAME: %arg2: tensor<3xf32> {tf_saved_model.index_path = [0, "y"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0005_more_complex_recursive_structure"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0005_more_complex_recursive_structure"]
@tf.function(input_signature=[{
'x': [tf.TensorSpec([1], tf.float32),
tf.TensorSpec([2], tf.float32)],
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_output.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_output.py
index e73c805..b476df0 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_output.py
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_output.py
@@ -36,7 +36,7 @@
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = []})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0000_single_return"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0000_single_return"]
@tf.function(input_signature=[])
def f0000_single_return(self):
return tf.constant(1.0, shape=[1])
@@ -48,7 +48,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0001_multiple_results_no_punctuation"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0001_multiple_results_no_punctuation"]
@tf.function(input_signature=[])
def f0001_multiple_results_no_punctuation(self):
return tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])
@@ -61,7 +61,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0002_multiple_results_parentheses"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0002_multiple_results_parentheses"]
@tf.function(input_signature=[])
def f0002_multiple_results_parentheses(self):
return (tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2]))
@@ -74,7 +74,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0003_multiple_results_brackets"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0003_multiple_results_brackets"]
@tf.function(input_signature=[])
def f0003_multiple_results_brackets(self):
return [tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])]
@@ -84,7 +84,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0, 0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [0, 1]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0004_list_2_elements"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0004_list_2_elements"]
@tf.function(input_signature=[])
def f0004_list_2_elements(self):
return [[tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])]]
@@ -97,7 +97,7 @@
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = ["x"]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = ["y"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0005_dict_2_keys"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0005_dict_2_keys"]
@tf.function(input_signature=[])
def f0005_dict_2_keys(self):
return {
@@ -112,7 +112,7 @@
# CHECK-SAME: %arg0: tensor<f32> {tf_saved_model.index_path = [0]}
# CHECK-SAME: ) -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = ["x"]})
- # CHECK-NEXT: attributes {{.*}} tf_saved_model.exported_names = ["f0006_multiple_return_statements"]
+ # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0006_multiple_return_statements"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def f0006_multiple_return_statements(self, x):
if x > 3.:
diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tpu_cluster_formation.mlir b/tensorflow/compiler/mlir/tensorflow/tests/tpu_cluster_formation.mlir
index 2213e62..e2c81c9 100644
--- a/tensorflow/compiler/mlir/tensorflow/tests/tpu_cluster_formation.mlir
+++ b/tensorflow/compiler/mlir/tensorflow/tests/tpu_cluster_formation.mlir
@@ -128,7 +128,7 @@
return %0#0, %0#1 : tensor<i1>, tensor<i1>
}
-// CHECK: %[[ISLAND_1:[0-9]*]]:2 = tf_executor.island {
+// CHECK: %[[ISLAND_1:.*]], %[[ISLAND_1_control:.*]] = tf_executor.island {
// CHECK: "tf.opB"
// CHECK: %[[LAUNCH_0:[0-9]*]] = "tf_device.launch"() ( {
// CHECK-NEXT: %[[OP_A:[0-9]*]] = "tf.opA"(%[[ARG_0]])
@@ -141,7 +141,7 @@
// CHECK: tf_executor.island {
// CHECK: "tf.opE"
// CHECK: %[[LAUNCH_1:[0-9]*]] = "tf_device.launch"() ( {
-// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[ISLAND_1]]#0)
+// CHECK-NEXT: %[[OP_D:[0-9]*]] = "tf.opD"(%[[ISLAND_1]])
// CHECK-NEXT: %[[OP_F:[0-9]*]] = "tf.opF"(%[[ARG_0]])
// CHECK-NEXT: tf_device.return %[[OP_F]]
// CHECK-NEXT: _tpu_replicate = "replicate"
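
The capture change above also leans on FileCheck semantics: `[[NAME:regex]]` binds a variable at match time. The old pattern `%[[ISLAND_1:[0-9]*]]:2` captured the packed result group and dereferenced the data result as `%[[ISLAND_1]]#0`; with the island's data and control results printed as two separately named values, the test binds them individually (`%[[ISLAND_1]]` and `%[[ISLAND_1_control]]`) and later uses reference the data result directly.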
diff --git a/tensorflow/compiler/mlir/tensorflow/transforms/graph_pruning.cc b/tensorflow/compiler/mlir/tensorflow/transforms/graph_pruning.cc
index c8d3ab2..882e769 100644
--- a/tensorflow/compiler/mlir/tensorflow/transforms/graph_pruning.cc
+++ b/tensorflow/compiler/mlir/tensorflow/transforms/graph_pruning.cc
@@ -14,12 +14,12 @@
==============================================================================*/
#include "llvm/ADT/STLExtras.h"
+#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
-#include "mlir/IR/Block.h" // TF:local_config_mlir
-#include "mlir/IR/Builders.h" // TF:local_config_mlir
-#include "mlir/IR/Location.h" // TF:local_config_mlir
+#include "llvm/Support/Casting.h"
#include "mlir/IR/Operation.h" // TF:local_config_mlir
+#include "mlir/IR/Value.h" // TF:local_config_mlir
#include "mlir/Pass/Pass.h" // TF:local_config_mlir
#include "mlir/Pass/PassRegistry.h" // TF:local_config_mlir
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h"
@@ -28,39 +28,56 @@
namespace mlir {
namespace tf_executor {
-// Prunes a TF graph eliminating dead nodes.
-void prune_graph(GraphOp graph) {
- // A graph has a single block which forms a DAG: nodes that aren't reachable
- // from the `fetch` operands can be eliminated.
+// Prunes unreachable operations of a tf_executor.graph operation.
+void PruneGraph(GraphOp graph) {
+ // A graph has a single block which forms a DAG: operations that aren't
+ // reachable from the `fetch` operands can be eliminated.
- // Delete unreachable node from the graph. We traverse it in reverse order so
- // that we just have to check that a node does not have any users to delete
- // it.
- for (Operation &op : llvm::make_early_inc_range(
- llvm::drop_begin(llvm::reverse(graph.GetBody()), 1))) {
- // NextIteration.Sink operation are handled specially: they are live if the
- // source is live, and removed when the source is processed.
- if (auto sinkOp = dyn_cast<NextIterationSinkOp>(op)) continue;
+ llvm::SmallPtrSet<Operation*, 8> reachable_ops;
+ llvm::SmallVector<Operation*, 8> ops_to_visit;
- // For NextIteration.Source, we just check that the source does not have any
- // other user than the sink.
- if (auto sourceOp = dyn_cast<NextIterationSourceOp>(op)) {
- Operation *sink = sourceOp.GetSink().getOperation();
- if (llvm::any_of(sourceOp.getResults(), [sink](Value *result) {
- return llvm::any_of(result->getUsers(), [sink](Operation *user) {
- return user != sink;
- });
- }))
- continue;
+ // Visit an op: mark and queue the in-graph defining ops of its operands.
+ auto visit_op = [&](Operation* op) {
+ for (Value* operand : op->getOperands()) {
+ Operation* def = operand->getDefiningOp();
+ if (def && def->getParentOp() == graph &&
+ reachable_ops.insert(def).second) {
+ // Op has not been visited yet; add it to the queue to visit later.
+ ops_to_visit.push_back(def);
+ }
+ }
+ };
- // No other users than the sink, erase the pair!
- sink->erase();
- sourceOp.erase();
+ // Visit `fetch` operands.
+ visit_op(graph.GetFetch());
+
+ while (!ops_to_visit.empty()) {
+ Operation* op = ops_to_visit.pop_back_val();
+ if (auto island_op = llvm::dyn_cast<IslandOp>(op)) {
+ // Visit the operands of the island and of all ops nested inside it.
+ op->walk([&](Operation* inner_op) { visit_op(inner_op); });
continue;
+ } else {
+ // Op is not an island, only visit its operands.
+ visit_op(op);
}
- // General case.
- if (op.use_empty()) op.erase();
+ // If op is a `tf_executor.NextIteration.Source`, visit its associated
+ // `tf_executor.NextIteration.Sink` op.
+ if (auto source_op = llvm::dyn_cast<NextIterationSourceOp>(op)) {
+ Operation* sink_op = source_op.GetSink().getOperation();
+ if (reachable_ops.insert(sink_op).second) {
+ ops_to_visit.push_back(sink_op);
+ }
+ }
+ }
+
+ // Erase unreachable ops in reverse order.
+ for (Operation& op : llvm::make_early_inc_range(
+ llvm::drop_begin(llvm::reverse(graph.GetBody()), 1))) {
+ if (reachable_ops.find(&op) == reachable_ops.end()) {
+ op.erase();
+ }
}
}
@@ -69,7 +86,7 @@
// This transformation pass prunes a TF graph eliminating dead-nodes.
struct GraphPruning : public FunctionPass<GraphPruning> {
void runOnFunction() override {
- getFunction().walk([](tf_executor::GraphOp graph) { prune_graph(graph); });
+ getFunction().walk([](tf_executor::GraphOp graph) { PruneGraph(graph); });
}
};
@@ -80,7 +97,8 @@
}
static PassRegistration<GraphPruning> pass(
- "tf-executor-graph-pruning", "Prune a TensorFlow Graph from dead nodes.");
+ "tf-executor-graph-pruning",
+ "Prune unreachable nodes in a TensorFlow Graph.");
} // namespace tf_executor
} // namespace mlir
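
The new PruneGraph above is a standard two-phase mark-and-sweep: mark everything transitively reachable from the fetch by walking operand edges (with islands walked into and NextIteration source/sink pairs kept alive together), then sweep the unreachable ops in reverse order. A minimal standalone sketch of the marking phase on a plain adjacency list, rather than MLIR ops; the names here are ours, illustration only:

    #include <unordered_set>
    #include <vector>

    // Marks every node transitively reachable from `roots` by following
    // use -> def ("operand") edges, the same worklist shape as PruneGraph.
    std::unordered_set<int> MarkReachable(
        const std::vector<std::vector<int>>& operand_defs,  // node -> its defs
        const std::vector<int>& roots) {                    // the fetch's defs
      std::unordered_set<int> reachable(roots.begin(), roots.end());
      std::vector<int> worklist(roots.begin(), roots.end());
      while (!worklist.empty()) {
        int node = worklist.back();
        worklist.pop_back();
        for (int def : operand_defs[node]) {
          if (reachable.insert(def).second)  // true only on first insertion
            worklist.push_back(def);
        }
      }
      return reachable;  // sweep phase: erase every node not in this set
    }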
diff --git a/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.cc b/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.cc
index ca54305..fa299a5 100644
--- a/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.cc
+++ b/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.cc
@@ -35,8 +35,7 @@
Builder *builder) {
RankedTensorType ty = RankedTensorType::get(
{static_cast<int64_t>(values.size())}, builder->getIntegerType(64));
- return DenseElementsAttr::get<int64_t>(ty, values)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, values);
}
// Returns a 1-d i64 elements attribute populated with numbers from start to
@@ -50,8 +49,7 @@
std::iota(vals.begin(), vals.end(), start);
TensorType ty = RankedTensorType::get({size}, builder->getIntegerType(64));
- return DenseIntElementsAttr::get<int64_t>(ty, vals)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, vals);
}
// Returns int or float DenseElementsAttr with scalar shape with the given
diff --git a/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.td b/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.td
index b442c5b..99832de 100644
--- a/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.td
+++ b/tensorflow/compiler/mlir/tensorflow/transforms/lower_tf.td
@@ -147,3 +147,16 @@
(TF_MulOp $dy,
(TF_SubOp (TF_ConstOp (GetScalarOfType<1> $y)),
(TF_SquareOp $y)))>;
+
+//===----------------------------------------------------------------------===//
+// ZerosLike op patterns.
+//===----------------------------------------------------------------------===//
+
+def CreateTFShapeOp : NativeCodeCall<
+ "$_builder.create<TF::ShapeOp>($0->getLoc(), $1, $2)">;
+
+// TODO(hinsu): Support inputs of TensorList types.
+def LowerZerosLikeOp :
+ Pat<(TF_ZerosLikeOp:$src_op TensorOf<[AnyInteger, AnyFloat]>:$input),
+ (TF_BroadcastToOp (TF_ConstOp (GetScalarOfType<0> $input)),
+ (CreateTFShapeOp $src_op, $input, /*use 32bit*/ConstBoolAttrFalse))>;
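
The pattern reads as ZerosLike(x) => BroadcastTo(Const(scalar 0 of x's element type), Shape(x)): a scalar zero is materialized and broadcast to the input's runtime shape, so the rewrite also works when the input shape is not statically known. CreateTFShapeOp builds the tf.Shape op at the source op's location, and the /*use 32bit*/ConstBoolAttrFalse argument requests a 64-bit shape result.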
diff --git a/tensorflow/compiler/mlir/tensorflow/transforms/passes.h b/tensorflow/compiler/mlir/tensorflow/transforms/passes.h
index 434f02d..7a5c060 100644
--- a/tensorflow/compiler/mlir/tensorflow/transforms/passes.h
+++ b/tensorflow/compiler/mlir/tensorflow/transforms/passes.h
@@ -71,8 +71,8 @@
// Create a pass to prune dead nodes from a tf_executor.graph.
std::unique_ptr<OpPassBase<FuncOp>> CreateTFExecutorGraphPruningPass();
-// Prune a tf_executor.graph operation from dead nodes.
-void prune_graph(GraphOp graph);
+// Prunes unreachable operations of a tf_executor.graph operation.
+void PruneGraph(GraphOp graph);
// Sink `tf.Const` operations in the LaunchOp region using them. This is
// performed in order to limit the number of values implicitly captured in this
diff --git a/tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.cc b/tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.cc
index 9a71781..c44c81d 100644
--- a/tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.cc
+++ b/tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.cc
@@ -32,6 +32,7 @@
#include "mlir/Pass/PassRegistry.h" // TF:local_config_mlir
#include "mlir/Support/LLVM.h" // TF:local_config_mlir
#include "mlir/Support/LogicalResult.h" // TF:local_config_mlir
+#include "mlir/Transforms/FoldUtils.h" // TF:local_config_mlir
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_executor.h"
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"
#include "tensorflow/compiler/mlir/tensorflow/transforms/passes.h"
@@ -213,9 +214,13 @@
LogicalResult InferShapeUntilFixPoint(Region* region, int64_t graph_version,
int64_t max_iteration) {
- Dialect* tf_dialect = region->getContext()->getRegisteredDialect(
- TensorFlowDialect::getDialectNamespace());
+ MLIRContext* ctx = region->getContext();
+ Dialect* tf_dialect = ctx->getRegisteredDialect<TensorFlowDialect>();
+
+ // An operation folder that is used to attempt folding before inference.
+ OperationFolder folder(ctx);
bool changed = true;
+
// TODO(aminim): we could have a more efficient traversal by guiding the
// traversal with a worklist and reconsider only the nodes for which an
// operand type was inferred. This would need to be careful if working on a
@@ -225,15 +230,17 @@
LLVM_DEBUG(llvm::dbgs()
<< "Shape inference, iteration " << iteration << "\n");
region->walk([&](Operation* op) {
- if (op->getDialect() == tf_dialect)
+ if (op->getDialect() != tf_dialect) return;
+
+ // Before attempting inference, just try to fold the operation.
+ if (failed(folder.tryToFold(op)))
changed |= InferShapeForSingleOperation(op, tf_dialect, graph_version);
});
}
if (changed) {
- region->getParentOp()->emitWarning()
- << "Shape inference did not reach stable state after " << max_iteration
- << " iterations";
- return failure();
+ return region->getParentOp()->emitWarning()
+ << "Shape inference did not reach stable state after "
+ << max_iteration << " iterations";
}
return success();
}
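
Two details worth noting in this shape-inference change: OperationFolder::tryToFold returns success when the op was folded (and possibly erased), so inference now runs only on ops that could not be folded away; and the shorter emitWarning return works because the InFlightDiagnostic it produces converts to a failing LogicalResult, preserving the previous explicit failure(). A standalone sketch of the fixpoint structure, with all names here as placeholders rather than the MLIR API:

    #include <cstdint>
    #include <functional>
    #include <vector>

    // `ops` stands in for the region walk; TryFold/InferShape are stand-ins
    // for folder.tryToFold and InferShapeForSingleOperation.
    bool InferUntilFixpoint(const std::vector<int>& ops, int64_t max_iteration,
                            const std::function<bool(int)>& TryFold,
                            const std::function<bool(int)>& InferShape) {
      bool changed = true;
      for (int64_t i = 0; i < max_iteration && changed; ++i) {
        changed = false;
        for (int op : ops)
          if (!TryFold(op))             // fold first; folded ops need no inference
            changed |= InferShape(op);  // true if a new type was inferred
      }
      return !changed;  // false mirrors the emitWarning/failure path above
    }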
diff --git a/tensorflow/compiler/mlir/xla/hlo_function_importer.cc b/tensorflow/compiler/mlir/xla/hlo_function_importer.cc
index d552ba1..7c95a13 100644
--- a/tensorflow/compiler/mlir/xla/hlo_function_importer.cc
+++ b/tensorflow/compiler/mlir/xla/hlo_function_importer.cc
@@ -387,6 +387,12 @@
instruction->tuple_index())));
MakeAndReturn(GetTupleElementOp);
};
+ case HloOpcode::kGetDimensionSize: {
+ attributes.push_back(builder_->getNamedAttr(
+ "dimension", builder_->getIntegerAttr(builder_->getIntegerType(32),
+ instruction->dimension())));
+ MakeAndReturn(GetDimensionSizeOp);
+ };
case HloOpcode::kTranspose: {
attributes.push_back(builder_->getNamedAttr(
"permutation", ConvertDimensions(instruction->dimensions())));
@@ -632,19 +638,15 @@
for (auto value : op_dimensions) dimensions.emplace_back(APInt(64, value));
return DenseIntElementsAttr::get(
- RankedTensorType::get(dimensions.size(),
- builder_->getIntegerType(64)),
- dimensions)
- .cast<DenseIntElementsAttr>();
+ RankedTensorType::get(dimensions.size(), builder_->getIntegerType(64)),
+ dimensions);
}
mlir::DenseIntElementsAttr HloFunctionImporter::Convert(
llvm::ArrayRef<int64_t> op_dimensions) {
return DenseIntElementsAttr::get(
- RankedTensorType::get(op_dimensions.size(),
- builder_->getIntegerType(64)),
- op_dimensions)
- .cast<DenseIntElementsAttr>();
+ RankedTensorType::get(op_dimensions.size(), builder_->getIntegerType(64)),
+ op_dimensions);
}
mlir::NamedAttribute HloFunctionImporter::ConvertPadding(
diff --git a/tensorflow/compiler/mlir/xla/ir/hlo_ops.cc b/tensorflow/compiler/mlir/xla/ir/hlo_ops.cc
index c658764..1bc3d8c 100644
--- a/tensorflow/compiler/mlir/xla/ir/hlo_ops.cc
+++ b/tensorflow/compiler/mlir/xla/ir/hlo_ops.cc
@@ -82,8 +82,7 @@
Builder* builder) {
RankedTensorType ty = RankedTensorType::get(
{static_cast<int64_t>(values.size())}, builder->getIntegerType(64));
- return DenseElementsAttr::get<int64_t>(ty, values)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, values);
}
// Given the start indices and slice sizes for a dynamic-slice that can be
@@ -450,6 +449,18 @@
build(builder, state, result_ty, lhs, rhs);
}
+OpFoldResult ComplexOp::fold(ArrayRef<Attribute> operands) {
+ auto real_op =
+ dyn_cast_or_null<xla_hlo::RealOp>(getOperand(0)->getDefiningOp());
+ auto imag_op =
+ dyn_cast_or_null<xla_hlo::ImagOp>(getOperand(1)->getDefiningOp());
+ if (real_op && imag_op && real_op.getOperand() == imag_op.getOperand()) {
+ return real_op.getOperand();
+ }
+
+ return {};
+}
+
namespace {
Type CreateRealType(Type type) {
auto element_ty = getElementTypeOrSelf(type);
@@ -471,10 +482,28 @@
build(builder, state, CreateRealType(val->getType()), val);
}
+OpFoldResult ImagOp::fold(ArrayRef<Attribute> operands) {
+ if (auto complex_op =
+ dyn_cast_or_null<xla_hlo::ComplexOp>(getOperand()->getDefiningOp())) {
+ return complex_op.getOperand(1);
+ }
+
+ return {};
+}
+
void RealOp::build(Builder* builder, OperationState& state, Value* val) {
build(builder, state, CreateRealType(val->getType()), val);
}
+OpFoldResult RealOp::fold(ArrayRef<Attribute> operands) {
+ if (auto complex_op =
+ dyn_cast_or_null<xla_hlo::ComplexOp>(getOperand()->getDefiningOp())) {
+ return complex_op.getOperand(0);
+ }
+
+ return {};
+}
+
//===----------------------------------------------------------------------===//
// ConcatenateOp
//===----------------------------------------------------------------------===//
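
The three folders added to hlo_ops.cc above implement the identities real(complex(a, b)) = a, imag(complex(a, b)) = b, and complex(real(z), imag(z)) = z, the last one guarded so it only fires when both operands are extracted from the same complex value. For intuition, the same identities on std::complex, a host-side analogue rather than anything XLA-specific:

    #include <cassert>
    #include <complex>

    int main() {
      // real(complex(a, b)) == a and imag(complex(a, b)) == b:
      std::complex<float> c(3.0f, 4.0f);
      assert(c.real() == 3.0f && c.imag() == 4.0f);
      // complex(real(z), imag(z)) == z, the round trip the folder collapses:
      std::complex<float> z(1.0f, 2.0f);
      assert(std::complex<float>(z.real(), z.imag()) == z);
      return 0;
    }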
diff --git a/tensorflow/compiler/mlir/xla/ir/hlo_ops.td b/tensorflow/compiler/mlir/xla/ir/hlo_ops.td
index b371b13..5478232 100644
--- a/tensorflow/compiler/mlir/xla/ir/hlo_ops.td
+++ b/tensorflow/compiler/mlir/xla/ir/hlo_ops.td
@@ -165,6 +165,13 @@
def HLO_FloorOp: HLO_UnaryElementwiseOp<"floor",
[NoSideEffect, SameOperandsAndResultType], HLO_FpTensor>, BASE_HLO_FloorOp;
+def HLO_IsFiniteOp: HLO_UnaryElementwiseOp<"is_finite",
+ [NoSideEffect, SameOperandsAndResultShape], HLO_Tensor>,
+ BASE_HLO_IsFiniteOp {
+ let arguments = (ins HLO_FpTensor:$x);
+ let results = (outs HLO_PredTensor:$y);
+}
+
def HLO_LogOp: HLO_UnaryElementwiseOp<"log",
[NoSideEffect, SameOperandsAndResultType], HLO_FpOrComplexTensor>,
BASE_HLO_LogOp;
@@ -218,6 +225,7 @@
let arguments = (ins HLO_FpTensor:$lhs, HLO_FpTensor:$rhs);
let results = (outs HLO_ComplexTensor);
+ let hasFolder = 1;
}
def HLO_ImagOp: HLO_Op<
@@ -227,6 +235,7 @@
let arguments = (ins HLO_ComplexTensor);
let results = (outs HLO_FpTensor);
+ let hasFolder = 1;
}
def HLO_RealOp: HLO_Op<
@@ -236,6 +245,7 @@
let arguments = (ins HLO_ComplexTensor);
let results = (outs HLO_FpTensor);
+ let hasFolder = 1;
}
//===----------------------------------------------------------------------===//
@@ -477,10 +487,6 @@
DenseIntElementsAttr limit_indices,
DenseIntElementsAttr strides);
}];
-
- // TODO(b/129422361) Two of the required arguments comes from the start and
- // limit indices which aren't handled by the codegen.
- let hasCustomHLOConverter = 1;
}
def HLO_DynamicSliceOp: HLO_Op<"dynamic-slice",
@@ -706,6 +712,15 @@
let hasCustomHLOConverter = 1;
}
+def HLO_GetDimensionSizeOp: HLO_Op<"get_dimension_size", [NoSideEffect]>,
+ BASE_HLO_GetDimensionSizeOp {
+ let arguments = (ins
+ HLO_Tensor:$operand,
+ I32Attr:$dimension
+ );
+ let results = (outs HLO_IntTensor);
+}
+
def HLO_ReshapeOp: HLO_Op<"reshape",
[NoSideEffect, SameOperandsAndResultElementType]>, BASE_HLO_ReshapeOp {
let arguments = (ins HLO_Tensor:$operand);
diff --git a/tensorflow/compiler/mlir/xla/ir/hlo_ops_base.td b/tensorflow/compiler/mlir/xla/ir/hlo_ops_base.td
index a97fbd6..52a63e9 100644
--- a/tensorflow/compiler/mlir/xla/ir/hlo_ops_base.td
+++ b/tensorflow/compiler/mlir/xla/ir/hlo_ops_base.td
@@ -134,6 +134,17 @@
}];
}
+class BASE_HLO_GetDimensionSizeOp {
+ string summary = "GetDimensionSize operator";
+
+ string description = [{
+ Returns the size of the given dimension of the operand.
+
+ See
+ https://www.tensorflow.org/xla/operation_semantics#getdimensionsize.
+ }];
+}
+
class BASE_HLO_ImagOp {
string summary = "Imag operator";
@@ -142,6 +153,20 @@
}];
}
+class BASE_HLO_IsFiniteOp {
+ string summary = "IsFinite operator";
+
+ string description = [{
+ Tests whether each element of operand is finite, i.e., is not positive or
+ negative infinity, and is not NaN. Returns a tensor of 1-bit integers with
+ the same shape as the input, where each element is nonzero (i.e. true) if
+ and only if the corresponding input element is finite.
+
+ See
+ https://www.tensorflow.org/xla/operation_semantics#element-wise_unary_functions.
+ }];
+}
+
class BASE_HLO_LogOp {
string summary = "Logarithm operator";
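
A host-side analogue of the IsFinite contract described above, illustrative only (names are ours, not the XLA lowering): std::isfinite is false exactly for positive infinity, negative infinity, and NaN, matching the "nonzero iff finite" result.

    #include <cmath>
    #include <cstdint>
    #include <vector>

    // Element-wise IsFinite: 1 where x[i] is finite, 0 for +/-inf and NaN.
    std::vector<uint8_t> IsFinite(const std::vector<float>& x) {
      std::vector<uint8_t> out(x.size());
      for (size_t i = 0; i < x.size(); ++i)
        out[i] = std::isfinite(x[i]) ? 1 : 0;
      return out;
    }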
diff --git a/tensorflow/compiler/mlir/xla/ir/lhlo_ops.td b/tensorflow/compiler/mlir/xla/ir/lhlo_ops.td
index 871f65f..f4c3115 100644
--- a/tensorflow/compiler/mlir/xla/ir/lhlo_ops.td
+++ b/tensorflow/compiler/mlir/xla/ir/lhlo_ops.td
@@ -36,18 +36,18 @@
//===----------------------------------------------------------------------===//
// Any integer tensor types
-def LHLO_IntBuffer : StaticShapeMemRefOf<[HLO_Int]>;
+def LHLO_IntBuffer : MemRefOf<[HLO_Int]>;
// Any floating-point tensor types
-def LHLO_FpBuffer : StaticShapeMemRefOf<[AnyFloat]>;
+def LHLO_FpBuffer : MemRefOf<[AnyFloat]>;
-def LHLO_PredBuffer : StaticShapeMemRefOf<[HLO_Pred]>;
+def LHLO_PredBuffer : MemRefOf<[HLO_Pred]>;
// Any integer or floating-point tensor types
-def LHLO_IntOrFpBuffer : StaticShapeMemRefOf<[HLO_Int, AnyFloat]>;
+def LHLO_IntOrFpBuffer : MemRefOf<[HLO_Int, AnyFloat]>;
-def LHLO_Buffer : StaticShapeMemRefOf<[AnyFloat, AnyInteger]>;
+def LHLO_Buffer : MemRefOf<[AnyFloat, AnyInteger]>;
def LHLO_TupleBuffer : NestedTupleOf<[LHLO_Buffer]>;
diff --git a/tensorflow/compiler/mlir/xla/mlir_hlo_to_hlo.cc b/tensorflow/compiler/mlir/xla/mlir_hlo_to_hlo.cc
index c55d517..941d127 100644
--- a/tensorflow/compiler/mlir/xla/mlir_hlo_to_hlo.cc
+++ b/tensorflow/compiler/mlir/xla/mlir_hlo_to_hlo.cc
@@ -88,12 +88,6 @@
return out;
}
-// Converts the broadcast_sizes attribute into a vector of dimension sizes.
-static std::vector<int64> Convert_broadcast_sizes(
- mlir::DenseIntElementsAttr broadcast_sizes) {
- return ConvertDenseIntAttr(broadcast_sizes);
-}
-
static std::vector<xla::ReplicaGroup> Convert_replica_groups(
mlir::DenseIntElementsAttr groups) {
int64_t num_groups = groups.getType().getDimSize(0);
@@ -111,10 +105,19 @@
return result;
}
-static std::vector<int64> Convert_permutation(
- mlir::DenseIntElementsAttr permutation) {
- return ConvertDenseIntAttr(permutation);
-}
+#define I64_ELEMENTS_ATTR_TO_VECTOR(attribute) \
+ static std::vector<int64> Convert_##attribute( \
+ mlir::DenseIntElementsAttr attribute) { \
+ return ConvertDenseIntAttr(attribute); \
+ }
+
+I64_ELEMENTS_ATTR_TO_VECTOR(broadcast_sizes);
+I64_ELEMENTS_ATTR_TO_VECTOR(permutation);
+I64_ELEMENTS_ATTR_TO_VECTOR(start_indices);
+I64_ELEMENTS_ATTR_TO_VECTOR(limit_indices);
+I64_ELEMENTS_ATTR_TO_VECTOR(strides);
+
+#undef I64_ELEMENTS_ATTR_TO_VECTOR
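
For reference, each I64_ELEMENTS_ATTR_TO_VECTOR(foo) invocation expands to the same helper shape that was previously written out by hand; the broadcast_sizes instance reproduces the deleted function exactly (int64 and ConvertDenseIntAttr come from the surrounding file):

    static std::vector<int64> Convert_broadcast_sizes(
        mlir::DenseIntElementsAttr broadcast_sizes) {
      return ConvertDenseIntAttr(broadcast_sizes);
    }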
static std::vector<int64> Convert_ArrayRef(llvm::ArrayRef<int64_t> values) {
return {values.begin(), values.end()};
diff --git a/tensorflow/compiler/mlir/xla/tests/canonicalize.mlir b/tensorflow/compiler/mlir/xla/tests/canonicalize.mlir
index d7fc60e..e6d99b9 100644
--- a/tensorflow/compiler/mlir/xla/tests/canonicalize.mlir
+++ b/tensorflow/compiler/mlir/xla/tests/canonicalize.mlir
@@ -30,3 +30,21 @@
%1 = "xla_hlo.dynamic-slice"(%arg0, %0) {slice_sizes = dense<[1, 4]> : tensor<2xi64>} : (tensor<?x4xi32>, tensor<2xi64>) -> tensor<1x4xi32>
return %1 : tensor<1x4xi32>
}
+
+// CHECK-LABEL: @complex_expand_fold
+func @complex_expand_fold(%arg0: tensor<4xf32>, %arg1: tensor<4xf32>) -> (tensor<4xf32>, tensor<4xf32>) {
+ %0 = "xla_hlo.complex"(%arg0, %arg1) : (tensor<4xf32>, tensor<4xf32>) -> (tensor<4xcomplex<f32>>)
+ %1 = "xla_hlo.real"(%0) : (tensor<4xcomplex<f32>>) -> (tensor<4xf32>)
+ %2 = "xla_hlo.imag"(%0) : (tensor<4xcomplex<f32>>) -> (tensor<4xf32>)
+ // CHECK: return %arg0, %arg1
+ return %1, %2 : tensor<4xf32>, tensor<4xf32>
+}
+
+// CHECK-LABEL: @complex_collapse_fold
+func @complex_collapse_fold(%arg0: tensor<4xcomplex<f32>>) -> tensor<4xcomplex<f32>> {
+ %0 = "xla_hlo.real"(%arg0) : (tensor<4xcomplex<f32>>) -> (tensor<4xf32>)
+ %1 = "xla_hlo.imag"(%arg0) : (tensor<4xcomplex<f32>>) -> (tensor<4xf32>)
+ %2 = "xla_hlo.complex"(%0, %1) : (tensor<4xf32>, tensor<4xf32>) -> tensor<4xcomplex<f32>>
+ // CHECK: return %arg0
+ return %2 : tensor<4xcomplex<f32>>
+}
diff --git a/tensorflow/compiler/mlir/xla/tests/legalize-tf-full-conversion.mlir b/tensorflow/compiler/mlir/xla/tests/legalize-tf-full-conversion.mlir
index 932b661..d2b4d26 100644
--- a/tensorflow/compiler/mlir/xla/tests/legalize-tf-full-conversion.mlir
+++ b/tensorflow/compiler/mlir/xla/tests/legalize-tf-full-conversion.mlir
@@ -1,4 +1,4 @@
-// RUN: tf-opt %s -xla-legalize-tf -verify-diagnostics
+// RUN: tf-opt %s -xla-legalize-tf -split-input-file -verify-diagnostics
func @tf_executor_graph_op() {
// expected-error@+1 {{failed to legalize operation 'tf_executor.graph'}}
@@ -13,12 +13,16 @@
}
+// -----
+
func @tf_unknown_op(%arg0: tensor<2xi32>) -> tensor<2xi32> {
// expected-error@+1 {{failed to legalize operation 'tf.OpA'}}
%0 = "tf.OpA"(%arg0, %arg0) : (tensor<2xi32>, tensor<2xi32>) -> tensor<2xi32>
return %0: tensor<2xi32>
}
+// -----
+
func @tf_known_op(%arg0: tensor<2xi32>) -> tensor<2xi32> {
%0 = "tf.Add"(%arg0, %arg0) : (tensor<2xi32>, tensor<2xi32>) -> tensor<2xi32>
return %0: tensor<2xi32>
diff --git a/tensorflow/compiler/mlir/xla/tests/legalize-tf.mlir b/tensorflow/compiler/mlir/xla/tests/legalize-tf.mlir
index f22172d..d9d0349 100644
--- a/tensorflow/compiler/mlir/xla/tests/legalize-tf.mlir
+++ b/tensorflow/compiler/mlir/xla/tests/legalize-tf.mlir
@@ -1135,6 +1135,27 @@
return %0 : tensor<*xf32>
}
+// CHECK-LABEL: @is_finite
+func @is_finite(%arg0: tensor<2xf32>) -> tensor<2xi1> {
+ // CHECK: "xla_hlo.is_finite"(%arg0) : (tensor<2xf32>) -> tensor<2xi1>
+ %0 = "tf.IsFinite"(%arg0) : (tensor<2xf32>) -> tensor<2xi1>
+ return %0 : tensor<2xi1>
+}
+
+// CHECK-LABEL: func @is_finite_dynamic
+func @is_finite_dynamic(%arg0: tensor<?xf32>) -> tensor<?xi1> {
+ // CHECK: "xla_hlo.is_finite"(%arg0) : (tensor<?xf32>) -> tensor<?xi1>
+ %0 = "tf.IsFinite"(%arg0) : (tensor<?xf32>) -> tensor<?xi1>
+ return %0 : tensor<?xi1>
+}
+
+// CHECK-LABEL: func @is_finite_unranked
+func @is_finite_unranked(%arg0: tensor<*xf32>) -> tensor<*xi1> {
+ // CHECK: "xla_hlo.is_finite"(%arg0) : (tensor<*xf32>) -> tensor<*xi1>
+ %0 = "tf.IsFinite"(%arg0) : (tensor<*xf32>) -> tensor<*xi1>
+ return %0 : tensor<*xi1>
+}
+
// CHECK-LABEL: @log
func @log(%arg0: tensor<2xf32>) -> tensor<2xf32> {
// CHECK: "xla_hlo.log"(%arg0) : (tensor<2xf32>) -> tensor<2xf32>
diff --git a/tensorflow/compiler/mlir/xla/tests/lhlo_ops.mlir b/tensorflow/compiler/mlir/xla/tests/lhlo_ops.mlir
index bb17b70..19e5be9 100644
--- a/tensorflow/compiler/mlir/xla/tests/lhlo_ops.mlir
+++ b/tensorflow/compiler/mlir/xla/tests/lhlo_ops.mlir
@@ -1,15 +1,5 @@
// RUN: tf-opt %s -verify-diagnostics -split-input-file
-// -----
-
-func @enforce_static_shapes(%arg0: memref<?xf32>, %arg1: memref<?xf32>) -> () {
- // expected-error@+1{{op operand #0 must be statically shaped memref of floating-point or integer values}}
- "xla_lhlo.tanh"(%arg0, %arg1) : (memref<?xf32>, memref<?xf32>) -> ()
- return
-}
-
-// -----
-
func @enforce_same_shape(%arg0: memref<1xf32>, %arg1: memref<2xf32>) -> () {
// expected-error@+1{{'xla_lhlo.tanh' op requires all operands to have the same type}}
"xla_lhlo.tanh"(%arg0, %arg1) : (memref<1xf32>, memref<2xf32>) -> ()
diff --git a/tensorflow/compiler/mlir/xla/tests/translate/get_dimension_size.mlir b/tensorflow/compiler/mlir/xla/tests/translate/get_dimension_size.mlir
new file mode 100644
index 0000000..44ff3f1
--- /dev/null
+++ b/tensorflow/compiler/mlir/xla/tests/translate/get_dimension_size.mlir
@@ -0,0 +1,10 @@
+// RUN: tf-mlir-translate -mlir-hlo-to-hlo-text %s | FileCheck %s
+
+func @main(%arg: tensor<4x2xf32>) -> tensor<i32> {
+ %0 = "xla_hlo.get_dimension_size"(%arg) {dimension = 1 : i32} : (tensor<4x2xf32>) -> tensor<i32>
+ return %0 : tensor<i32>
+}
+
+// CHECK-LABEL: ENTRY
+// CHECK: [[ARG:%.*]] = f32[4,2] parameter(0)
+// CHECK: s32[] get-dimension-size(f32[4,2] [[ARG]]), dimensions={1}
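
The op takes the dimension as a static i32 attribute and returns that dimension's extent as an s32 scalar: here the f32[4,2] operand with dimension = 1 yields 2, and dimension = 0 would yield 4. A trivial host analogue, names ours:

    #include <cstdint>
    #include <vector>

    // Extent of one dimension as a 32-bit scalar, e.g. shape {4, 2}, dim 1 -> 2.
    int32_t GetDimensionSize(const std::vector<int64_t>& shape, int dim) {
      return static_cast<int32_t>(shape[dim]);
    }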
diff --git a/tensorflow/compiler/mlir/xla/tests/translate/ops.hlotxt b/tensorflow/compiler/mlir/xla/tests/translate/ops.hlotxt
index c5e4280..77d7425 100644
--- a/tensorflow/compiler/mlir/xla/tests/translate/ops.hlotxt
+++ b/tensorflow/compiler/mlir/xla/tests/translate/ops.hlotxt
@@ -317,6 +317,14 @@
ROOT %floor.2 = f32[16] floor(f32[16] %arg0.1)
}
+// CHECK-LABEL: func @test_get_dimension_size
+// CHECK-SAME: ([[ARG:%.*]]: tensor<4x2xf32>)
+%test_get_dimension_size (Arg_0.1: f32[4,2]) -> s32[] {
+ %Arg_0.1 = f32[4,2] parameter(0)
+ // CHECK-NEXT: "xla_hlo.get_dimension_size"([[ARG]]) {dimension = 1 : i32, name = "{{.*}}"} : (tensor<4x2xf32>) -> tensor<i32>
+ ROOT %get-dimension-size.2 = s32[] get-dimension-size(f32[4,2] %Arg_0.1), dimensions={1}
+}
+
// CHECK-LABEL: func @test_imag
%test_imag (Arg_0.1: c64[4]) -> f32[4] {
%Arg_0.1 = c64[4] parameter(0)
diff --git a/tensorflow/compiler/mlir/xla/tests/translate/pad.mlir b/tensorflow/compiler/mlir/xla/tests/translate/pad.mlir
index 5d8608b..d4fba83 100644
--- a/tensorflow/compiler/mlir/xla/tests/translate/pad.mlir
+++ b/tensorflow/compiler/mlir/xla/tests/translate/pad.mlir
@@ -10,4 +10,3 @@
// CHECK: [[PADDING_VAL:%.*]] = f32[] parameter(1)
// CHECK-LABEL: ROOT
// CHECK-SAME: f32[13,19] pad(f32[4,6] [[ARG]], f32[] [[PADDING_VAL]]), padding=2_4_1x3_5_1
-// CHECK: }
diff --git a/tensorflow/compiler/mlir/xla/tests/translate/slice.mlir b/tensorflow/compiler/mlir/xla/tests/translate/slice.mlir
new file mode 100644
index 0000000..3f31a00
--- /dev/null
+++ b/tensorflow/compiler/mlir/xla/tests/translate/slice.mlir
@@ -0,0 +1,11 @@
+// RUN: tf-mlir-translate -mlir-hlo-to-hlo-text %s | FileCheck %s
+
+func @main(%arg: tensor<3x4xi32>) -> tensor<1x2xi32> {
+ %0 = "xla_hlo.slice"(%arg) {start_indices = dense<[1, 0]> : tensor<2xi64>, limit_indices = dense<[2, 4]> : tensor<2xi64>, strides = dense<[1, 2]> : tensor<2xi64>} : (tensor<3x4xi32>) -> tensor<1x2xi32>
+ return %0 : tensor<1x2xi32>
+}
+
+// CHECK-LABEL: ENTRY
+// CHECK: [[ARG:%.*]] = s32[3,4] parameter(0)
+// CHECK-LABEL: ROOT
+// CHECK-SAME: s32[1,2] slice(s32[3,4] [[ARG]]), slice={[1:2:1], [0:4:2]}
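
The expected HLO follows from the attributes: each output dimension has extent ceil((limit - start) / stride), so dimension 0 keeps (2 - 1) / 1 = 1 row (index 1) and dimension 1 keeps ceil((4 - 0) / 2) = 2 columns (indices 0 and 2), giving the s32[1,2] result; the slice={[1:2:1], [0:4:2]} notation lists [start:limit:stride] per dimension.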
diff --git a/tensorflow/compiler/mlir/xla/transforms/legalize_tf.cc b/tensorflow/compiler/mlir/xla/transforms/legalize_tf.cc
index 28f5aeb..b1d9eb1 100644
--- a/tensorflow/compiler/mlir/xla/transforms/legalize_tf.cc
+++ b/tensorflow/compiler/mlir/xla/transforms/legalize_tf.cc
@@ -92,8 +92,7 @@
Builder *builder) {
RankedTensorType ty = RankedTensorType::get(
{static_cast<int64_t>(values.size())}, builder->getIntegerType(64));
- return DenseElementsAttr::get<int64_t>(ty, values)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, values);
}
// Converts an ArrayAttr to a 1D 64-bit dense elements attribute.
@@ -101,8 +100,7 @@
RankedTensorType ty =
RankedTensorType::get(static_cast<int64_t>(attr.size()),
IntegerType::get(64, attr.getContext()));
- return DenseElementsAttr::get(ty, attr.getValue())
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, attr.getValue());
}
static IntegerAttr GetHLOAxisFromTFAxis(ElementsAttr attr, int64_t rank,
@@ -232,8 +230,7 @@
auto inputType = input->getType().cast<RankedTensorType>();
size_t featureDim = getFeatureDimension(format, inputType);
RankedTensorType type = RankedTensorType::get(1, b.getIntegerType(64));
- return DenseIntElementsAttr::get(type, featureDim)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(type, featureDim);
}
//===----------------------------------------------------------------------===//
@@ -269,9 +266,8 @@
}
}
- return DenseIntElementsAttr::get<int64_t>(
- RankedTensorType::get({shape[0]}, element_type), values)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(
+ RankedTensorType::get({shape[0]}, element_type), values);
}
//===----------------------------------------------------------------------===//
@@ -293,7 +289,7 @@
else
llvm_unreachable("unhandled element type");
- return DenseIntElementsAttr::get(valType, elementAttr);
+ return DenseElementsAttr::get(valType, elementAttr);
}
// Returns whether the two values are guaranteed to be broadcastable to the
@@ -354,8 +350,7 @@
RankedTensorType type =
RankedTensorType::get({minRank}, b.getIntegerType(64));
- return DenseIntElementsAttr::get<int64_t>(type, broadcastDimensions)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(type, broadcastDimensions);
}
// Return a new TensorType the same rank and dimensions as the input with an
@@ -385,8 +380,7 @@
std::iota(vals.begin(), vals.end(), start);
TensorType ty = RankedTensorType::get({size}, builder->getIntegerType(64));
- return DenseIntElementsAttr::get<int64_t>(ty, vals)
- .cast<DenseIntElementsAttr>();
+ return DenseIntElementsAttr::get(ty, vals);
}
// Returns the type to use for accumulating the given type.
@@ -781,8 +775,7 @@
DenseIntElementsAttr::get(
RankedTensorType::get({shaped_type.getRank()},
rewriter.getIntegerType(64)),
- shaped_type.getShape())
- .cast<DenseIntElementsAttr>());
+ shaped_type.getShape()));
auto scaled_input = rewriter.create<xla_hlo::MulOp>(
op.getLoc(), operand, constant_ones, DenseIntElementsAttr());
@@ -1468,8 +1461,7 @@
}
RankedTensorType paddings_ty = mlir::RankedTensorType::get(
{num_spatial_dims, 2}, rewriter.getIntegerType(64));
- auto paddings_attr =
- DenseIntElementsAttr::get<int64_t>(paddings_ty, conv_paddings);
+ auto paddings_attr = DenseIntElementsAttr::get(paddings_ty, conv_paddings);
auto spatial_dims_attr = GetI64ElementsAttr(spatial_dims, &rewriter);
Value *filter = op.filter();
@@ -1492,8 +1484,7 @@
Value *result = rewriter.create<xla_hlo::ConvOp>(
loc, op.getType(), op.out_backprop(), filter,
/*window_strides=*/GetI64ElementsAttr(ones, &rewriter),
- /*padding=*/paddings_attr.cast<DenseIntElementsAttr>(),
- GetI64ElementsAttr(lhs_dilation, &rewriter),
+ /*padding=*/paddings_attr, GetI64ElementsAttr(lhs_dilation, &rewriter),
GetI64ElementsAttr(rhs_dilation, &rewriter),
xla_hlo::ConvDimensionNumbers::get(
/*input_batch_dimension=*/batch_dim_attr,
@@ -1681,8 +1672,7 @@
RankedTensorType paddings_ty = mlir::RankedTensorType::get(
{num_spatial_dims, 2}, rewriter.getIntegerType(64));
- auto paddings_attr =
- DenseIntElementsAttr::get<int64_t>(paddings_ty, conv_padding);
+ auto paddings_attr = DenseIntElementsAttr::get(paddings_ty, conv_padding);
auto out_spatial_dims_attr =
GetI64ElementsAttrForSeq(0, num_spatial_dims, &rewriter);
auto kernel_spatial_dims_attr =
@@ -1695,8 +1685,7 @@
Value *result = rewriter.create<xla_hlo::ConvOp>(
loc, op.getType(), op.input(), op.out_backprop(),
/*window_strides=*/GetI64ElementsAttr(window_strides, &rewriter),
- /*padding=*/paddings_attr.cast<DenseIntElementsAttr>(),
- GetI64ElementsAttr(lhs_dilation, &rewriter),
+ /*padding=*/paddings_attr, GetI64ElementsAttr(lhs_dilation, &rewriter),
GetI64ElementsAttr(rhs_dilation, &rewriter),
xla_hlo::ConvDimensionNumbers::get(
// Swap batch_dim and feature_dim in the activations.
@@ -1813,8 +1802,8 @@
target.addLegalDialect<XlaHloDialect>();
if (!allow_partial_conversion) {
- target.addLegalOp<mlir::ModuleOp, mlir::FuncOp, mlir::ModuleTerminatorOp,
- mlir::ReturnOp>();
+ target.addLegalOp<mlir::CallOp, mlir::ModuleOp, mlir::FuncOp,
+ mlir::ModuleTerminatorOp, mlir::ReturnOp>();
return applyFullConversion(op, target, patterns);
}
diff --git a/tensorflow/compiler/mlir/xla/transforms/legalize_tf_patterns.td b/tensorflow/compiler/mlir/xla/transforms/legalize_tf_patterns.td
index f6a1e3f..ebbc289 100644
--- a/tensorflow/compiler/mlir/xla/transforms/legalize_tf_patterns.td
+++ b/tensorflow/compiler/mlir/xla/transforms/legalize_tf_patterns.td
@@ -268,10 +268,10 @@
//===----------------------------------------------------------------------===//
def ZeroPaddingAttr : NativeCodeCall <
- "DenseElementsAttr::get("
- "RankedTensorType::get($0.getType().getShape()[0], getElementTypeOrSelf($0.getType())), "
- "{$_builder.getZeroAttr(getElementTypeOrSelf($0.getType()))})"
- ".cast<DenseIntElementsAttr>()">;
+ "DenseIntElementsAttr::get("
+ "RankedTensorType::get($0.getType().getShape()[0],"
+ " getElementTypeOrSelf($0.getType())), "
+ "{$_builder.getZeroAttr(getElementTypeOrSelf($0.getType()))})">;
class SliceDenseIntElementsAttrColumn2D<string column> : NativeCodeCall<
"SliceDenseIntElementsAttrColumn2D("
@@ -380,6 +380,7 @@
[TF_ExpOp, HLO_ExpOp],
[TF_FloorOp, HLO_FloorOp],
[TF_ImagOp, HLO_ImagOp],
+ [TF_IsFiniteOp, HLO_IsFiniteOp],
[TF_LogOp, HLO_LogOp],
[TF_NegOp, HLO_NegOp],
[TF_RealOp, HLO_RealOp],
diff --git a/tensorflow/compiler/tf2xla/kernels/case_op.cc b/tensorflow/compiler/tf2xla/kernels/case_op.cc
index 9b3770c..748006a 100644
--- a/tensorflow/compiler/tf2xla/kernels/case_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/case_op.cc
@@ -132,7 +132,6 @@
// Compile each branch of the conditional.
XlaCompiler::CompileOptions options;
options.use_tuple_arg = true;
- options.resolve_compile_time_constants = false;
options.return_updated_values_for_all_resources = true;
options.is_entry_computation = false;
options.add_token_input_output = has_token_input_output_;
diff --git a/tensorflow/compiler/tf2xla/kernels/if_op.cc b/tensorflow/compiler/tf2xla/kernels/if_op.cc
index a7dd1bb..c46c093 100644
--- a/tensorflow/compiler/tf2xla/kernels/if_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/if_op.cc
@@ -131,7 +131,6 @@
// Compile both branches of the conditional.
XlaCompiler::CompileOptions options;
options.use_tuple_arg = true;
- options.resolve_compile_time_constants = false;
options.return_updated_values_for_all_resources = true;
options.is_entry_computation = false;
options.add_token_input_output = has_token_input_output_;
diff --git a/tensorflow/compiler/tf2xla/kernels/reduce_window_op.cc b/tensorflow/compiler/tf2xla/kernels/reduce_window_op.cc
index dacdbc8..8bd8edc 100644
--- a/tensorflow/compiler/tf2xla/kernels/reduce_window_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/reduce_window_op.cc
@@ -81,7 +81,6 @@
XlaCompiler::CompileOptions compile_options;
compile_options.use_tuple_arg = false;
- compile_options.resolve_compile_time_constants = false;
compile_options.is_entry_computation = false;
compile_options.always_return_tuple = false;
XlaCompiler::CompilationResult reducer;
diff --git a/tensorflow/compiler/tf2xla/kernels/while_op.cc b/tensorflow/compiler/tf2xla/kernels/while_op.cc
index 36c35f3..b585405 100644
--- a/tensorflow/compiler/tf2xla/kernels/while_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/while_op.cc
@@ -329,7 +329,6 @@
XlaCompiler::CompileOptions body_options;
body_options.use_tuple_arg = true;
body_options.return_updated_values_for_all_resources = true;
- body_options.resolve_compile_time_constants = false;
body_options.is_entry_computation = false;
body_options.add_token_input_output = has_token_input_output_;
XlaCompiler::CompilationResult body;
@@ -422,7 +421,6 @@
XlaCompiler::CompileOptions cond_options;
cond_options.use_tuple_arg = true;
- cond_options.resolve_compile_time_constants = false;
cond_options.is_entry_computation = false;
cond_options.add_token_input_output = has_token_input_output_;
XlaCompiler::CompilationResult cond;
diff --git a/tensorflow/compiler/tf2xla/kernels/xla_reduce_op.cc b/tensorflow/compiler/tf2xla/kernels/xla_reduce_op.cc
index fc2425f..8b481d5 100644
--- a/tensorflow/compiler/tf2xla/kernels/xla_reduce_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/xla_reduce_op.cc
@@ -65,7 +65,6 @@
XlaCompiler::CompileOptions compile_options;
compile_options.use_tuple_arg = false;
compile_options.always_return_tuple = false;
- compile_options.resolve_compile_time_constants = false;
compile_options.is_entry_computation = false;
XlaCompiler::CompilationResult reducer;
OP_REQUIRES_OK(context, context->compiler()->CompileFunction(
diff --git a/tensorflow/compiler/tf2xla/kernels/xla_select_and_scatter_op.cc b/tensorflow/compiler/tf2xla/kernels/xla_select_and_scatter_op.cc
index 9043af9..7eaab34 100644
--- a/tensorflow/compiler/tf2xla/kernels/xla_select_and_scatter_op.cc
+++ b/tensorflow/compiler/tf2xla/kernels/xla_select_and_scatter_op.cc
@@ -60,7 +60,6 @@
XlaCompiler::CompileOptions compile_options;
compile_options.use_tuple_arg = false;
- compile_options.resolve_compile_time_constants = false;
compile_options.is_entry_computation = false;
compile_options.always_return_tuple = false;
diff --git a/tensorflow/compiler/tf2xla/xla_compiler.cc b/tensorflow/compiler/tf2xla/xla_compiler.cc
index 730fc36..9d10be1 100644
--- a/tensorflow/compiler/tf2xla/xla_compiler.cc
+++ b/tensorflow/compiler/tf2xla/xla_compiler.cc
@@ -1226,22 +1226,6 @@
return Status::OK();
}
-// Converts the value of any expressions whose values are known at compile-time
-// to constants.
-Status ResolveConstantExpressionsToConstants(
- xla::Client* client, absl::Span<XlaExpression> expressions) {
- for (XlaExpression& expression : expressions) {
- if (expression.kind() == XlaExpression::Kind::kXlaOp) {
- TF_ASSIGN_OR_RETURN(absl::optional<Tensor> constant,
- expression.ResolveConstant(client));
- if (constant.has_value()) {
- expression = XlaExpression::Constant(*constant);
- }
- }
- }
- return Status::OK();
-}
-
void ConvertConstantsToExpressions(xla::XlaBuilder* builder,
absl::Span<XlaExpression> expressions) {
for (XlaExpression& expression : expressions) {
@@ -1360,21 +1344,7 @@
result->computation = std::make_shared<xla::XlaComputation>();
result->outputs.resize(context->retvals().size());
std::vector<XlaExpression> retvals = context->retvals();
- if (options.resolve_compile_time_constants) {
- Status status = ResolveConstantExpressionsToConstants(
- client(), absl::Span<XlaExpression>(retvals));
-
- // If the HloEvaluator has not implemented an expression, just evaluate it
- // at runtime.
- if (status.code() == error::UNIMPLEMENTED) {
- ConvertConstantsToExpressions(&builder,
- absl::Span<XlaExpression>(retvals));
- } else {
- TF_RETURN_IF_ERROR(status);
- }
- } else {
- ConvertConstantsToExpressions(&builder, absl::Span<XlaExpression>(retvals));
- }
+ ConvertConstantsToExpressions(&builder, absl::Span<XlaExpression>(retvals));
TF_RETURN_IF_ERROR(BuildComputation(
real_args, retvals, arg_shardings, retval_shardings, context->resources(),
std::move(token_output),
diff --git a/tensorflow/compiler/tf2xla/xla_compiler.h b/tensorflow/compiler/tf2xla/xla_compiler.h
index c7bddd1..c3e9b3e 100644
--- a/tensorflow/compiler/tf2xla/xla_compiler.h
+++ b/tensorflow/compiler/tf2xla/xla_compiler.h
@@ -199,12 +199,6 @@
// the input and output signatures match.
bool return_updated_values_for_all_resources = false;
- // If 'resolve_compile_time_constants' is true, then outputs of a
- // computation that are known to be compile-time constants will be returned
- // as Tensors at compile-time, rather than as run-time outputs of the
- // computation.
- bool resolve_compile_time_constants = true;
-
// If 'always_return_tuple' is true, then the output of a computation will
// always be a tuple. Otherwise, a single-element output will not be wrapped
// in a tuple.
diff --git a/tensorflow/compiler/tf2xla/xla_compiler_test.cc b/tensorflow/compiler/tf2xla/xla_compiler_test.cc
index c9de03a..4a239c3 100644
--- a/tensorflow/compiler/tf2xla/xla_compiler_test.cc
+++ b/tensorflow/compiler/tf2xla/xla_compiler_test.cc
@@ -573,48 +573,12 @@
XlaCompiler::Options options = DefaultOptions();
XlaCompiler compiler(options);
- {
- // Compiles the graph, with resolve_compile_time_constants enabled.
+ {
std::unique_ptr<Graph> graph_copy(new Graph(OpRegistry::Global()));
CopyGraph(*graph, graph_copy.get());
XlaCompiler::CompileOptions compile_options;
- compile_options.resolve_compile_time_constants = true;
- XlaCompiler::CompilationResult result;
- TF_ASSERT_OK(compiler.CompileGraph(compile_options, "constants",
- std::move(graph_copy), args,
- /*user_aliases=*/{}, &result));
-
- ASSERT_EQ(2, result.outputs.size());
- EXPECT_TRUE(result.outputs[0].is_constant);
- test::ExpectTensorEqual<int32>(result.outputs[0].constant_value,
- test::AsScalar(7));
- EXPECT_FALSE(result.outputs[1].is_constant);
-
- // Tests that the generated computation works.
- xla::Literal param0_literal = xla::LiteralUtil::CreateR1<int32>({7, 42});
- std::unique_ptr<xla::GlobalData> param0_data =
- client_->TransferToServer(param0_literal).ConsumeValueOrDie();
-
- std::unique_ptr<xla::GlobalData> actual =
- client_->Execute(*result.computation, {param0_data.get()})
- .ConsumeValueOrDie();
- xla::Literal actual_literal =
- client_->Transfer(*actual).ConsumeValueOrDie();
-
- xla::Literal expected0 = xla::LiteralUtil::CreateR1<int32>({-7, -42});
- xla::Literal expected_literal = xla::LiteralUtil::MakeTuple({&expected0});
- EXPECT_TRUE(xla::LiteralTestUtil::Equal(expected_literal, actual_literal));
- }
-
- {
- // Compiles the graph, with resolve_compile_time_constants disabled.
- std::unique_ptr<Graph> graph_copy(new Graph(OpRegistry::Global()));
- CopyGraph(*graph, graph_copy.get());
-
- XlaCompiler::CompileOptions compile_options;
- compile_options.resolve_compile_time_constants = false;
XlaCompiler::CompilationResult result;
TF_ASSERT_OK(compiler.CompileGraph(compile_options, "constants",
std::move(graph_copy), args,
@@ -701,16 +665,12 @@
XlaCompiler compiler(options);
XlaCompiler::CompileOptions compile_options;
- compile_options.resolve_compile_time_constants = true;
XlaCompiler::CompilationResult result;
TF_ASSERT_OK(compiler.CompileGraph(compile_options, "constants",
std::move(graph), args,
/*user_aliases=*/{}, &result));
ASSERT_EQ(2, result.outputs.size());
- EXPECT_TRUE(result.outputs[0].is_constant);
- test::ExpectTensorEqual<int32>(result.outputs[0].constant_value,
- test::AsScalar(7));
EXPECT_FALSE(result.outputs[1].is_constant);
}
@@ -1869,7 +1829,6 @@
XlaCompiler::CompilationResult result;
auto options = XlaCompiler::CompileOptions();
- options.resolve_compile_time_constants = false;
TF_ASSERT_OK(compiler.CompileGraph(options, "test", std::move(graph), args,
/*user_aliases=*/{}, &result));
diff --git a/tensorflow/compiler/xla/g3doc/shapes.md b/tensorflow/compiler/xla/g3doc/shapes.md
index 39e74ff..c7612ac 100644
--- a/tensorflow/compiler/xla/g3doc/shapes.md
+++ b/tensorflow/compiler/xla/g3doc/shapes.md
@@ -120,7 +120,7 @@
dimension is padded. If present, the number of elements in `padded_dimensions`
must equal the rank of the shape.
-For example, given the `[2 x 3]` array defined above, if `padded_dimension` is
+For example, given the `[2 x 3]` array defined above, if `padded_dimensions` is
`[3, 5]` then dimension 0 is padded to a width of 3 and dimension 1 is padded to
a width of 5. The layout in linear memory (assuming a padding value of 0 and
column-major layout) is:
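
The listing itself falls outside this hunk. As a hedged, standalone illustration (hypothetical, not part of the doc change), a small C++ sketch that computes the padded column-major layout described above:

    // Hypothetical sketch: lay out a [2 x 3] array with padded_dimensions = [3, 5]
    // in column-major order (dimension 0 minor), padding value 0.
    #include <cstdio>

    int main() {
      int a[2][3] = {{1, 2, 3}, {4, 5, 6}};
      int padded[3 * 5] = {0};  // 15 linear slots, pre-filled with the padding value
      for (int d1 = 0; d1 < 3; ++d1) {    // logical extent of dimension 1
        for (int d0 = 0; d0 < 2; ++d0) {  // logical extent of dimension 0
          padded[d1 * 3 + d0] = a[d0][d1];  // stride of dim 1 = padded dim 0 = 3
        }
      }
      for (int i = 0; i < 15; ++i) std::printf("%d ", padded[i]);
      std::printf("\n");  // prints: 1 4 0 2 5 0 3 6 0 0 0 0 0 0 0
      return 0;
    }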
diff --git a/tensorflow/compiler/xla/python/BUILD b/tensorflow/compiler/xla/python/BUILD
index 398727f..4345871 100644
--- a/tensorflow/compiler/xla/python/BUILD
+++ b/tensorflow/compiler/xla/python/BUILD
@@ -26,6 +26,7 @@
name = "xla_client_test",
srcs = ["xla_client_test.py"],
main = "xla_client_test.py",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_oss"], # TODO(phawkins): This test passes, but requires --config=monolithic.
deps = [
diff --git a/tensorflow/compiler/xla/python/local_client.cc b/tensorflow/compiler/xla/python/local_client.cc
index a8c1785..ef8ff42 100644
--- a/tensorflow/compiler/xla/python/local_client.cc
+++ b/tensorflow/compiler/xla/python/local_client.cc
@@ -157,10 +157,10 @@
static std::shared_ptr<Device> MakeDevice(const std::string& platform_name,
int id, int local_device_ordinal) {
if (platform_name == "cpu") {
- return std::make_shared<CpuDevice>(id, local_device_ordinal);
+ return std::make_shared<CpuDevice>(id, local_device_ordinal, platform_name);
} else {
CHECK_EQ(platform_name, "gpu");
- return std::make_shared<GpuDevice>(id, local_device_ordinal);
+ return std::make_shared<GpuDevice>(id, local_device_ordinal, platform_name);
}
}
diff --git a/tensorflow/compiler/xla/python/local_client.h b/tensorflow/compiler/xla/python/local_client.h
index aa8e457..3f13f62 100644
--- a/tensorflow/compiler/xla/python/local_client.h
+++ b/tensorflow/compiler/xla/python/local_client.h
@@ -43,10 +43,12 @@
class Device {
public:
- explicit Device(int id, int local_device_ordinal, int host_id = 0)
+ explicit Device(int id, int local_device_ordinal,
+ absl::string_view platform_name, int host_id = 0)
: id_(id),
local_device_ordinal_(local_device_ordinal),
- host_id_(host_id) {}
+ host_id_(host_id),
+ platform_name_(platform_name) {}
virtual ~Device() {}
// The ID of this device. IDs are unique among devices of this type
@@ -65,12 +67,15 @@
// The ID of this device's host. This is always 0 on single-host platforms.
int host_id() const { return host_id_; }
+ const std::string& platform_name() const { return platform_name_; }
+
virtual std::string DebugString() const = 0;
private:
const int id_;
const int local_device_ordinal_;
const int host_id_;
+ const std::string platform_name_;
};
class CpuDevice : public Device {
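
The hunk cuts off at the start of CpuDevice's declaration. For context, a minimal sketch (hypothetical, not the patch's actual subclass code) of how a Device subclass now threads the platform name through to the base class; it assumes the Device accessors from local_client.h and absl string headers:

    // Hypothetical sketch of a subclass forwarding the new constructor parameter.
    #include <string>
    #include "absl/strings/str_cat.h"
    #include "absl/strings/string_view.h"

    class CpuDevice : public Device {
     public:
      CpuDevice(int id, int local_device_ordinal, absl::string_view platform_name)
          : Device(id, local_device_ordinal, platform_name) {}
      std::string DebugString() const override {
        // platform_name() is the accessor added by this patch.
        return absl::StrCat(platform_name(), ":", id());
      }
    };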
diff --git a/tensorflow/compiler/xla/python/tpu_driver/BUILD b/tensorflow/compiler/xla/python/tpu_driver/BUILD
index ecce28c..e34b8de 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/BUILD
+++ b/tensorflow/compiler/xla/python/tpu_driver/BUILD
@@ -58,6 +58,7 @@
srcs = [
"grpc_tpu_driver.cc",
],
+ hdrs = ["grpc_tpu_driver.h"],
deps = [
":tpu_driver",
"//tensorflow/core/platform:logging",
diff --git a/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.cc b/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.cc
index c251d14..b9ca2a7 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.cc
+++ b/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.cc
@@ -42,7 +42,7 @@
int id, int local_device_ordinal) {
CHECK_EQ(platform_name, "tpu");
CHECK_EQ(id, local_device_ordinal); // Every device must be local for now.
- return std::make_shared<TpuDevice>(id, local_device_ordinal);
+ return std::make_shared<TpuDevice>(id, local_device_ordinal, "tpu");
}
StatusOr<std::shared_ptr<PyTpuClient>> PyTpuClient::Get(
@@ -380,14 +380,17 @@
}
tpu_driver::TpuDriver* driver = client_->driver();
- tpu_driver::BufferHandle* src_handle = src_device_buffer->handle.get();
TF_ASSIGN_OR_RETURN(
std::unique_ptr<PyTpuBuffer> dst_buffer,
CreateBuffer(
on_host_shape_,
- [driver, src_handle](tpu_driver::BufferHandle* dst_handle) {
- return driver->TransferFromDeviceToDevice(src_handle, dst_handle,
- {});
+ [driver, src_device_buffer](tpu_driver::BufferHandle* dst_handle) {
+ std::vector<tpu_driver::Event*> src_wait_for_use;
+ for (auto& event : src_device_buffer->wait_for_use) {
+ src_wait_for_use.push_back(event.get());
+ }
+ return driver->TransferFromDeviceToDevice(
+ src_device_buffer->handle.get(), dst_handle, src_wait_for_use);
},
client_, dst_device_ordinal));
// TODO(jiawenhao): This may be too pessimistic: it prevents future readers
diff --git a/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client_extension.cc b/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client_extension.cc
index f645349..e7d1e2e 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client_extension.cc
+++ b/tensorflow/compiler/xla/python/tpu_driver/client/tpu_client_extension.cc
@@ -61,6 +61,12 @@
std::shared_ptr<Device> device)
-> StatusOr<std::unique_ptr<PyTpuBuffer>> {
CHECK(device != nullptr);
+ auto iter = client->id_to_device().find(device->id());
+ if (iter->second != device) {
+ return InvalidArgument(
+ "Cannot copy value to device '%s' with '%s' backend",
+ device->DebugString(), client->platform_name());
+ }
GlobalPyRefManager()->CollectGarbage();
TF_ASSIGN_OR_RETURN(PythonBufferTree tree,
GetPythonBufferTree(argument));
@@ -105,8 +111,15 @@
.def_static("make_tuple",
[](const std::vector<PyTpuBuffer*> buffers,
std::shared_ptr<PyTpuClient> client,
- std::shared_ptr<Device> device) {
+ std::shared_ptr<Device> device)
+ -> StatusOr<std::unique_ptr<PyTpuBuffer>> {
CHECK(device != nullptr);
+ auto iter = client->id_to_device().find(device->id());
+ if (iter->second != device) {
+ return InvalidArgument(
+ "Cannot make tuple on device '%s' with '%s' backend",
+ device->DebugString(), client->platform_name());
+ }
return PyTpuBuffer::MakeTuple(
buffers, client, device->local_device_ordinal());
})
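
The same device-ownership check now appears in both the from_python and make_tuple bindings. A hypothetical helper (not in the patch, using the Status types already in this file) could factor it out and additionally guard against a missing map entry:

    // Hypothetical helper: verify that `device` really belongs to `client`
    // before enqueueing work on it.
    Status CheckDeviceBelongsToClient(const std::shared_ptr<PyTpuClient>& client,
                                      const std::shared_ptr<Device>& device) {
      auto iter = client->id_to_device().find(device->id());
      if (iter == client->id_to_device().end() || iter->second != device) {
        return InvalidArgument("Device '%s' does not belong to the '%s' backend",
                               device->DebugString(), client->platform_name());
      }
      return Status::OK();
    }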
diff --git a/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.cc b/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.cc
index 4492758..e01aab1 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.cc
+++ b/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.cc
@@ -313,8 +313,10 @@
class GrpcTpuDriver : public TpuDriver {
public:
- explicit GrpcTpuDriver(const TpuDriverConfig& config, int32_t client_id)
- : config_(config), client_id_(client_id) {
+ explicit GrpcTpuDriver(const TpuDriverConfig& config,
+ std::shared_ptr<::grpc::ChannelCredentials> creds,
+ int32_t client_id)
+ : config_(config), creds_(creds), client_id_(client_id) {
SystemInfo system_info;
QuerySystemInfo(&system_info);
for (auto& chip_info : system_info.tpu_chip()) {
@@ -330,18 +332,12 @@
}
~GrpcTpuDriver() override {
- auto stub = CreateTpuDriverStub(config_);
- ::grpc::ClientContext ctx;
- ctx.set_fail_fast(false);
- ctx.set_deadline(std::chrono::system_clock::now() +
- std::chrono::seconds(10));
- CloseRequest req;
- req.set_client_id(client_id_);
- CloseResponse resp;
- ::grpc::Status status = stub->Close(&ctx, req, &resp);
+ if (closed_) {
+ return;
+ }
+ auto status = Close();
if (!status.ok()) {
- LOG(ERROR) << "Failed to close the gRPC driver: " << status.error_code()
- << ": " << status.error_details();
+ LOG(ERROR) << status;
}
}
@@ -424,20 +420,24 @@
EventId NewOperationId() { return EventId{client_id_, ++operation_id_}; }
static std::unique_ptr<grpc::CloudTpuDriver::Stub> CreateTpuDriverStub(
- const TpuDriverConfig& config);
+ const TpuDriverConfig& config,
+ std::shared_ptr<::grpc::ChannelCredentials> creds);
uint32_t client_id() const { return client_id_; }
private:
+ Status Close();
std::unique_ptr<GrpcTpuStream> AllocateStream(int32_t core_id);
const TpuDriverConfig config_;
+ std::shared_ptr<::grpc::ChannelCredentials> creds_;
const uint32_t client_id_;
// Map from stream IDs to streams.
absl::flat_hash_map<int32_t, std::unique_ptr<GrpcTpuStream>> streams_;
std::unique_ptr<GrpcTpuStream> host_stream_;
// Shared by all streams.
std::atomic<uint64_t> operation_id_{0};
+ std::atomic<bool> closed_{false};
}; // class GrpcTpuDriver
GrpcEvent::~GrpcEvent() { stream_->DeleteEvent(id_); }
@@ -942,8 +942,9 @@
}
/*static*/ std::unique_ptr<grpc::CloudTpuDriver::Stub>
-GrpcTpuDriver::CreateTpuDriverStub(const TpuDriverConfig& config) {
- auto creds = ::grpc::InsecureChannelCredentials();
+GrpcTpuDriver::CreateTpuDriverStub(
+ const TpuDriverConfig& config,
+ std::shared_ptr<::grpc::ChannelCredentials> creds) {
::grpc::ChannelArguments args;
args.SetMaxReceiveMessageSize(std::numeric_limits<int>::max());
args.SetMaxSendMessageSize(std::numeric_limits<int>::max());
@@ -984,7 +985,7 @@
}
std::unique_ptr<GrpcTpuStream> GrpcTpuDriver::AllocateStream(int32_t id) {
- auto stub = CreateTpuDriverStub(config_);
+ auto stub = CreateTpuDriverStub(config_, creds_);
::grpc::ClientContext ctx;
ctx.set_fail_fast(false);
ctx.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(10));
@@ -992,7 +993,7 @@
}
void GrpcTpuDriver::QuerySystemInfo(SystemInfo* system_info) {
- auto stub = CreateTpuDriverStub(config_);
+ auto stub = CreateTpuDriverStub(config_, creds_);
::grpc::ClientContext ctx;
ctx.set_fail_fast(false);
ctx.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(10));
@@ -1002,43 +1003,94 @@
::grpc::Status status = stub->QuerySystemInfo(&ctx, req, &resp);
if (!status.ok()) {
LOG(ERROR) << "QuerySystemInfo request failed: " << status.error_code()
- << ":" << status.error_details();
+ << ": " << status.error_message() << ": "
+ << status.error_details();
return;
}
*system_info = resp.system_info();
}
Status GrpcTpuDriver::Reset() {
- return xla::Unimplemented("GRPC driver reset is not implemented yet.");
+ auto stub = CreateTpuDriverStub(config_, creds_);
+ ::grpc::ClientContext ctx;
+ ctx.set_fail_fast(false);
+ ctx.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(10));
+ ResetRequest req;
+ ResetResponse resp;
+ ::grpc::Status status = stub->Reset(&ctx, req, &resp);
+ if (!status.ok()) {
+ LOG(ERROR) << "Failed to reset the gRPC driver: " << status.error_code()
+ << ": " << status.error_message() << ": "
+ << status.error_details();
+ return xla::Status(tensorflow::error::Code(status.error_code()),
+ absl::StrCat("Failed to reset TPU driver. Error was: ",
+ status.error_message(),
+ ". Details: ", status.error_details()));
+ }
+ streams_.clear();
+ host_stream_.reset();
+ return Close();
+}
+
+Status GrpcTpuDriver::Close() {
+ auto stub = CreateTpuDriverStub(config_, creds_);
+ ::grpc::ClientContext ctx;
+ ctx.set_fail_fast(false);
+ ctx.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(10));
+ CloseRequest req;
+ req.set_client_id(client_id_);
+ CloseResponse resp;
+ ::grpc::Status status = stub->Close(&ctx, req, &resp);
+ if (!status.ok()) {
+ return xla::Status(tensorflow::error::Code(status.error_code()),
+ absl::StrCat("Failed to close TPU driver. Error was: ",
+ status.error_message(),
+ ". Details: ", status.error_details()));
+ }
+ closed_ = true;
+ return Status::OK();
+}
+} // namespace
+
+xla::StatusOr<std::unique_ptr<TpuDriver>> CreateGrpcTpuDriver(
+ const TpuDriverConfig& config,
+ std::shared_ptr<::grpc::ChannelCredentials> creds) {
+ auto stub = GrpcTpuDriver::CreateTpuDriverStub(config, creds);
+ ::grpc::ClientContext ctx;
+ ctx.set_fail_fast(false);
+ ctx.set_deadline(
+ std::chrono::system_clock::now() +
+ std::chrono::seconds(config.grpc().connection_timeout_secs()));
+ OpenRequest req;
+ OpenResponse resp;
+ ::grpc::Status status = stub->Open(&ctx, req, &resp);
+ if (!status.ok()) {
+ LOG(ERROR) << "Failed to open the gRPC driver: " << status.error_code()
+ << ": " << status.error_message() << ": "
+ << status.error_details();
+ return xla::Status(
+ tensorflow::error::Code(status.error_code()),
+ absl::StrCat(
+ "Failed to connect to remote server at address: ", config.worker(),
+ ". Error from gRPC: ", status.error_message(),
+ ". Details: ", status.error_details()));
+ }
+ return std::unique_ptr<TpuDriver>(
+ new GrpcTpuDriver(config, creds, resp.client_id()));
}
REGISTER_TPU_DRIVER(
"grpc://",
[](const TpuDriverConfig& config)
-> xla::StatusOr<std::unique_ptr<TpuDriver>> {
- auto stub = GrpcTpuDriver::CreateTpuDriverStub(config);
- ::grpc::ClientContext ctx;
- ctx.set_fail_fast(false);
- ctx.set_deadline(
- std::chrono::system_clock::now() +
- std::chrono::seconds(config.grpc().connection_timeout_secs()));
- OpenRequest req;
- OpenResponse resp;
- ::grpc::Status status = stub->Open(&ctx, req, &resp);
- if (!status.ok()) {
- LOG(ERROR) << "Failed to open the gRPC driver: " << status.error_code()
- << ": " << status.error_message() << ": "
- << status.error_details();
- return xla::Status(
- tensorflow::error::Code(status.error_code()),
- absl::StrCat("Failed to connect to remote server at address: ",
- config.worker(),
- ". Error from gRPC: ", status.error_message(),
- ". Details: ", status.error_details()));
+ if (absl::StartsWith(config.worker(), "grpc://localhost")) {
+ LOG(INFO) << "Using local credentials for localhost: connection.";
+ return CreateGrpcTpuDriver(
+ config, ::grpc::experimental::LocalCredentials(LOCAL_TCP));
+ } else {
+ return CreateGrpcTpuDriver(config,
+ ::grpc::InsecureChannelCredentials());
}
- return std::unique_ptr<TpuDriver>(
- new GrpcTpuDriver(config, resp.client_id()));
});
-} // namespace
} // namespace tpu_driver
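
Since CreateGrpcTpuDriver is now exported through the new header below, a caller could, hypothetically, bypass the registry and supply its own channel credentials. A minimal usage sketch (not from the patch):

    // Hypothetical usage of the new factory function with caller-chosen creds.
    #include <memory>
    #include "grpcpp/grpcpp.h"
    #include "tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.h"

    xla::StatusOr<std::unique_ptr<tpu_driver::TpuDriver>> OpenDriver(
        const tpu_driver::TpuDriverConfig& config) {
      // Mirror the registry hook above: pick local credentials for localhost
      // targets, insecure credentials otherwise.
      auto creds = ::grpc::InsecureChannelCredentials();
      return tpu_driver::CreateGrpcTpuDriver(config, creds);
    }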
diff --git a/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.h b/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.h
new file mode 100644
index 0000000..a3d146f
--- /dev/null
+++ b/tensorflow/compiler/xla/python/tpu_driver/grpc_tpu_driver.h
@@ -0,0 +1,31 @@
+#ifndef TENSORFLOW_COMPILER_XLA_PYTHON_TPU_DRIVER_GRPC_TPU_DRIVER_H_
+#define TENSORFLOW_COMPILER_XLA_PYTHON_TPU_DRIVER_GRPC_TPU_DRIVER_H_
+
+// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// ==============================================================================
+
+#include "grpcpp/grpcpp.h"
+#include "tensorflow/compiler/xla/python/tpu_driver/tpu_driver.h"
+#include "tensorflow/compiler/xla/python/tpu_driver/tpu_driver.pb.h"
+
+namespace tpu_driver {
+
+xla::StatusOr<std::unique_ptr<TpuDriver>> CreateGrpcTpuDriver(
+ const TpuDriverConfig& config,
+ std::shared_ptr<grpc_impl::ChannelCredentials> credentials);
+
+} // namespace tpu_driver
+
+#endif // TENSORFLOW_COMPILER_XLA_PYTHON_TPU_DRIVER_GRPC_TPU_DRIVER_H_
diff --git a/tensorflow/compiler/xla/python/tpu_driver/tpu_driver.h b/tensorflow/compiler/xla/python/tpu_driver/tpu_driver.h
index 93f001c..36b7fa0 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/tpu_driver.h
+++ b/tensorflow/compiler/xla/python/tpu_driver/tpu_driver.h
@@ -153,11 +153,13 @@
virtual ~TpuDriver() {}
virtual void QuerySystemInfo(SystemInfo* system_info) = 0;
- // Synchronous. Reset the state of the TPU driver. All running programs
- // will be terminated and all allocations reset.
+ // Synchronous. Reset the state of the TPU driver. After Reset(), this TPU
+ // driver object is no longer usable. Users must destroy this object and
+ // create a new one.
//
- // All events and buffer handles created prior to Reset() will be invalid,
- // and any use will result in undefined behavior.
+ // All running programs will be terminated and all allocations reset. All
+ // events and buffer handles created prior to Reset() will be invalid, and any
+ // use will result in undefined behavior.
virtual xla::Status Reset() = 0;
virtual std::unique_ptr<BufferHandle> Allocate(
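
A hedged caller-side sketch of the stricter contract documented above: Reset() terminates everything, and the driver object itself must then be destroyed and re-created. Names and error-handling macros are assumed to be the ones used elsewhere in this codebase:

    // Hypothetical sketch: honoring the new Reset() contract.
    xla::Status ResetAndReconnect(std::unique_ptr<tpu_driver::TpuDriver>* driver,
                                  const tpu_driver::TpuDriverConfig& config) {
      TF_RETURN_IF_ERROR((*driver)->Reset());
      driver->reset();  // After Reset(), the old driver object is unusable.
      TF_ASSIGN_OR_RETURN(
          *driver, tpu_driver::CreateGrpcTpuDriver(
                       config, ::grpc::InsecureChannelCredentials()));
      return xla::Status::OK();
    }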
diff --git a/tensorflow/compiler/xla/python/tpu_driver/tpu_service.proto b/tensorflow/compiler/xla/python/tpu_driver/tpu_service.proto
index cfc8b63..fb09311 100644
--- a/tensorflow/compiler/xla/python/tpu_driver/tpu_service.proto
+++ b/tensorflow/compiler/xla/python/tpu_driver/tpu_service.proto
@@ -157,6 +157,10 @@
message CloseResponse {}
+message ResetRequest {}
+
+message ResetResponse {}
+
message QuerySystemInfoRequest {}
message QuerySystemInfoResponse {
@@ -170,6 +174,9 @@
// Close the driver. Any outstanding requests will be terminated.
rpc Close(CloseRequest) returns (CloseResponse);
+ // Reset the driver. All connected clients will be disconnected.
+ rpc Reset(ResetRequest) returns (ResetResponse);
+
// Query the driver for current system performance information.
rpc QuerySystemInfo(QuerySystemInfoRequest) returns (QuerySystemInfoResponse);
diff --git a/tensorflow/compiler/xla/python/types.h b/tensorflow/compiler/xla/python/types.h
index 94ad945..c67ad72 100644
--- a/tensorflow/compiler/xla/python/types.h
+++ b/tensorflow/compiler/xla/python/types.h
@@ -468,6 +468,69 @@
}
};
+template <>
+struct type_caster<xla::OpSharding> {
+ public:
+ PYBIND11_TYPE_CASTER(xla::OpSharding, _("xla::OpSharding"));
+
+ // PyObject -> C++ conversion.
+ bool load(handle handle_obj, bool) {
+ if (handle_obj.is_none()) {
+ return true;
+ }
+
+ // Sets `type` field.
+ handle sharding_type = getattr(handle_obj, "type");
+ if (!sharding_type.is_none()) {
+ value.set_type(sharding_type.cast<xla::OpSharding_Type>());
+ }
+
+ // Sets `tile_assignment_dimensions` field.
+ std::vector<xla::int64> dims;
+ dims = getattr(handle_obj, "tile_assignment_dimensions")
+ .cast<std::vector<xla::int64>>();
+ std::copy(dims.begin(), dims.end(),
+ tensorflow::protobuf::RepeatedFieldBackInserter(
+ value.mutable_tile_assignment_dimensions()));
+
+ // Sets `tile_assignment_devices` field.
+ std::vector<xla::int64> devices;
+ devices = getattr(handle_obj, "tile_assignment_devices")
+ .cast<std::vector<xla::int64>>();
+ std::copy(devices.begin(), devices.end(),
+ tensorflow::protobuf::RepeatedFieldBackInserter(
+ value.mutable_tile_assignment_devices()));
+
+ // Sets `tuple_shardings` field.
+ sequence tuple_shardings =
+ reinterpret_borrow<sequence>(getattr(handle_obj, "tuple_shardings"));
+
+ for (auto tuple_sharding : tuple_shardings) {
+ xla::OpSharding* sharding = value.add_tuple_shardings();
+
+ handle sharding_type = getattr(tuple_sharding, "type");
+ if (!sharding_type.is_none()) {
+ sharding->set_type(sharding_type.cast<xla::OpSharding_Type>());
+ }
+ std::vector<xla::int64> dims;
+ dims = getattr(tuple_sharding, "tile_assignment_dimensions")
+ .cast<std::vector<xla::int64>>();
+ std::copy(dims.begin(), dims.end(),
+ tensorflow::protobuf::RepeatedFieldBackInserter(
+ sharding->mutable_tile_assignment_dimensions()));
+
+ std::vector<xla::int64> devices;
+ devices = getattr(tuple_sharding, "tile_assignment_devices")
+ .cast<std::vector<xla::int64>>();
+ std::copy(devices.begin(), devices.end(),
+ tensorflow::protobuf::RepeatedFieldBackInserter(
+ sharding->mutable_tile_assignment_devices()));
+ }
+
+ return true;
+ }
+};
+
} // namespace detail
} // namespace pybind11
diff --git a/tensorflow/compiler/xla/python/xla.cc b/tensorflow/compiler/xla/python/xla.cc
index 9191cad..4398561 100644
--- a/tensorflow/compiler/xla/python/xla.cc
+++ b/tensorflow/compiler/xla/python/xla.cc
@@ -48,6 +48,7 @@
#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/statusor.h"
+#include "tensorflow/compiler/xla/xla_data.pb.h"
namespace xla {
@@ -317,6 +318,7 @@
.def_property_readonly("host_id", &Device::host_id,
"Integer ID of this device's host.\n\n"
"This is always 0 except on multi-host platforms.")
+ .def_property_readonly("platform", &Device::platform_name)
.def("__str__", &Device::DebugString);
py::class_<CpuDevice, Device, std::shared_ptr<CpuDevice>>(m, "CpuDevice")
@@ -390,6 +392,12 @@
std::shared_ptr<Device> device)
-> StatusOr<std::unique_ptr<PyLocalBuffer>> {
CHECK(device != nullptr);
+ auto iter = client->id_to_device().find(device->id());
+ if (iter->second != device) {
+ return InvalidArgument(
+ "Cannot copy value to device '%s' with '%s' backend",
+ device->DebugString(), client->platform_name());
+ }
GlobalPyRefManager()->CollectGarbage();
TF_ASSIGN_OR_RETURN(PythonBufferTree tree,
GetPythonBufferTree(argument));
@@ -435,8 +443,15 @@
.def_static("make_tuple",
[](const std::vector<PyLocalBuffer*> buffers,
std::shared_ptr<PyLocalClient> client,
- std::shared_ptr<Device> device) {
+ std::shared_ptr<Device> device)
+ -> StatusOr<std::unique_ptr<PyLocalBuffer>> {
CHECK(device != nullptr);
+ auto iter = client->id_to_device().find(device->id());
+ if (iter->second != device) {
+ return InvalidArgument(
+ "Cannot make tuple on device '%s' with '%s' backend",
+ device->DebugString(), client->platform_name());
+ }
return PyLocalBuffer::MakeTuple(
buffers, client, device->local_device_ordinal());
})
@@ -588,7 +603,9 @@
},
py::arg("root") = absl::nullopt)
.def("IsConstant", &XlaBuilder::IsConstant)
- .def("SetOpMetadata", &XlaBuilder::SetOpMetadata);
+ .def("SetOpMetadata", &XlaBuilder::SetOpMetadata)
+ .def("SetSharding", &XlaBuilder::SetSharding)
+ .def("ClearSharding", &XlaBuilder::ClearSharding);
// ops submodule, containing free functions that add operators to an
// XlaBuilder.
@@ -824,6 +841,12 @@
.value("HIGH", PrecisionConfig::HIGH)
.value("HIGHEST", PrecisionConfig::HIGHEST);
+ py::enum_<OpSharding::Type>(m, "OpSharding_Type")
+ .value("REPLICATED", OpSharding::REPLICATED)
+ .value("MAXIMAL", OpSharding::MAXIMAL)
+ .value("TUPLE", OpSharding::TUPLE)
+ .value("OTHER", OpSharding::OTHER);
+
// TODO(phawkins): improve bindings for these types.
py::class_<ChannelHandle>(m, "ChannelHandle");
} // NOLINT(readability/fn_size)
diff --git a/tensorflow/compiler/xla/python/xla_client.py b/tensorflow/compiler/xla/python/xla_client.py
index a9add36..65db35a 100644
--- a/tensorflow/compiler/xla/python/xla_client.py
+++ b/tensorflow/compiler/xla/python/xla_client.py
@@ -740,6 +740,17 @@
"""Clear metadata for operations that are about to be enqueued."""
self._builder.ClearOpMetadata()
+ def SetSharding(self, sharding):
+ """Set sharding that will be attached to all instructions until cleared."""
+ self._builder.SetSharding(sharding)
+
+ def ClearSharding(self):
+ """Clears the sharding.
+
+ Ops will be sharded according to the default placement policy.
+ """
+ self._builder.ClearSharding()
+
def CreateToken(self):
"""Enqueues a CreateToken op onto the computation.
@@ -1812,6 +1823,20 @@
self.output_spatial_dimensions = []
+class OpSharding(object):
+ """Python representation of a xla.OpSharding protobuf."""
+ __slots__ = ('type', 'tile_assignment_dimensions', 'tile_assignment_devices',
+ 'tuple_shardings')
+
+ Type = _xla.OpSharding_Type
+
+ def __init__(self):
+ self.type = self.Type.REPLICATED
+ self.tile_assignment_dimensions = []
+ self.tile_assignment_devices = []
+ self.tuple_shardings = []
+
+
class PrecisionConfig(object):
"""Python representation of a xla.PrecisionConfig protobuf."""
__slots__ = ('operand_precision',)
diff --git a/tensorflow/compiler/xla/python/xla_client_test.py b/tensorflow/compiler/xla/python/xla_client_test.py
index 5381145..db1c012 100644
--- a/tensorflow/compiler/xla/python/xla_client_test.py
+++ b/tensorflow/compiler/xla/python/xla_client_test.py
@@ -1988,5 +1988,28 @@
np.testing.assert_allclose(ans, 4.14)
+class SetShardingTest(ComputationTest):
+ """Tests related to set OpSharding."""
+
+ def testSetSharding(self):
+ c = self._NewComputation()
+ sharding = xla_client.OpSharding()
+ sharding.type = sharding.type.REPLICATED
+ sharding.tile_assignment_dimensions.extend([1])
+ sharding.tile_assignment_devices.extend([0])
+ # Set Sharding.
+ c.SetSharding(sharding)
+ x = c.ParameterFromNumpy(NumpyArrayF32(2.0))
+ # Clear Sharding.
+ c.ClearSharding()
+
+ result = c.Add(x, c.ConstantF32Scalar(3.14))
+ extra = c.Add(result, c.ConstantF32Scalar(1.618)) # pylint: disable=unused-variable
+ arg = NumpyArrayF32(1.0)
+ compiled_c = c.Build(result).Compile()
+ ans = xla_client.execute_with_python_values(compiled_c, [arg])
+ np.testing.assert_allclose(ans, 4.14)
+
+
if __name__ == "__main__":
absltest.main()
diff --git a/tensorflow/compiler/xla/service/algebraic_simplifier.cc b/tensorflow/compiler/xla/service/algebraic_simplifier.cc
index 21454d7..fbd6399 100755
--- a/tensorflow/compiler/xla/service/algebraic_simplifier.cc
+++ b/tensorflow/compiler/xla/service/algebraic_simplifier.cc
@@ -341,11 +341,13 @@
}
// Helper method to perform and add reduction on a list of dimensions.
- HloInstruction* AddReduce(HloInstruction* hlo, absl::Span<const int64> dims) {
+ HloInstruction* AddReduce(HloInstruction* hlo, absl::Span<const int64> dims,
+ PrimitiveType type) {
HloInstruction* zero = computation_->AddInstruction(
simplifier_->CreateConstantWithLayoutUpdated(
LiteralUtil::Zero(hlo->shape().element_type()).Clone()));
- HloComputation* AddReduce_computation = GetOrCreateScalarAddComputation();
+ HloComputation* AddReduce_computation =
+ GetOrCreateScalarAddComputation(type);
Shape shape = ShapeUtil::FilterDimensions(
[&](int64 dim) { return !absl::c_linear_search(dims, dim); },
hlo->shape());
@@ -397,13 +399,13 @@
StatusOr<HloInstruction*> OptimizeDotOfReorderContractingDims(
HloInstruction* dot);
- HloComputation* GetOrCreateScalarAddComputation() {
+ HloComputation* GetOrCreateScalarAddComputation(PrimitiveType type) {
if (scalar_add_computation_) {
return scalar_add_computation_;
}
HloComputation::Builder b("scalar_add_computation");
- Shape shape = ShapeUtil::MakeShape(F32, {});
+ Shape shape = ShapeUtil::MakeShape(type, {});
simplifier_->UpdateLayout(&shape);
auto scalar_lhs = b.AddInstruction(
HloInstruction::CreateParameter(0, shape, "scalar_lhs"));
@@ -1782,9 +1784,7 @@
// If the lhs or rhs have only batch and contracting dimensions, a dot can be
// rewritten as reduce(mul(broadcast(transpose(x)),broadcast(transpose(y))))
if (options_.enable_dot_strength_reduction() &&
- (dot->shape().element_type() == F32 ||
- dot->shape().element_type() == F16 ||
- dot->shape().element_type() == BF16) &&
+ ShapeUtil::ElementIsFloating(dot->shape()) &&
((dot->dot_dimension_numbers().lhs_batch_dimensions_size() +
dot->dot_dimension_numbers().lhs_contracting_dimensions_size() ==
lhs->shape().rank()) ||
@@ -1845,12 +1845,13 @@
MakeBinaryHlo(HloOpcode::kMultiply, new_lhs, new_rhs));
std::vector<int64> reduce_dims(
dot->dot_dimension_numbers().lhs_contracting_dimensions_size());
- new_dot = AsType(new_dot, F32);
+ PrimitiveType dot_type = dot->shape().element_type() == F64 ? F64 : F32;
+ new_dot = AsType(new_dot, dot_type);
const int64 outer_dims = std::max(rhs_outer_dims, lhs_outer_dims);
absl::c_iota(
reduce_dims,
outer_dims + dot->dot_dimension_numbers().lhs_batch_dimensions_size());
- new_dot = AddReduce(new_dot, reduce_dims);
+ new_dot = AddReduce(new_dot, reduce_dims, dot_type);
new_dot = AsType(new_dot, dot->shape().element_type());
return ReplaceInstruction(dot, new_dot);
}
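
In formula form, the strength reduction this code applies is, for the simplest (vector) case with contracting dimension k, a hedged restatement of the comment above:

    \mathrm{dot}(x, y) \;=\; \sum_k x_k \, y_k \;=\; \mathrm{reduce}_k\big(\mathrm{mul}(x, y)\big)

With the change, the multiply-reduce accumulates in F64 when the dot's element type is F64 and in F32 otherwise, so F16/BF16 dots keep their existing F32 accumulation.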
diff --git a/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc b/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
index 33d4939..9c84ac1 100755
--- a/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
+++ b/tensorflow/compiler/xla/service/algebraic_simplifier_test.cc
@@ -4673,7 +4673,7 @@
::testing::Combine(::testing::Values(-1, 1, 2),
::testing::Values(-1, 1, 2),
::testing::Values(-1, 1, 2),
- ::testing::Values(F32, BF16)));
+ ::testing::Values(F64, F32, BF16)));
class DotStrengthReductionTest
: public AlgebraicSimplifierTest,
@@ -4737,7 +4737,7 @@
DotStrengthReductionTestInstantiation, DotStrengthReductionTest,
::testing::Combine(::testing::Values(1, 2), ::testing::Values(1, 2),
::testing::Values(1, 2), ::testing::Bool(),
- ::testing::Bool(), ::testing::Values(F32, BF16)));
+ ::testing::Bool(), ::testing::Values(F64, F32, BF16)));
struct DotOfConcatTestSpec {
int64 m;
diff --git a/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc b/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
index 20a8ff1..1270cd7 100644
--- a/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
+++ b/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
@@ -409,20 +409,8 @@
llvm::TargetOptions CompilerTargetOptions(
const HloModuleConfig& module_config) {
llvm::TargetOptions target_options;
- // In LLVM backend flags, UnsafeFPMath does not explicitly imply NoInfs, etc.
- if (module_config.debug_options().xla_cpu_enable_fast_math()) {
- target_options.UnsafeFPMath = true;
- target_options.NoInfsFPMath =
- !module_config.debug_options().xla_cpu_fast_math_honor_infs();
- target_options.NoNaNsFPMath =
- !module_config.debug_options().xla_cpu_fast_math_honor_nans();
- target_options.NoSignedZerosFPMath = true;
- } else {
- target_options.UnsafeFPMath = false;
- target_options.NoInfsFPMath = false;
- target_options.NoNaNsFPMath = false;
- target_options.NoSignedZerosFPMath = false;
- }
+ // Always allow FMA fusion. This increases precision instead of decreasing it.
+ target_options.AllowFPOpFusion = llvm::FPOpFusion::Fast;
return target_options;
}
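
A standalone illustration (hypothetical, not part of the patch) of why allowing FMA fusion only tightens rounding: the fused form skips the intermediate rounding of a*b.

    #include <cmath>
    #include <cstdio>

    int main() {
      float a = 1.0f + 0x1.0p-13f;  // exactly representable in float
      float b = 1.0f - 0x1.0p-13f;  // exactly representable in float
      float c = -1.0f;
      volatile float prod = a * b;       // rounds 1 - 2^-26 up to 1.0f
      float separate = prod + c;         // two roundings -> 0.0f
      float fused = std::fmaf(a, b, c);  // one rounding  -> exactly -2^-26
      std::printf("separate=%a fused=%a\n", separate, fused);
      return 0;
    }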
diff --git a/tensorflow/compiler/xla/service/cpu/runtime_fp16.cc b/tensorflow/compiler/xla/service/cpu/runtime_fp16.cc
index af0275c..c3cbf50 100644
--- a/tensorflow/compiler/xla/service/cpu/runtime_fp16.cc
+++ b/tensorflow/compiler/xla/service/cpu/runtime_fp16.cc
@@ -131,3 +131,9 @@
o.set_uint(o.as_uint() | (h & 0x8000) << 16); // sign bit
return o.as_float();
}
+
+uint16 TF_ATTRIBUTE_WEAK __truncdfhf2(double d) {
+ // This does a double rounding step, but it's precise enough for our use
+ // cases.
+ return __gnu_f2h_ieee(static_cast<float>(d));
+}
diff --git a/tensorflow/compiler/xla/service/cpu/runtime_fp16.h b/tensorflow/compiler/xla/service/cpu/runtime_fp16.h
index 01d92d0..fe91c01 100644
--- a/tensorflow/compiler/xla/service/cpu/runtime_fp16.h
+++ b/tensorflow/compiler/xla/service/cpu/runtime_fp16.h
@@ -24,4 +24,7 @@
// Converts an F16 value to a F32.
extern "C" float __gnu_h2f_ieee(tensorflow::uint16);
+// Converts an F64 value to an F16.
+extern "C" tensorflow::uint16 __truncdfhf2(double);
+
#endif // TENSORFLOW_COMPILER_XLA_SERVICE_CPU_RUNTIME_FP16_H_
diff --git a/tensorflow/compiler/xla/service/cpu/simple_orc_jit.cc b/tensorflow/compiler/xla/service/cpu/simple_orc_jit.cc
index 4fcabe1..4fe55e0 100644
--- a/tensorflow/compiler/xla/service/cpu/simple_orc_jit.cc
+++ b/tensorflow/compiler/xla/service/cpu/simple_orc_jit.cc
@@ -250,6 +250,8 @@
"Host");
registry->Register("__gnu_h2f_ieee", reinterpret_cast<void*>(__gnu_h2f_ieee),
"Host");
+ registry->Register("__truncdfhf2", reinterpret_cast<void*>(__truncdfhf2),
+ "Host");
#undef REGISTER_CPU_RUNTIME_SYMBOL
diff --git a/tensorflow/compiler/xla/service/cpu/tests/BUILD b/tensorflow/compiler/xla/service/cpu/tests/BUILD
index ccd98dd..51a12ae 100644
--- a/tensorflow/compiler/xla/service/cpu/tests/BUILD
+++ b/tensorflow/compiler/xla/service/cpu/tests/BUILD
@@ -234,3 +234,20 @@
"//tensorflow/core:test_main",
],
)
+
+tf_cc_test(
+ name = "cpu_vectorization_test",
+ srcs = ["cpu_vectorization_test.cc"],
+ deps = [
+ ":cpu_codegen_test",
+ "//tensorflow/compiler/xla/service:hlo",
+ "//tensorflow/compiler/xla/service/cpu:cpu_compiler",
+ "//tensorflow/core:lib",
+ "//tensorflow/core:test",
+ "//tensorflow/core:test_main",
+ "@com_google_absl//absl/strings",
+ "@llvm//:arm_code_gen", # fixdeps: keep
+ "@llvm//:target",
+ "@llvm//:x86_code_gen", # fixdeps: keep
+ ],
+)
diff --git a/tensorflow/compiler/xla/service/cpu/tests/cpu_vectorization_test.cc b/tensorflow/compiler/xla/service/cpu/tests/cpu_vectorization_test.cc
new file mode 100644
index 0000000..8a72eb1
--- /dev/null
+++ b/tensorflow/compiler/xla/service/cpu/tests/cpu_vectorization_test.cc
@@ -0,0 +1,130 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <algorithm>
+#include <string>
+
+#include "absl/strings/ascii.h"
+#include "absl/strings/str_cat.h"
+#include "llvm-c/Target.h"
+#include "tensorflow/compiler/xla/service/cpu/cpu_compiler.h"
+#include "tensorflow/compiler/xla/service/cpu/tests/cpu_codegen_test.h"
+#include "tensorflow/compiler/xla/service/hlo_computation.h"
+#include "tensorflow/core/platform/test.h"
+
+namespace xla {
+namespace cpu {
+namespace {
+
+const char* const kTriple_x86_64 = "x86_64-pc-linux";
+const char* const kTriple_android_arm = "armv7-none-android";
+
+struct VectorizationTestSpec {
+ HloOpcode opcode;
+ std::string triple;
+ std::string features;
+ std::string check_lines;
+};
+
+// Tests that the vectorizer does what we want.
+class CpuVectorizationTest
+ : public CpuCodegenTest,
+ public ::testing::WithParamInterface<VectorizationTestSpec> {
+ public:
+ static std::string Name(
+ const ::testing::TestParamInfo<VectorizationTestSpec>& info) {
+ auto spec = info.param;
+
+ std::string opcode = HloOpcodeString(spec.opcode);
+ opcode[0] = toupper(opcode[0]);
+
+ std::string triple{spec.triple.data(), spec.triple.size()};
+ if (triple == kTriple_x86_64) {
+ triple = "x86_64";
+ } else if (triple == kTriple_android_arm) {
+ triple = "android_arm";
+ } else {
+ triple = "Unknown";
+ }
+
+ std::string features = spec.features;
+ if (!features.empty()) {
+ absl::c_replace_if(
+ features, [](char c) { return c != '_' && !absl::ascii_isalnum(c); },
+ '_');
+ }
+
+ return absl::StrCat(opcode, "_On_", triple,
+ (features.empty() ? "" : "_With"), features);
+ }
+};
+
+TEST_P(CpuVectorizationTest, DoIt) {
+ HloComputation::Builder builder(TestName());
+ VectorizationTestSpec spec = GetParam();
+
+ LLVMInitializeX86Target();
+ LLVMInitializeX86TargetInfo();
+ LLVMInitializeX86TargetMC();
+ LLVMInitializeARMTarget();
+ LLVMInitializeARMTargetInfo();
+ LLVMInitializeARMTargetMC();
+
+ auto param_shape = ShapeUtil::MakeShape(F32, {1024});
+ HloInstruction* param0 = builder.AddInstruction(
+ HloInstruction::CreateParameter(0, param_shape, "input0"));
+ HloInstruction* param1 = builder.AddInstruction(
+ HloInstruction::CreateParameter(1, param_shape, "input1"));
+ builder.AddInstruction(
+ HloInstruction::CreateBinary(param_shape, spec.opcode, param0, param1));
+ std::unique_ptr<HloComputation> computation = builder.Build();
+
+ CpuAotCompilationOptions options{
+ /*triple=*/spec.triple, /*cpu_name=*/"", /*features=*/spec.features,
+ /*entry_point_name=*/"entry",
+ /*relocation_model=*/CpuAotCompilationOptions::RelocationModel::Static};
+
+ auto hlo_module = CreateNewVerifiedModule();
+ hlo_module->AddEntryComputation(std::move(computation));
+
+ string check_lines{spec.check_lines.data(), spec.check_lines.size()};
+
+ CompileAheadOfTimeAndVerifyIr(std::move(hlo_module), options, check_lines,
+ /*match_optimized_ir=*/true);
+}
+
+VectorizationTestSpec CpuVectorizationTestCases[] = {
+ VectorizationTestSpec{HloOpcode::kMultiply, kTriple_x86_64, "",
+ R"(CHECK: fmul fast <4 x float>)"},
+
+ VectorizationTestSpec{HloOpcode::kMultiply, kTriple_x86_64, "+avx",
+ R"(CHECK: fmul fast <8 x float>)"},
+
+ VectorizationTestSpec{HloOpcode::kMultiply, kTriple_android_arm,
+ "-vfp,-neon", R"(CHECK: fmul fast float)"},
+
+ // NEON is not IEEE 754-compliant (no denormals). We want vectorized code
+ // anyway.
+ VectorizationTestSpec{HloOpcode::kMultiply, kTriple_android_arm,
+ "+neon,-vfp", R"(CHECK: fmul fast <4 x float>)"}};
+
+INSTANTIATE_TEST_SUITE_P(CpuVectorizationTestInstantiation,
+ CpuVectorizationTest,
+ ::testing::ValuesIn(CpuVectorizationTestCases),
+ CpuVectorizationTest::Name);
+
+} // namespace
+} // namespace cpu
+} // namespace xla
diff --git a/tensorflow/compiler/xla/service/gpu/gpu_fusible.cc b/tensorflow/compiler/xla/service/gpu/gpu_fusible.cc
index 78c5f6a..599eef4 100644
--- a/tensorflow/compiler/xla/service/gpu/gpu_fusible.cc
+++ b/tensorflow/compiler/xla/service/gpu/gpu_fusible.cc
@@ -120,21 +120,21 @@
// sometimes referred to as "the real hero".
auto get_real_hero =
[&](const HloInstruction* instr) -> const HloInstruction* {
- if (instr->opcode() == HloOpcode::kFusion) {
- auto fused_expression_root = instr->fused_expression_root();
- if (instr->IsMultiOutputFusion()) {
- // If possible, we want to pick a reduction-to-vector operand of the
- // fusion root, because it has the most constraints.
- for (const auto* inst : fused_expression_root->operands()) {
- if (IsReductionFromOrToContiguousDimensions(*inst)) {
- return inst;
- }
- }
- return fused_expression_root->operands()[0];
- }
+ if (instr->opcode() != HloOpcode::kFusion) {
+ return instr;
+ }
+ auto fused_expression_root = instr->fused_expression_root();
+ if (!instr->IsMultiOutputFusion()) {
return fused_expression_root;
}
- return instr;
+ // If possible, we want to pick a reduction-to-vector operand of the
+ // fusion root, because it has the most constraints.
+ for (const auto* inst : fused_expression_root->operands()) {
+ if (IsReductionFromOrToContiguousDimensions(*inst)) {
+ return inst;
+ }
+ }
+ return fused_expression_root->operands()[0];
};
// Multi-output fusion kernels share a common parallel loop. The loop
diff --git a/tensorflow/compiler/xla/service/hlo_cse.cc b/tensorflow/compiler/xla/service/hlo_cse.cc
index 1e7e125..a58fcf4 100644
--- a/tensorflow/compiler/xla/service/hlo_cse.cc
+++ b/tensorflow/compiler/xla/service/hlo_cse.cc
@@ -113,6 +113,7 @@
StatusOr<bool> HloCSE::Run(HloModule* module) {
bool changed = false;
+
const std::function<bool(const HloInstruction*, const HloInstruction*)>
eq_instructions = std::equal_to<const HloInstruction*>();
const std::function<bool(const HloComputation*, const HloComputation*)>
@@ -153,16 +154,15 @@
continue;
}
- auto it = representatives.find(instruction);
- if (it != representatives.end()) {
- HloInstruction* equivalent_instruction = *it;
+ auto pair = representatives.insert(instruction);
+ if (!pair.second) {
+ HloInstruction* equivalent_instruction = *pair.first;
TF_RETURN_IF_ERROR(
instruction->ReplaceAllUsesWith(equivalent_instruction));
TF_RETURN_IF_ERROR(computation->RemoveInstruction(instruction));
changed = true;
continue;
}
- representatives.insert(instruction);
}
}
return changed;
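
The CSE loop now relies on the pair<iterator, bool> returned by insert(), doing one hash lookup instead of find() followed by insert(). A minimal standalone sketch of the pattern:

    #include <cstdio>
    #include "absl/container/flat_hash_set.h"

    int main() {
      absl::flat_hash_set<int> representatives;
      auto first = representatives.insert(42);
      std::printf("newly inserted: %d\n", first.second);  // 1: 42 was added
      auto second = representatives.insert(42);
      // second.second is false; *second.first is the existing equivalent
      // element, playing the role of `equivalent_instruction` above.
      std::printf("newly inserted: %d, existing: %d\n", second.second,
                  *second.first);
      return 0;
    }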
diff --git a/tensorflow/compiler/xla/service/hlo_live_range.h b/tensorflow/compiler/xla/service/hlo_live_range.h
index cc0445a..5464de6 100644
--- a/tensorflow/compiler/xla/service/hlo_live_range.h
+++ b/tensorflow/compiler/xla/service/hlo_live_range.h
@@ -83,6 +83,12 @@
return buffer_live_ranges_;
}
+ // Returns the map from a computation to its time span in the schedule.
+ const absl::flat_hash_map<const HloComputation*, TimeBound>&
+ computation_span_times() const {
+ return computation_span_times_;
+ }
+
// Returns the time stamp of the end of the program.
LogicalTime schedule_end_time() const { return schedule_end_time_; }
diff --git a/tensorflow/compiler/xla/service/hlo_module.cc b/tensorflow/compiler/xla/service/hlo_module.cc
index 74ef9a1..c38c307 100644
--- a/tensorflow/compiler/xla/service/hlo_module.cc
+++ b/tensorflow/compiler/xla/service/hlo_module.cc
@@ -28,6 +28,7 @@
#include "absl/memory/memory.h"
#include "absl/strings/str_cat.h"
#include "tensorflow/compiler/xla/map_util.h"
+#include "tensorflow/compiler/xla/service/hlo_instruction.h"
#include "tensorflow/compiler/xla/service/hlo_schedule.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/types.h"
@@ -223,7 +224,7 @@
}
s << "\n\n";
const auto& computations = options.canonicalize_computations()
- ? MakeComputationPostOrderAndSortedByNames()
+ ? MakeComputationSortedByContent()
: MakeComputationPostOrder();
for (const HloComputation* computation : computations) {
if (!options.print_computation(computation)) {
@@ -600,12 +601,15 @@
return post_order;
}
-std::vector<HloComputation*>
-HloModule::MakeComputationPostOrderAndSortedByNames() const {
+std::vector<HloComputation*> HloModule::MakeComputationSortedByContent() const {
auto result = MakeComputationPostOrder();
std::sort(result.begin(), result.end(),
[](HloComputation* a, HloComputation* b) {
- return a->name() < b->name();
+ if (a->instruction_count() != b->instruction_count()) {
+ return a->instruction_count() < b->instruction_count();
+ }
+ return a->ToString(HloPrintOptions::Fingerprint()) <
+ b->ToString(HloPrintOptions::Fingerprint());
});
return result;
}
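
The new ordering is name-independent: computations compare first by instruction count and then by a canonical fingerprint string, so canonicalized dumps stay stable across renames. A generic two-key comparator sketch (illustrative only, with hypothetical types):

    #include <algorithm>
    #include <string>
    #include <vector>

    struct Summary {
      int instruction_count;
      std::string fingerprint;  // stand-in for the Fingerprint() printout
    };

    void SortByContent(std::vector<Summary>* summaries) {
      std::sort(summaries->begin(), summaries->end(),
                [](const Summary& a, const Summary& b) {
                  // Cheap primary key first; expensive canonical string on ties.
                  if (a.instruction_count != b.instruction_count) {
                    return a.instruction_count < b.instruction_count;
                  }
                  return a.fingerprint < b.fingerprint;
                });
    }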
diff --git a/tensorflow/compiler/xla/service/hlo_module.h b/tensorflow/compiler/xla/service/hlo_module.h
index 9a96b78..3e9630a 100644
--- a/tensorflow/compiler/xla/service/hlo_module.h
+++ b/tensorflow/compiler/xla/service/hlo_module.h
@@ -189,9 +189,6 @@
// computation B, then A will appear after B in the sort.
std::vector<HloComputation*> MakeComputationPostOrder() const;
- // Same as MakeComputationPostOrder() but sorting the computations by names.
- std::vector<HloComputation*> MakeComputationPostOrderAndSortedByNames() const;
-
// Gets the computations in this module which aren't for fusion nodes.
//
// Postcondition: All computations in the returned list have
@@ -342,6 +339,10 @@
std::unique_ptr<HloComputation> computation, bool is_entry,
bool uniquify_identifiers);
+ // Same as MakeComputationPostOrder() but sorting the computations by their
+ // contents.
+ std::vector<HloComputation*> MakeComputationSortedByContent() const;
+
string name_;
HloModuleConfig config_;
HloComputation* entry_computation_ = nullptr;
diff --git a/tensorflow/compiler/xla/service/llvm_ir/llvm_util.cc b/tensorflow/compiler/xla/service/llvm_ir/llvm_util.cc
index 303088a..c4d527b 100644
--- a/tensorflow/compiler/xla/service/llvm_ir/llvm_util.cc
+++ b/tensorflow/compiler/xla/service/llvm_ir/llvm_util.cc
@@ -607,19 +607,10 @@
// created by the JIT compiled code.
function->setHasUWTable();
- if (module_config.debug_options().xla_cpu_enable_fast_math()) {
- function->addFnAttr("unsafe-fp-math", "true");
- function->addFnAttr("no-signed-zeros-fp-math", "true");
- if (!module_config.debug_options().xla_cpu_fast_math_honor_nans()) {
- function->addFnAttr("no-nans-fp-math", "true");
- }
- if (!module_config.debug_options().xla_cpu_fast_math_honor_infs()) {
- function->addFnAttr("no-infs-fp-math", "true");
- }
- if (module_config.debug_options().xla_cpu_fast_math_honor_division()) {
- function->addFnAttr("reciprocal-estimates", "none");
- }
- }
+ // TensorFlow always flushes denormals to zero, so let LLVM know that
+ // flushing denormals is safe. This allows vectorization using ARM's NEON
+ // instruction set.
+ function->addFnAttr("denormal-fp-math", "preserve-sign");
// Add the optsize attribute to the function if optimizing for size. This
// controls internal behavior of some optimization passes (e.g. loop
diff --git a/tensorflow/compiler/xla/service/memory_space_assignment.cc b/tensorflow/compiler/xla/service/memory_space_assignment.cc
index a9d4c1e..08d9328 100644
--- a/tensorflow/compiler/xla/service/memory_space_assignment.cc
+++ b/tensorflow/compiler/xla/service/memory_space_assignment.cc
@@ -158,9 +158,10 @@
end_logical_time_ = end_time;
// Find the earliest time we're allowed to start prefetching.
for (current_logical_prefetch_time_ = start_time;
+ current_logical_prefetch_time_ <= end_logical_time_ &&
max_async_copy_to_overlap_ratio_ * async_copy_elapsed_ <
- GetLogicalIntervalElapsed(current_logical_prefetch_time_,
- end_logical_time_);
+ GetLogicalIntervalElapsed(current_logical_prefetch_time_,
+ end_logical_time_);
++current_logical_prefetch_time_) {
}
}
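
A worked numeric sketch (hypothetical numbers and a stubbed elapsed function, not from the patch) of the clamped search Begin() now performs: the start time advances while prefetching would still be too early, but never past the inclusive end time.

    #include <cstdio>

    // Stub: pretend each logical step costs 3.0 time units.
    float GetLogicalIntervalElapsed(int start, int end) {
      return 3.0f * (end - start);
    }

    int main() {
      const int start_time = 0, end_time = 5;
      const float async_copy_elapsed = 2.0f, max_overlap_ratio = 4.0f;
      int t = start_time;
      // Advance while the remaining interval still dwarfs the copy (> 8.0
      // here), mirroring the loop above with the new `t <= end_time` clamp.
      for (; t <= end_time && max_overlap_ratio * async_copy_elapsed <
                                  GetLogicalIntervalElapsed(t, end_time);
           ++t) {
      }
      std::printf("earliest prefetch time = %d\n", t);  // 3*(5-t) <= 8 -> t = 3
      return 0;
    }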
@@ -172,6 +173,11 @@
}
bool CostAnalysisPrefetchIntervalPicker::Done() const {
+ // The end time is inclusive, so we're done if the prefetch time is greater
+ // than that.
+ if (current_logical_prefetch_time_ > end_logical_time_) {
+ return true;
+ }
float logical_interval_elapsed = GetLogicalIntervalElapsed(
current_logical_prefetch_time_, end_logical_time_);
return min_async_copy_to_overlap_ratio_ * async_copy_elapsed_ -
@@ -237,24 +243,19 @@
}
auto colocated_intervals = GetSortedColocatedIntervals(interval);
- bool keep_in_default_memory = false;
- for (const BufferInterval* colocated_interval : colocated_intervals) {
- const HloValue* value = colocated_interval->buffer;
- // If any of the colocated values are phi buffers, we keep them in the
- // default memory for now.
- if (value->is_phi()) {
- keep_in_default_memory = true;
- VLOG(4) << "Keeping value " << value->ToShortString()
- << " because it contains a phi node.";
- break;
- }
+
+ if (colocated_intervals.size() > 1 &&
+ !options_.allocate_across_sequential_calls) {
+ VLOG(4) << "Not allocating " << interval.buffer->ToShortString()
+ << " because it aliases with another interval and "
+ << " allocate_across_sequential_calls is false.";
+ continue;
}
- // At this point, none of the colocated buffers contain any phi buffers.
+ const HloComputation* defining_computation =
+ colocated_intervals[0]->buffer->defining_instruction()->parent();
+ MemorySpaceAssignment::Allocation* aliased_allocation = nullptr;
for (const BufferInterval* colocated_interval : colocated_intervals) {
- if (keep_in_default_memory) {
- break;
- }
const HloValue* value = colocated_interval->buffer;
const auto& instruction_schedule = hlo_live_range_.instruction_schedule();
MemorySpaceAssignment::AllocationSequence* allocation_sequence =
@@ -267,25 +268,66 @@
return instruction_schedule.at(use1.instruction) <
instruction_schedule.at(use2.instruction);
});
+
+ // If there was an aliased allocation for this buffer, propagate that for
+ // this HloValue.
+ if (aliased_allocation != nullptr) {
+ VLOG(3) << "Adding an aliased allocation: ("
+ << aliased_allocation->start_time() << ", "
+ << aliased_allocation->end_time()
+ << ") pos: " << aliased_allocation->defining_position()
+ << " mem space: "
+ << (aliased_allocation->memory_space() == MemorySpace::kDefault
+ ? "default"
+ : "alt");
+ allocation_sequence->push_back(
+ absl::make_unique<MemorySpaceAssignment::Allocation>(
+ value->defining_instruction(), value->defining_position(),
+ aliased_allocation->memory_space(), aliased_allocation->chunk(),
+ aliased_allocation->start_time(),
+ aliased_allocation->end_time()));
+ }
+
// Iterate over the uses.
for (HloUse use : uses) {
int64 use_time = instruction_schedule.at(use.instruction);
int64 last_use_time = instruction_schedule.at(uses.back().instruction);
+ int64 latest_prefetch_time = use_time;
+
+ if (use.instruction->parent() != defining_computation) {
+ VLOG(3) << "skip use " << use.ToString()
+ << " because it's in a different computation.";
+ continue;
+ }
+
+ // Sequential calls include kWhile, kCall, and kConditional opcodes.
+ bool is_sequential_call =
+ (GetInstructionCallContext(use.instruction->opcode()) ==
+ CallContext::kSequential);
+ if (is_sequential_call) {
+ for (const HloComputation* called_computation :
+ use.instruction->called_computations()) {
+ const HloLiveRange::TimeBound& computation_span =
+ hlo_live_range_.computation_span_times().at(called_computation);
+ latest_prefetch_time =
+ std::min(computation_span.start, latest_prefetch_time);
+ }
+ }
// Bitcasts don't define buffers and don't directly consume buffers.
// Skip allocating buffers for bitcast uses. The uses that feed from
// bitcasts will be handled specially.
if (use.instruction->opcode() != HloOpcode::kBitcast) {
if (!FindAllocation(definition_time, use_time, last_use_time,
- value->defining_position(), use, value,
- colocated_interval->size, allocation_sequence)) {
+ latest_prefetch_time, value->defining_position(),
+ use, value, colocated_interval->size,
+ allocation_sequence)) {
// If the allocation finding failed (e.g., due to running out of
// asynchronous copies), then fall back to allocating the buffer
// entirely in the default memory.
pending_chunks_.clear();
pending_async_copies_.clear();
allocation_sequence->clear();
- keep_in_default_memory = true;
break;
}
@@ -293,6 +335,12 @@
// allocation already at the alternate memory.
definition_time = use_time;
}
+
+ // If the use was a sequential call (e.g. a while loop), the other
+ // colocated intervals must alias with this allocation.
+ if (is_sequential_call && !allocation_sequence->empty()) {
+ aliased_allocation = allocation_sequence->back().get();
+ }
}
}
@@ -390,8 +438,9 @@
bool AlternateMemoryBestFitHeap::FindAllocation(
int64 start_time, int64 end_time, int64 last_use_time,
- HloPosition defining_position, HloUse use, const HloValue* buffer,
- int64 size, MemorySpaceAssignment::AllocationSequence* allocations) {
+ int64 latest_prefetch_time, HloPosition defining_position, HloUse use,
+ const HloValue* buffer, int64 size,
+ MemorySpaceAssignment::AllocationSequence* allocations) {
HloInstruction* operand =
use.instruction->mutable_operand(use.operand_number);
// If the operand is a bitcast, we look at bitcast's operand until we find a
@@ -408,8 +457,10 @@
alternate_mem_interval.end = end_time;
VLOG(2) << "Finding allocation for " << buffer->ToShortString() << " ("
- << start_time << ", " << end_time << ") last use = " << last_use_time
- << " use = " << use.ToString() << ". Size = " << size
+ << start_time << ", " << end_time
+ << ") latest prefetch = " << latest_prefetch_time
+ << " last use = " << last_use_time << " use = " << use.ToString()
+ << ". Size = " << size
<< ", def pos = " << defining_position.ToString()
<< ", operand = " << operand->ToShortString()
<< (non_bitcast_operand != operand
@@ -445,19 +496,6 @@
}
}
- // TODO(berkin): This is curently overly restrictive and will fail using
- // alternate memory for any buffer that might leak into a different
- // computation (e.g., while body). Enable more usage of alternate memory
- // across computations.
- if (defining_position.instruction->parent() != use.instruction->parent() ||
- (!use.instruction->called_computations().empty() &&
- use.instruction->opcode() != HloOpcode::kFusion)) {
- VLOG(3) << "Use is in a different computation or calls a computation.";
- // Fail because we do not allow asynchronous copies while in the bodies of
- // other computation.
- return false;
- }
-
// First try keeping the allocation entirely in the alternate memory.
if (!definition_requires_buffer_in_default_mem &&
!use_requires_buffer_in_default_mem &&
@@ -491,7 +529,7 @@
prev_allocation->end_time())) {
AddAsyncCopy(*prev_allocation, MemorySpace::kDefault, kDummyChunk,
prev_allocation->start_time(), prev_allocation->end_time(),
- allocations);
+ prev_allocation->end_time(), allocations);
} else {
VLOG(3) << "This violates the maximum async copies.";
@@ -504,7 +542,7 @@
if (!ViolatesMaximumOutstandingAsyncCopies(time, time)) {
VLOG(3) << "Eviction successful.";
AddAsyncCopy(*prev_allocation, MemorySpace::kDefault, kDummyChunk,
- time, time, allocations);
+ time, time, time, allocations);
eviction_scheduled = true;
break;
}
@@ -558,7 +596,8 @@
// ^ ^
// Copy Copy
// Start Done
- options_.prefetch_interval_picker->Begin(use, start_time, end_time);
+ options_.prefetch_interval_picker->Begin(use, start_time,
+ latest_prefetch_time);
while (!options_.prefetch_interval_picker->Done()) {
alternate_mem_interval.start = options_.prefetch_interval_picker->Next();
VLOG(4) << "Trying alternate memory allocation ("
@@ -583,7 +622,7 @@
AddAsyncCopy(*allocations->back().get(), MemorySpace::kAlternate,
chunk_candidate.chunk, alternate_mem_interval.start,
- end_time, allocations);
+ end_time, latest_prefetch_time, allocations);
allocations->back()->AddUse(use);
return true;
@@ -598,16 +637,19 @@
void AlternateMemoryBestFitHeap::AddAsyncCopy(
const MemorySpaceAssignment::Allocation& prev_allocation,
MemorySpace memory_space, Chunk chunk, int64 start_time, int64 end_time,
+ int64 copy_done_schedule_before_time,
MemorySpaceAssignment::AllocationSequence* allocations) {
VLOG(3) << "Copy to "
<< (memory_space == MemorySpaceAssignment::MemorySpace::kDefault
? "default"
: "alternate")
- << " memory between " << start_time << " and " << end_time;
+ << " memory between " << start_time << " and "
+ << copy_done_schedule_before_time << " keeping until " << end_time;
allocations->push_back(
absl::make_unique<MemorySpaceAssignment::CopyAllocation>(
- prev_allocation, memory_space, chunk, start_time, end_time));
+ prev_allocation, memory_space, chunk, start_time, end_time,
+ copy_done_schedule_before_time));
// Register the additional async copy with the interval tree to keep track of
// the limit at any given time.
@@ -828,9 +870,12 @@
&memory_space_assignment.allocation_map_, options, *alias_analysis,
*hlo_live_range);
+ HeapSimulator::Options heap_simulator_options;
+ heap_simulator_options.may_reuse_operand_buffers = false;
TF_RETURN_IF_ERROR(HeapSimulator::Run(std::move(algorithm), *module,
module->schedule(),
- *alias_analysis.get(), options.size_fn)
+ *alias_analysis.get(), options.size_fn,
+ heap_simulator_options)
.status());
TF_RETURN_IF_ERROR(memory_space_assignment.Process());
@@ -1221,28 +1266,30 @@
instruction_index <
flattened_instruction_sequence_.instructions().size();
++instruction_index) {
- HloInstruction* instruction =
- flattened_instruction_sequence_.instructions()[instruction_index];
- if (instruction->parent() != computation) {
- continue;
- }
auto insts_before_iter = schedule_before_.find(instruction_index);
if (insts_before_iter != schedule_before_.end()) {
for (HloInstruction* new_instruction : insts_before_iter->second) {
- EnsureInstructionAndOperandsInserted(new_instruction, &new_sequence,
- &inserted_instructions);
+ if (new_instruction->parent() == computation) {
+ EnsureInstructionAndOperandsInserted(new_instruction, &new_sequence,
+ &inserted_instructions);
+ }
}
}
+ HloInstruction* instruction =
+ flattened_instruction_sequence_.instructions()[instruction_index];
// Insert only if not previously inserted.
- if (!inserted_instructions.contains(instruction)) {
+ if (!inserted_instructions.contains(instruction) &&
+ instruction->parent() == computation) {
EnsureInstructionAndOperandsInserted(instruction, &new_sequence,
&inserted_instructions);
}
auto insts_after_iter = schedule_after_.find(instruction_index);
if (insts_after_iter != schedule_after_.end()) {
for (HloInstruction* new_instruction : insts_after_iter->second) {
- EnsureInstructionAndOperandsInserted(new_instruction, &new_sequence,
- &inserted_instructions);
+ if (new_instruction->parent() == computation) {
+ EnsureInstructionAndOperandsInserted(new_instruction, &new_sequence,
+ &inserted_instructions);
+ }
}
}
}
diff --git a/tensorflow/compiler/xla/service/memory_space_assignment.h b/tensorflow/compiler/xla/service/memory_space_assignment.h
index b6cf5a0..bfc9166 100644
--- a/tensorflow/compiler/xla/service/memory_space_assignment.h
+++ b/tensorflow/compiler/xla/service/memory_space_assignment.h
@@ -268,6 +268,10 @@
// Specifies the upper bound for number of outstanding asynchronous copies,
// -1 for unlimited.
int64 max_outstanding_async_copies = -1;
+
+ // If true, tries allocating buffers across sequential calls (kWhile, kCall,
+ // and kConditional), e.g., before and inside a while loop body.
+ bool allocate_across_sequential_calls = false;
};
// This class represents an allocation that might either be in the default or
@@ -363,13 +367,14 @@
class CopyAllocation : public Allocation {
public:
CopyAllocation(const Allocation& prev_allocation, MemorySpace memory_space,
- Chunk chunk, int64 start_time, int64 end_time)
+ Chunk chunk, int64 start_time, int64 end_time,
+ int64 copy_done_schedule_before_time)
: Allocation(/*instruction=*/nullptr,
/*defining_position=*/{nullptr, {}}, memory_space, chunk,
start_time, end_time),
prev_allocation_(prev_allocation),
copy_start_schedule_after_(start_time),
- copy_done_schedule_before_(end_time) {}
+ copy_done_schedule_before_(copy_done_schedule_before_time) {}
bool is_copy_allocation() const override { return true; }
@@ -525,8 +530,8 @@
// allocations can be in default or alternate memory spaces, or can be
// prefetches or evictions. Returns true if successful.
bool FindAllocation(int64 start_time, int64 end_time, int64 last_use_time,
- HloPosition defining_position, HloUse use,
- const HloValue* buffer, int64 size,
+ int64 latest_prefetch_time, HloPosition defining_position,
+ HloUse use, const HloValue* buffer, int64 size,
MemorySpaceAssignment::AllocationSequence* allocations);
// Try allocating in alternate memory without any copies. Returns true if
@@ -560,7 +565,7 @@
// Adds an asynchronous copy to the allocations.
void AddAsyncCopy(const MemorySpaceAssignment::Allocation& prev_allocation,
MemorySpace memory_space, Chunk chunk, int64 start_time,
- int64 end_time,
+ int64 end_time, int64 copy_done_schedule_before_time,
MemorySpaceAssignment::AllocationSequence* allocations);
// These methods are used for delaying committing the chunk candidate until
diff --git a/tensorflow/compiler/xla/service/memory_space_assignment_test.cc b/tensorflow/compiler/xla/service/memory_space_assignment_test.cc
index 7ec4ddc..6372590 100644
--- a/tensorflow/compiler/xla/service/memory_space_assignment_test.cc
+++ b/tensorflow/compiler/xla/service/memory_space_assignment_test.cc
@@ -35,7 +35,8 @@
return ShapeUtil::ByteSizeOf(shape, kPointerSize);
}
-class MemorySpaceAssignmentTest : public HloTestBase {
+class MemorySpaceAssignmentTest : public HloTestBase,
+ public ::testing::WithParamInterface<bool> {
protected:
// We use the following two memory space values to describe the default (slow
// and large) and alternate (fast and small) memory spaces.
@@ -105,6 +106,7 @@
options.size_fn = size_fn;
options.is_allowed_in_alternate_mem_fn = is_allowed_in_alternate_mem;
options.max_outstanding_async_copies = max_outstanding_async_copies;
+ options.allocate_across_sequential_calls = GetParam();
std::unique_ptr<PresetAssignments> preset_assignments =
MemorySpaceAssignment::Run(module, options).ValueOrDie();
CheckPresetAssignments(preset_assignments.get());
@@ -190,7 +192,7 @@
}
};
-TEST_F(MemorySpaceAssignmentTest, ParameterOnly) {
+TEST_P(MemorySpaceAssignmentTest, ParameterOnly) {
// A module consisting of a single parameter. Inputs/outputs are currently
// excluded from memory space assignment.
HloComputation::Builder builder(TestName());
@@ -210,7 +212,7 @@
EXPECT_THAT(p0, op::ShapeWithLayout(shape));
}
-TEST_F(MemorySpaceAssignmentTest, Simple) {
+TEST_P(MemorySpaceAssignmentTest, Simple) {
// A simple module with a few simple instructions. Expect this to be
// transformed with CopyStart and CopyDone instructions inserted after inputs
// and before outputs.
@@ -256,7 +258,7 @@
preset_assignments->chunks()[1].second.offset);
}
-TEST_F(MemorySpaceAssignmentTest, NegateChain) {
+TEST_P(MemorySpaceAssignmentTest, NegateChain) {
// The negate chain is long enough for asynchronous copy to be inserted
// between p1 and add.
HloComputation::Builder builder(TestName());
@@ -319,7 +321,7 @@
EXPECT_THAT(sequence.instructions()[10], op::CopyDone());
}
-TEST_F(MemorySpaceAssignmentTest, EvictAndPrefetch) {
+TEST_P(MemorySpaceAssignmentTest, EvictAndPrefetch) {
std::unique_ptr<HloModule> module = CreateEvictAndPrefetchModule();
AssignMemorySpace(module.get());
@@ -330,12 +332,9 @@
op::AsyncCopy(kAlternateMemorySpace, kDefaultMemorySpace,
op::AsyncCopy(kDefaultMemorySpace,
kAlternateMemorySpace, op::Tanh()))));
-
- EXPECT_EQ(MemorySpaceAssignment::CountMaximumOutstandingAsyncCopies(*module),
- 2);
}
-TEST_F(MemorySpaceAssignmentTest, EvictAndPrefetchLimitAsyncCopies0) {
+TEST_P(MemorySpaceAssignmentTest, EvictAndPrefetchLimitAsyncCopies0) {
std::unique_ptr<HloModule> module = CreateEvictAndPrefetchModule();
AssignMemorySpace(module.get(), /*max_outstanding_async_copies=*/0);
@@ -344,7 +343,7 @@
0);
}
-TEST_F(MemorySpaceAssignmentTest, EvictAndPrefetchLimitAsyncCopies1) {
+TEST_P(MemorySpaceAssignmentTest, EvictAndPrefetchLimitAsyncCopies1) {
std::unique_ptr<HloModule> module = CreateEvictAndPrefetchModule();
AssignMemorySpace(module.get(), /*max_outstanding_async_copies=*/1);
@@ -353,7 +352,16 @@
1);
}
-TEST_F(MemorySpaceAssignmentTest, While) {
+TEST_P(MemorySpaceAssignmentTest, EvictAndPrefetchLimitAsyncCopies2) {
+ std::unique_ptr<HloModule> module = CreateEvictAndPrefetchModule();
+
+ AssignMemorySpace(module.get(), /*max_outstanding_async_copies=*/2);
+
+ EXPECT_EQ(MemorySpaceAssignment::CountMaximumOutstandingAsyncCopies(*module),
+ 2);
+}
+
+TEST_P(MemorySpaceAssignmentTest, While) {
auto module = CreateNewVerifiedModule();
Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
Shape scalar_shape = ShapeUtil::MakeShape(xla::F32, {});
@@ -429,14 +437,18 @@
AssignMemorySpace(module.get());
// Ensure the tuple value and buffers used in the while instruction are
- // exempted from using the alternate memory. However, body_data_mul is
- // independent and can be safely be placed in the alternate memory.
- EXPECT_THAT(tuple, op::ShapeWithLayout(tuple_shape));
- EXPECT_THAT(data, op::ShapeWithLayout(shape));
- EXPECT_THAT(iter, op::ShapeWithLayout(scalar_shape));
- EXPECT_THAT(body_data, op::ShapeWithLayout(shape));
- EXPECT_THAT(body_iter, op::ShapeWithLayout(scalar_shape));
- EXPECT_THAT(cond_iter, op::ShapeWithLayout(scalar_shape));
+ // exempted from using the alternate memory when allocating across sequential
+ // calls is disabled. However, body_data_mul is independent and can safely
+ // be placed in the alternate memory.
+ const bool allocate_across_sequential_calls = GetParam();
+ if (!allocate_across_sequential_calls) {
+ EXPECT_THAT(tuple, op::ShapeWithLayout(tuple_shape));
+ EXPECT_THAT(data, op::ShapeWithLayout(shape));
+ EXPECT_THAT(iter, op::ShapeWithLayout(scalar_shape));
+ EXPECT_THAT(body_data, op::ShapeWithLayout(shape));
+ EXPECT_THAT(body_iter, op::ShapeWithLayout(scalar_shape));
+ EXPECT_THAT(cond_iter, op::ShapeWithLayout(scalar_shape));
+ }
Shape shape_in_alternate_mem = ShapeUtil::MakeShapeWithLayout(
F32, {2, 3},
/*minor_to_major=*/{1, 0}, /*tiles=*/{}, /*element_size_in_bits=*/0,
@@ -444,7 +456,7 @@
EXPECT_THAT(body_data_mul, op::ShapeWithLayout(shape_in_alternate_mem));
}
-TEST_F(MemorySpaceAssignmentTest, Tuple) {
+TEST_P(MemorySpaceAssignmentTest, Tuple) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape inner_tuple_shape = ShapeUtil::MakeTupleShape({shape});
@@ -499,7 +511,7 @@
op::GetTupleElement(op::GetTupleElement()))));
}
-TEST_F(MemorySpaceAssignmentTest, Bitcast) {
+TEST_P(MemorySpaceAssignmentTest, Bitcast) {
// Bitcasts can cause the position in the alternate memory to appear multiple
// times in the preset assignments. This test ensures the preset assignments
// refer to unique positions.
@@ -528,7 +540,7 @@
EXPECT_EQ(bitcast->shape().layout().memory_space(), kAlternateMemorySpace);
}
-TEST_F(MemorySpaceAssignmentTest, Bitcast2) {
+TEST_P(MemorySpaceAssignmentTest, Bitcast2) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape param_shape = ShapeUtil::MakeShape(F32, {6});
@@ -564,7 +576,7 @@
EXPECT_EQ(bitcast->shape().layout().memory_space(), kAlternateMemorySpace);
}
-TEST_F(MemorySpaceAssignmentTest, Bitcast3) {
+TEST_P(MemorySpaceAssignmentTest, Bitcast3) {
HloComputation::Builder builder(TestName());
Shape shape1 = ShapeUtil::MakeShape(F32, {2, 3});
Shape shape2 = ShapeUtil::MakeShape(F32, {3, 2});
@@ -627,7 +639,7 @@
EXPECT_EQ(bitcast4->shape().layout().memory_space(), kAlternateMemorySpace);
}
-TEST_F(MemorySpaceAssignmentTest, BitcastTuple) {
+TEST_P(MemorySpaceAssignmentTest, BitcastTuple) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape param_shape = ShapeUtil::MakeShape(F32, {6});
@@ -678,7 +690,7 @@
AssignMemorySpace(module.get());
}
-TEST_F(MemorySpaceAssignmentTest, LastUseOpt) {
+TEST_P(MemorySpaceAssignmentTest, LastUseOpt) {
// Test that checks the last use optimization. It uses two buffers that should
// be placed in alternate memory.
//
@@ -735,7 +747,7 @@
op::Add(op::Parameter(0), op::Parameter(0)))));
}
-TEST_F(MemorySpaceAssignmentTest, CopyOrdering) {
+TEST_P(MemorySpaceAssignmentTest, CopyOrdering) {
// Test to make sure the CopyStarts follow the same CopyDone order. The shapes
// are picked in increasing order to exploit the fact that heap simulator
// processes larger tensors first. This checks the ability of the compiler to
@@ -850,7 +862,7 @@
}
}
-TEST_F(MemorySpaceAssignmentTest, NonEntryComputationSchedule1) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule1) {
// Test to ensure CopyStart/CopyDone is placed only in the entry computation.
auto module = CreateNewVerifiedModule();
Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
@@ -934,7 +946,7 @@
AssignMemorySpace(module.get(), -1, 50);
}
-TEST_F(MemorySpaceAssignmentTest, NonEntryComputationSchedule2) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule2) {
auto module = CreateNewVerifiedModule();
Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
Shape shape2 = ShapeUtil::MakeShape(xla::F32, {3, 3});
@@ -1005,7 +1017,7 @@
AssignMemorySpace(module.get(), -1, 5);
}
-TEST_F(MemorySpaceAssignmentTest, NonEntryComputationSchedule3) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule3) {
auto module = CreateNewVerifiedModule();
Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
Shape shape2 = ShapeUtil::MakeShape(xla::F32, {3, 3});
@@ -1071,7 +1083,7 @@
AssignMemorySpace(module.get(), -1, 5);
}
-TEST_F(MemorySpaceAssignmentTest, NonEntryComputationSchedule4) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule4) {
auto module = CreateNewVerifiedModule();
Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
Shape shape2 = ShapeUtil::MakeShape(xla::F32, {3, 3});
@@ -1144,7 +1156,7 @@
AssignMemorySpace(module.get(), -1, 5);
}
-TEST_F(MemorySpaceAssignmentTest, NonEntryComputationSchedule5) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule5) {
// This test reproduces the failure in b/143288178. Given a graph like the
// following:
//
@@ -1242,7 +1254,7 @@
HloInstruction* while_op = builder.AddInstruction(HloInstruction::CreateWhile(
tuple_shape, cond_computation, body_computation, tuple));
HloInstruction* while_data = builder.AddInstruction(
- HloInstruction::CreateGetTupleElement(shape, while_op, 0));
+ HloInstruction::CreateGetTupleElement(scalar_shape, while_op, 1));
HloInstruction* root =
builder.AddInstruction(HloInstruction::CreateTuple({while_data, sub}));
HloComputation* entry_computation =
@@ -1265,7 +1277,143 @@
AssignMemorySpace(module.get(), -1, 20);
}
-TEST_F(MemorySpaceAssignmentTest, DanglingCopy) {
+TEST_P(MemorySpaceAssignmentTest, NonEntryComputationSchedule6) {
+ auto module = CreateNewVerifiedModule();
+ Shape shape = ShapeUtil::MakeShape(xla::F32, {2, 3});
+ Shape scalar_shape = ShapeUtil::MakeShape(xla::F32, {});
+ Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, scalar_shape, shape});
+
+ auto cond_builder = HloComputation::Builder("WhileCond");
+ HloInstruction* cond_param = cond_builder.AddInstruction(
+ HloInstruction::CreateParameter(0, tuple_shape, "cond_param"));
+ HloInstruction* cond_iter = cond_builder.AddInstruction(
+ HloInstruction::CreateGetTupleElement(scalar_shape, cond_param, 1));
+ HloInstruction* cond_limit = cond_builder.AddInstruction(
+ HloInstruction::CreateConstant(LiteralUtil::CreateR0<float>(50.f)));
+ HloInstruction* cond_lt = cond_builder.AddInstruction(
+ HloInstruction::CreateCompare(ShapeUtil::MakeShape(PRED, {}), cond_iter,
+ cond_limit, ComparisonDirection::kLt));
+ HloComputation* cond_computation =
+ module->AddEmbeddedComputation(cond_builder.Build());
+
+ auto body_builder = HloComputation::Builder("WhileBody");
+ HloInstruction* body_param = body_builder.AddInstruction(
+ HloInstruction::CreateParameter(0, tuple_shape, "body_param"));
+ HloInstruction* body_iter = body_builder.AddInstruction(
+ HloInstruction::CreateGetTupleElement(scalar_shape, body_param, 1));
+ HloInstruction* body_data = body_builder.AddInstruction(
+ HloInstruction::CreateGetTupleElement(shape, body_param, 0));
+ HloInstruction* body_negate0 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_data));
+ HloInstruction* body_negate1 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate0));
+ HloInstruction* body_negate2 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate1));
+ HloInstruction* body_negate3 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate2));
+ HloInstruction* body_negate4 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate3));
+ HloInstruction* body_negate5 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate4));
+ HloInstruction* body_negate6 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate5));
+ HloInstruction* body_negate7 = body_builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, body_negate6));
+ HloInstruction* body_iter_increment = body_builder.AddInstruction(
+ HloInstruction::CreateConstant(LiteralUtil::CreateR0<float>(1.f)));
+ HloInstruction* body_iter_next =
+ body_builder.AddInstruction(HloInstruction::CreateBinary(
+ scalar_shape, HloOpcode::kAdd, body_iter, body_iter_increment));
+ HloInstruction* body_out = body_builder.AddInstruction(
+ HloInstruction::CreateTuple({body_data, body_iter_next, body_negate7}));
+ HloComputation* body_computation =
+ module->AddEmbeddedComputation(body_builder.Build());
+
+ auto builder = HloComputation::Builder(TestName());
+ HloInstruction* data = builder.AddInstruction(
+ HloInstruction::CreateParameter(0, shape, "param_data"));
+ HloInstruction* iter = builder.AddInstruction(
+ HloInstruction::CreateParameter(1, scalar_shape, "param_iter"));
+ HloInstruction* negate0 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, data));
+ HloInstruction* negate1 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate0));
+ HloInstruction* negate2 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate1));
+ HloInstruction* negate3 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate2));
+ HloInstruction* negate4 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate3));
+ HloInstruction* negate5 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate4));
+ HloInstruction* negate6 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate5));
+ HloInstruction* negate7 = builder.AddInstruction(
+ HloInstruction::CreateUnary(shape, HloOpcode::kNegate, negate6));
+ HloInstruction* tuple = builder.AddInstruction(
+ HloInstruction::CreateTuple({data, iter, negate7}));
+ HloInstruction* while_op = builder.AddInstruction(HloInstruction::CreateWhile(
+ tuple_shape, cond_computation, body_computation, tuple));
+ HloInstruction* while_data = builder.AddInstruction(
+ HloInstruction::CreateGetTupleElement(shape, while_op, 0));
+ HloInstruction* while_data2 = builder.AddInstruction(
+ HloInstruction::CreateGetTupleElement(shape, while_op, 2));
+ HloInstruction* root = builder.AddInstruction(HloInstruction::CreateBinary(
+ shape, HloOpcode::kAdd, while_data, while_data2));
+ HloComputation* entry_computation =
+ module->AddEntryComputation(builder.Build());
+
+ HloSchedule schedule(module.get());
+ schedule.set_sequence(cond_computation,
+ {cond_param, cond_iter, cond_limit, cond_lt});
+ schedule.set_sequence(
+ body_computation,
+ {body_param, body_iter, body_data, body_negate0, body_negate1,
+ body_negate2, body_negate3, body_negate4, body_negate5, body_negate6,
+ body_negate7, body_iter_increment, body_iter_next, body_out});
+ schedule.set_sequence(
+ entry_computation,
+ {iter, data, negate0, negate1, negate2, negate3, negate4, negate5,
+ negate6, negate7, tuple, while_op, while_data, while_data2, root});
+ TF_CHECK_OK(module->set_schedule(schedule));
+
+ // Pick a large max prefetch interval to ensure all the while inputs are
+ // allocated in the alternate memory.
+ AssignMemorySpace(module.get(), /*max_outstanding_async_copies=*/-1,
+ /*max_prefetch_interval=*/25);
+
+ int64 memory_space_across_while = kDefaultMemorySpace;
+ bool allocate_across_sequential_calls = GetParam();
+ if (allocate_across_sequential_calls) {
+ memory_space_across_while = kAlternateMemorySpace;
+ }
+
+ // Index {0} of the while loop argument is not written inside the while loop,
+ // so it can be trivially placed in the alternate memory space.
+ *ShapeUtil::GetMutableSubshape(&tuple_shape, {0})->mutable_layout() =
+ LayoutUtil::MakeLayout(
+ /*minor_to_major=*/{1, 0}, /*tiles=*/{}, /*element_size_in_bits=*/0,
+ kAlternateMemorySpace);
+ // Indexes {1} and {2} of the while loop argument are only placed in the
+ // alternate memory if we enable the allocate_across_sequential_calls option.
+ *ShapeUtil::GetMutableSubshape(&tuple_shape, {1})->mutable_layout() =
+ LayoutUtil::MakeLayout(
+ /*minor_to_major=*/{}, /*tiles=*/{}, /*element_size_in_bits=*/0,
+ memory_space_across_while);
+ *ShapeUtil::GetMutableSubshape(&tuple_shape, {2})->mutable_layout() =
+ LayoutUtil::MakeLayout(
+ /*minor_to_major=*/{1, 0}, /*tiles=*/{}, /*element_size_in_bits=*/0,
+ memory_space_across_while);
+
+ // Expect the layout for the while loop and its aliased buffers.
+ EXPECT_THAT(while_op, op::ShapeWithLayout(tuple_shape));
+ EXPECT_THAT(while_op->operand(0), op::ShapeWithLayout(tuple_shape));
+ EXPECT_THAT(cond_param, op::ShapeWithLayout(tuple_shape));
+ EXPECT_THAT(body_param, op::ShapeWithLayout(tuple_shape));
+ EXPECT_THAT(body_out, op::ShapeWithLayout(tuple_shape));
+}
+
+TEST_P(MemorySpaceAssignmentTest, DanglingCopy) {
// This situation was encountered in vss, where there is a mismatch in the
// memory space in preset assignments and the output graph.
HloComputation::Builder builder(TestName());
@@ -1311,7 +1459,7 @@
AssignMemorySpace(module.get());
}
-TEST_F(MemorySpaceAssignmentTest, MultiOutputFusion) {
+TEST_P(MemorySpaceAssignmentTest, MultiOutputFusion) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1348,7 +1496,7 @@
AssignMemorySpace(module.get());
}
-TEST_F(MemorySpaceAssignmentTest, TupleInput) {
+TEST_P(MemorySpaceAssignmentTest, TupleInput) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1388,7 +1536,7 @@
AssignMemorySpace(module.get());
}
-TEST_F(MemorySpaceAssignmentTest, TupleToTuple1) {
+TEST_P(MemorySpaceAssignmentTest, TupleToTuple1) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1467,7 +1615,7 @@
op::GetTupleElement(op::Fusion(), 1)))));
}
-TEST_F(MemorySpaceAssignmentTest, TupleToTuple2) {
+TEST_P(MemorySpaceAssignmentTest, TupleToTuple2) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1547,7 +1695,7 @@
op::GetTupleElement(op::Fusion(), 1), 1))))));
}
-TEST_F(MemorySpaceAssignmentTest, TupleToTuple3) {
+TEST_P(MemorySpaceAssignmentTest, TupleToTuple3) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1594,7 +1742,7 @@
EXPECT_THAT(fusion1, op::Fusion(op::Fusion()));
}
-TEST_F(MemorySpaceAssignmentTest, InputOutputAlias) {
+TEST_P(MemorySpaceAssignmentTest, InputOutputAlias) {
HloComputation::Builder builder(TestName());
Shape shape = ShapeUtil::MakeShape(F32, {2, 3});
Shape tuple_shape = ShapeUtil::MakeTupleShape({shape, shape});
@@ -1649,7 +1797,7 @@
kDefaultMemorySpace);
}
-TEST_F(MemorySpaceAssignmentTest, CostAnalysis) {
+TEST_P(MemorySpaceAssignmentTest, CostAnalysis) {
// This is mostly a smoke test since it's difficult and brittle to work out
// the cost of the HLO instructions.
HloComputation::Builder builder(TestName());
@@ -1701,7 +1849,7 @@
EXPECT_THAT(negate6, op::ShapeWithLayout(shape_in_alternate_mem));
}
-TEST_F(MemorySpaceAssignmentTest, MemoryBoundednessBufferIntervalCompare) {
+TEST_P(MemorySpaceAssignmentTest, MemoryBoundednessBufferIntervalCompare) {
// This test is carefully crafted to force only negates to be allocated to the
// alternate memory. The graph consists of interleaving negate and tanh
// operations:
@@ -1762,16 +1910,16 @@
F32, {4, 6},
/*minor_to_major=*/{1, 0}, /*tiles=*/{}, /*element_size_in_bits=*/0,
kDefaultMemorySpace);
- Shape shape_in_alternate_mem = ShapeUtil::MakeShapeWithLayout(
- F32, {4, 6},
- /*minor_to_major=*/{1, 0}, /*tiles=*/{}, /*element_size_in_bits=*/0,
- kAlternateMemorySpace);
- // Expect only negates to be in alternate memory space.
- EXPECT_THAT(negate0, op::ShapeWithLayout(shape_in_alternate_mem));
- EXPECT_THAT(negate1, op::ShapeWithLayout(shape_in_alternate_mem));
- EXPECT_THAT(negate2, op::ShapeWithLayout(shape_in_alternate_mem));
- EXPECT_THAT(negate3, op::ShapeWithLayout(shape_in_alternate_mem));
- EXPECT_THAT(negate4, op::ShapeWithLayout(shape_in_alternate_mem));
+ // Expect only negates to be in the alternate memory space. Not all of them
+ // might fit, but make sure at least one does.
+ std::vector<HloInstruction*> negate_instructions = {negate0, negate1, negate2,
+ negate3, negate4};
+ int64 num_negates_in_alternate_mem = absl::c_count_if(
+ negate_instructions, [&](const HloInstruction* instruction) {
+ return instruction->shape().layout().memory_space() ==
+ kAlternateMemorySpace;
+ });
+ EXPECT_GE(num_negates_in_alternate_mem, 1);
EXPECT_THAT(tanh0, op::ShapeWithLayout(shape_in_default_mem));
EXPECT_THAT(tanh1, op::ShapeWithLayout(shape_in_default_mem));
EXPECT_THAT(tanh2, op::ShapeWithLayout(shape_in_default_mem));
@@ -1779,5 +1927,9 @@
EXPECT_THAT(tanh4, op::ShapeWithLayout(shape_in_default_mem));
}
+INSTANTIATE_TEST_SUITE_P(MemorySpaceAssignmentInstantiation,
+ MemorySpaceAssignmentTest,
+ ::testing::Values(false, true));
+
} // namespace
} // namespace xla
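The TEST_F-to-TEST_P conversion throughout this file is the standard googletest recipe for value-parameterized tests: derive the fixture from ::testing::WithParamInterface<T>, read the current value with GetParam(), and register the values with INSTANTIATE_TEST_SUITE_P. A minimal standalone example of the same pattern (fixture and suite names here are illustrative):

#include "gtest/gtest.h"

class FlagTest : public ::testing::Test,
                 public ::testing::WithParamInterface<bool> {};

TEST_P(FlagTest, RunsForBothValues) {
  const bool flag = GetParam();
  // Each test body runs twice: once with flag == false, once with true.
  EXPECT_TRUE(flag || !flag);
}

INSTANTIATE_TEST_SUITE_P(FlagInstantiation, FlagTest,
                         ::testing::Values(false, true));

This is why a single new INSTANTIATE_TEST_SUITE_P at the bottom of the file is enough to run every converted test with allocate_across_sequential_calls both off and on.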
diff --git a/tensorflow/compiler/xla/util.cc b/tensorflow/compiler/xla/util.cc
index c32dd9e..7b17db1 100644
--- a/tensorflow/compiler/xla/util.cc
+++ b/tensorflow/compiler/xla/util.cc
@@ -330,33 +330,26 @@
// [2] T. J. Dekker, A floating point technique for extending the available
// precision, Numerische Mathematik, vol. 18, pp. 224–242, 1971.
std::pair<float, float> SplitF64ToF32(double x) {
+ const float x_f32 = static_cast<float>(x);
// Early return if x is an infinity or NaN.
if (!std::isfinite(x)) {
- return std::make_pair(static_cast<float>(x), 0.0f);
+ return std::make_pair(x_f32, 0.0f);
}
- // Following [1], the splitter is chosen as 2^{s} + 1, so that the most
- // significant (p - s) bits comprise the mantissa of 'hi'.
- static_assert(std::numeric_limits<double>::radix == 2,
- "Double is not Binary FP");
- constexpr double kSplitter = (1 << (std::numeric_limits<double>::digits -
- std::numeric_limits<float>::digits)) +
- 1;
-
// Only values within the range of F32 are supported, unless it is infinity.
// Small values with large negative exponents would be rounded to zero.
- CHECK(std::isfinite(static_cast<float>(x))) << x;
+ CHECK(std::isfinite(x_f32)) << x;
- // The value of '(shifted - x)' should algebraically be exactly 2^{29} * x
- // but it can a bit smaller, because of rounding to 53 bits in computation of
- // (2^29 + 1) * x'. This overestimates the value of 'hi' by a multiple of
- // 2^{-29} (assuming exponent was 0), and makes 'lo' negative. An extra bit is
- // squeezed into the 'sign' bit of 'lo' to represent 25 bits of significand.
- const double shifted = kSplitter * x;
- // TODO(anudhyan): Write a test to ensure that compiler is not optimizing away
- // the following computation to 'hi = x;'.
- const float hi = shifted - (shifted - x);
- const float lo = x - hi;
+ // The high float is simply the double rounded to the nearest float. Because
+ // we are rounding to nearest with ties to even, the error introduced in
+ // rounding is at most half an ULP of the high float.
+ const float hi = x_f32;
+ // We can compute the low term using Sterbenz' lemma: If a and b are two
+ // positive floating point numbers and a/2 ≤ b ≤ 2a, then their difference can
+ // be computed exactly.
+ // Note: the difference is computed exactly in double, but the cast to the
+ // nearest float can introduce additional error.
+ const float lo = static_cast<float>(x - static_cast<double>(hi));
return std::make_pair(hi, lo);
}
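The rewrite above drops the Dekker splitter in favor of a plain cast for 'hi' plus an exactly representable double subtraction for 'lo'. A self-contained sketch of the new scheme (overflow handling simplified to an early return rather than a CHECK), with a round-trip check that hi + lo recovers far more of the double's precision than hi alone:

#include <cmath>
#include <cstdio>
#include <utility>

// Split a double into a float pair (hi, lo) with x ~= hi + lo: hi is x
// rounded to the nearest float, lo is the residual x - hi computed exactly
// in double and then rounded to float.
std::pair<float, float> SplitF64ToF32(double x) {
  const float hi = static_cast<float>(x);
  if (!std::isfinite(hi)) return {hi, 0.0f};  // simplified overflow handling
  const float lo = static_cast<float>(x - static_cast<double>(hi));
  return {hi, lo};
}

int main() {
  const double x = 0.1;  // not exactly representable in either format
  auto [hi, lo] = SplitF64ToF32(x);
  // hi+lo is within ~2^-53 of x, while hi alone is only within ~2^-24.
  std::printf("hi=%.17g lo=%.17g  (hi+lo)-x=%.3g  hi-x=%.3g\n",
              (double)hi, (double)lo,
              ((double)hi + (double)lo) - x, (double)hi - x);
}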
diff --git a/tensorflow/core/api_def/python_api/api_def_Erfinv.pbtxt b/tensorflow/core/api_def/python_api/api_def_Erfinv.pbtxt
new file mode 100644
index 0000000..fae017d
--- /dev/null
+++ b/tensorflow/core/api_def/python_api/api_def_Erfinv.pbtxt
@@ -0,0 +1,4 @@
+op {
+ graph_op_name: "Erfinv"
+ visibility: HIDDEN
+}
diff --git a/tensorflow/core/api_def/python_api/api_def_Ndtri.pbtxt b/tensorflow/core/api_def/python_api/api_def_Ndtri.pbtxt
new file mode 100644
index 0000000..7e6e68e
--- /dev/null
+++ b/tensorflow/core/api_def/python_api/api_def_Ndtri.pbtxt
@@ -0,0 +1,4 @@
+op {
+ graph_op_name: "Ndtri"
+ visibility: HIDDEN
+}
diff --git a/tensorflow/core/common_runtime/direct_session.cc b/tensorflow/core/common_runtime/direct_session.cc
index 001ee7d..fd76b4d 100644
--- a/tensorflow/core/common_runtime/direct_session.cc
+++ b/tensorflow/core/common_runtime/direct_session.cc
@@ -497,6 +497,7 @@
const uint64 start_time_usecs = options_.env->NowMicros();
const int64 executor_step_count = executors_and_keys->step_count.fetch_add(1);
RunState run_state(step_id, &devices_);
+ const size_t num_executors = executors_and_keys->items.size();
profiler::TraceMe activity(
[&] {
@@ -553,21 +554,64 @@
}
#endif
- // Start parallel Executors.
- const size_t num_executors = executors_and_keys->items.size();
- Notification executors_done;
+ // Use std::unique_ptr so the wrapper thread pool is destroyed automatically.
+ std::unique_ptr<thread::ThreadPool> threadpool_wrapper;
+ thread::ThreadPool* pool = nullptr;
- // TODO(mrry): Switch the RunInternal() synchronous use of ExecutorBarrier
- // to use a stack-allocated barrier.
- ExecutorBarrier* barrier =
- new ExecutorBarrier(num_executors, run_state.rendez.get(),
- [&run_state, &executors_done](const Status& ret) {
- {
- mutex_lock l(run_state.mu);
- run_state.status.Update(ret);
- }
- executors_done.Notify();
- });
+ if (run_options.inter_op_thread_pool() < -1 ||
+ run_options.inter_op_thread_pool() >=
+ static_cast<int32>(thread_pools_.size())) {
+ return errors::InvalidArgument("Invalid inter_op_thread_pool: ",
+ run_options.inter_op_thread_pool());
+ }
+
+ if (run_in_caller_thread_) {
+ pool = nullptr;
+ } else if (threadpool_options.inter_op_threadpool != nullptr) {
+ threadpool_wrapper = absl::make_unique<thread::ThreadPool>(
+ threadpool_options.inter_op_threadpool);
+ pool = threadpool_wrapper.get();
+ } else if (run_options.inter_op_thread_pool() >= 0) {
+ pool = thread_pools_[run_options.inter_op_thread_pool()].first;
+ }
+
+ if (pool == nullptr) {
+ // We allow using the caller thread only when a single executor is
+ // specified.
+ if (executors_and_keys->items.size() > 1) {
+ pool = thread_pools_[0].first;
+ } else {
+ VLOG(1) << "Executing Session::Run() synchronously!";
+ }
+ }
+
+ std::unique_ptr<RunHandler> handler;
+ if (ShouldUseRunHandlerPool(run_options) &&
+ run_options.experimental().use_run_handler_pool()) {
+ VLOG(1) << "Using RunHandler to scheduler inter-op closures.";
+ handler = GetOrCreateRunHandlerPool(options_)->Get(step_id);
+ }
+ auto* handler_ptr = handler.get();
+
+ Executor::Args::Runner default_runner = nullptr;
+
+ if (pool == nullptr) {
+ default_runner = [](Executor::Args::Closure c) { c(); };
+ } else if (handler_ptr != nullptr) {
+ default_runner = [handler_ptr](Executor::Args::Closure c) {
+ handler_ptr->ScheduleInterOpClosure(std::move(c));
+ };
+ } else {
+ default_runner = [pool](Executor::Args::Closure c) {
+ pool->Schedule(std::move(c));
+ };
+ }
+
+ // Start parallel Executors.
+ const int64 call_timeout = run_options.timeout_in_ms() > 0
+ ? run_options.timeout_in_ms()
+ : operation_timeout_in_ms_;
+ const bool can_execute_synchronously = pool == nullptr && call_timeout == 0;
Executor::Args args;
args.step_id = step_id;
@@ -611,14 +655,6 @@
profiler_session = ProfilerSession::Create();
}
- if (run_options.inter_op_thread_pool() < -1 ||
- run_options.inter_op_thread_pool() >=
- static_cast<int32>(thread_pools_.size())) {
- delete barrier;
- return errors::InvalidArgument("Invalid inter_op_thread_pool: ",
- run_options.inter_op_thread_pool());
- }
-
// Register this step with session's cancellation manager, so that
// `Session::Close()` will cancel the step.
const CancellationToken cancellation_token =
@@ -628,98 +664,76 @@
step_cancellation_manager.StartCancel();
});
if (already_cancelled) {
- delete barrier;
return errors::Cancelled("Run call was cancelled");
}
- // Use std::unique_ptr to ensure garbage collection
- std::unique_ptr<thread::ThreadPool> threadpool_wrapper;
- thread::ThreadPool* pool = nullptr;
+ Status run_status;
- if (run_in_caller_thread_) {
- pool = nullptr;
- } else if (threadpool_options.inter_op_threadpool != nullptr) {
- threadpool_wrapper = absl::make_unique<thread::ThreadPool>(
- threadpool_options.inter_op_threadpool);
- pool = threadpool_wrapper.get();
- } else if (run_options.inter_op_thread_pool() >= 0) {
- pool = thread_pools_[run_options.inter_op_thread_pool()].first;
- }
-
- if (pool == nullptr) {
- // We allow using the caller thread only when having a single executor
- // specified.
- if (executors_and_keys->items.size() > 1) {
- pool = thread_pools_[0].first;
- } else {
- VLOG(1) << "Executing Session::Run() synchronously!";
- }
- }
-
- std::unique_ptr<RunHandler> handler;
- if (ShouldUseRunHandlerPool(run_options) &&
- run_options.experimental().use_run_handler_pool()) {
- VLOG(1) << "Using RunHandler to scheduler inter-op closures.";
- handler = GetOrCreateRunHandlerPool(options_)->Get(step_id);
- }
- auto* handler_ptr = handler.get();
-
- Executor::Args::Runner default_runner = nullptr;
-
- if (pool == nullptr) {
- default_runner = [](Executor::Args::Closure c) { c(); };
- } else if (handler_ptr != nullptr) {
- default_runner = [handler_ptr](Executor::Args::Closure c) {
- handler_ptr->ScheduleInterOpClosure(std::move(c));
- };
- } else {
- default_runner = [this, pool](Executor::Args::Closure c) {
- pool->Schedule(std::move(c));
- };
- }
-
- for (const auto& item : executors_and_keys->items) {
- // TODO(azaks): support partial run.
- // TODO(azaks): if the device picks its own threadpool, we need to assign
- // less threads to the main compute pool by default.
- thread::ThreadPool* device_thread_pool =
- item.device->tensorflow_device_thread_pool();
- // TODO(crk): Investigate usage of RunHandlerPool when using device specific
- // thread pool(s).
- if (!device_thread_pool) {
- args.runner = default_runner;
- } else {
- args.runner = [this, device_thread_pool](Executor::Args::Closure c) {
- device_thread_pool->Schedule(std::move(c));
+ auto set_threadpool_args_for_item =
+ [&default_runner, &handler](const PerPartitionExecutorsAndLib& item,
+ Executor::Args* args) {
+ // TODO(azaks): support partial run.
+ // TODO(azaks): if the device picks its own threadpool, we need to
+ // assign fewer threads to the main compute pool by default.
+ thread::ThreadPool* device_thread_pool =
+ item.device->tensorflow_device_thread_pool();
+ // TODO(crk): Investigate usage of RunHandlerPool when using device
+ // specific thread pool(s).
+ if (!device_thread_pool) {
+ args->runner = default_runner;
+ } else {
+ args->runner = [device_thread_pool](Executor::Args::Closure c) {
+ device_thread_pool->Schedule(std::move(c));
+ };
+ }
+ if (handler != nullptr) {
+ args->user_intra_op_threadpool =
+ handler->AsIntraThreadPoolInterface();
+ }
};
- }
- if (handler != nullptr) {
- args.user_intra_op_threadpool = handler->AsIntraThreadPoolInterface();
+
+ if (can_execute_synchronously) {
+ const auto& item = executors_and_keys->items[0];
+ set_threadpool_args_for_item(item, &args);
+ run_status = item.executor->Run(args);
+ } else {
+ // `barrier` will delete itself after the final executor finishes.
+ Notification executors_done;
+ ExecutorBarrier* barrier =
+ new ExecutorBarrier(num_executors, run_state.rendez.get(),
+ [&run_state, &executors_done](const Status& ret) {
+ {
+ mutex_lock l(run_state.mu);
+ run_state.status.Update(ret);
+ }
+ executors_done.Notify();
+ });
+
+ for (const auto& item : executors_and_keys->items) {
+ set_threadpool_args_for_item(item, &args);
+ item.executor->RunAsync(args, barrier->Get());
}
- item.executor->RunAsync(args, barrier->Get());
+ WaitForNotification(&executors_done, &run_state, &step_cancellation_manager,
+ call_timeout);
+ {
+ tf_shared_lock l(run_state.mu);
+ run_status = run_state.status;
+ }
}
- WaitForNotification(&executors_done, &run_state, &step_cancellation_manager,
- run_options.timeout_in_ms() > 0
- ? run_options.timeout_in_ms()
- : operation_timeout_in_ms_);
-
if (!cancellation_manager_->DeregisterCallback(cancellation_token)) {
// The step has been cancelled: make sure we don't attempt to receive the
// outputs as this would make it block forever.
- mutex_lock l(run_state.mu);
- run_state.status.Update(errors::Cancelled("Run call was cancelled"));
+ run_status.Update(errors::Cancelled("Run call was cancelled"));
}
if (profiler_session) {
TF_RETURN_IF_ERROR(profiler_session->CollectData(run_metadata));
}
- {
- mutex_lock l(run_state.mu);
- TF_RETURN_IF_ERROR(run_state.status);
- }
+ TF_RETURN_IF_ERROR(run_status);
// Save the output tensors of this run we choose to keep.
if (!run_state.tensor_store.empty()) {
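To summarize the restructured Run path above: with a single executor, no thread pool, and no timeout, the executor is now invoked synchronously; otherwise completion is still funneled through an ExecutorBarrier plus a Notification. A self-contained sketch of that countdown-barrier idea (simplified; the real barrier also merges per-executor Status values into run_state and deletes itself after the final executor finishes):

#include <atomic>
#include <condition_variable>
#include <functional>
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>

// A countdown barrier: runs `done` once the last of `n` participants finishes.
class CountdownBarrier {
 public:
  CountdownBarrier(int n, std::function<void()> done)
      : pending_(n), done_(std::move(done)) {}
  void OneDone() {
    if (pending_.fetch_sub(1) == 1) done_();  // last finisher fires callback
  }
 private:
  std::atomic<int> pending_;
  std::function<void()> done_;
};

int main() {
  std::mutex mu;
  std::condition_variable cv;
  bool all_done = false;
  CountdownBarrier barrier(3, [&] {
    std::lock_guard<std::mutex> l(mu);
    all_done = true;
    cv.notify_one();  // plays the role of executors_done.Notify()
  });
  std::vector<std::thread> workers;
  for (int i = 0; i < 3; ++i)
    workers.emplace_back([&barrier] { barrier.OneDone(); });
  {
    std::unique_lock<std::mutex> l(mu);
    cv.wait(l, [&] { return all_done; });  // analogous to WaitForNotification
  }
  for (auto& t : workers) t.join();
  std::cout << "all executors done\n";
}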
diff --git a/tensorflow/core/common_runtime/eager/execute.cc b/tensorflow/core/common_runtime/eager/execute.cc
index c819f0f..56ec9e6 100644
--- a/tensorflow/core/common_runtime/eager/execute.cc
+++ b/tensorflow/core/common_runtime/eager/execute.cc
@@ -330,32 +330,6 @@
return {x, tensorflow::FingerprintCat64(a.high64, x)};
}
-bool IsMultiDevice(const FunctionDef* fdef) {
- if (fdef == nullptr) {
- // Primitive op.
- return false;
- }
-
- // Run all functions as multi-device.
- return true;
-
- // We can eliminate some overhead by running simple functions using regular
- // CallOp kernel. However, it is tricky to figure out which functions should
- // be run using CallOp. Also, currently CallOp runs neither optimization
- // passes (needed for TPU/XLA) nor grappler.
- // Here are some cases where a function should be run in multi-device mode:
- // - Function takes at least two resources on different devices.
- // - Function takes a resource on deviceA and a body op explicitly placed
- // on deviceB.
- // - Function has a colocation constraint.
- // - Function has an explicit device annotation (which might not be using
- // full canonical device name) different from op_device. Note that false
- // positives are ok.
- // - Function has a node or a (node) attribute that can potentially make
- // the function multi-device after a rewrite pass (e.g. various XLA/TPU
- // special nodes and attributes)
-}
-
Status GetDeviceForInput(const EagerContext* ctx, TensorHandle* tensor_handle,
Device** result) {
Device* cpu_device = ctx->HostCPU();
@@ -486,13 +460,25 @@
Fprint128 cache_key = op->MutableAttrs()->CacheKey(op->GetDeviceName());
- bool is_multi_device_function =
- IsMultiDevice(ctx->FindFunctionDef(op->Name()));
-
std::vector<Device*> input_dev_ptrs;
std::unordered_map<int, DtypeAndPartialTensorShape>
input_resource_variable_dtypes_and_shapes;
- if (is_multi_device_function) {
+ // We can eliminate some overhead by running simple functions using regular
+ // CallOp kernel. However, it is tricky to figure out which functions should
+ // be run using CallOp. Also, currently CallOp runs neither optimization
+ // passes (needed for TPU/XLA) nor grappler.
+ // Here are some cases where a function should be run in multi-device mode:
+ // - Function takes at least two resources on different devices.
+ // - Function takes a resource on deviceA and a body op explicitly placed
+ // on deviceB.
+ // - Function has a colocation constraint.
+ // - Function has an explicit device annotation (which might not be using
+ // full canonical device name) different from op_device. Note that false
+ // positives are ok.
+ // - Function has a node or a (node) attribute that can potentially make
+ // the function multi-device after a rewrite pass (e.g. various XLA/TPU
+ // special nodes and attributes)
+ if (op->is_function()) {
profiler::TraceMe activity("EagerCopyToDeviceAndAddCacheKey",
profiler::TraceMeLevel::kInfo);
input_dev_ptrs.reserve(op->Inputs().size());
@@ -549,15 +535,20 @@
if (kernel == nullptr) {
DVLOG(2) << "Creating new kernel for " << op->Name() << " on device "
<< DeviceNameOrUnspecified(op->Device());
- bool compile_with_xla;
- TF_RETURN_IF_ERROR(ShouldCompileWithXLA(op, ctx, &compile_with_xla));
- if (compile_with_xla) {
- // Note that it is not ideal, but currently correct, to set this
- // attribute after computing the kernel cache key above.
- // Note: If the attribute is already set to true, this is a noop.
- op->MutableAttrs()->Set(kXlaCompileAttr, true);
+ bool run_function_with_flr = false;
+ bool compile_with_xla = false;
+ if (op->is_function()) {
+ bool compile_with_xla;
+ TF_RETURN_IF_ERROR(ShouldCompileWithXLA(op, ctx, &compile_with_xla));
+ if (compile_with_xla) {
+ // Note that it is not ideal, but currently correct, to set this
+ // attribute after computing the kernel cache key above.
+ // Note: If the attribute is already set to true, this is a noop.
+ op->MutableAttrs()->Set(kXlaCompileAttr, true);
+ } else {
+ run_function_with_flr = true;
+ }
}
- bool run_function_with_flr = is_multi_device_function && !compile_with_xla;
const NodeDef& ndef = op->MutableAttrs()->BuildNodeDef();
if (device == nullptr) {
diff --git a/tensorflow/core/common_runtime/executor.cc b/tensorflow/core/common_runtime/executor.cc
index 228ce68..e1135ec 100644
--- a/tensorflow/core/common_runtime/executor.cc
+++ b/tensorflow/core/common_runtime/executor.cc
@@ -81,7 +81,7 @@
// Helper routines for collecting step stats.
namespace nodestats {
-inline int64 NowInNsec() { return Env::Default()->NowNanos(); }
+inline int64 NowInNsec() { return EnvTime::NowNanos(); }
void SetScheduled(NodeExecStatsInterface* stats, int64 micros) {
if (!stats) return;
diff --git a/tensorflow/core/common_runtime/executor.h b/tensorflow/core/common_runtime/executor.h
index ad85e71..42d5b9e 100644
--- a/tensorflow/core/common_runtime/executor.h
+++ b/tensorflow/core/common_runtime/executor.h
@@ -111,7 +111,7 @@
virtual void RunAsync(const Args& args, DoneCallback done) = 0;
// Synchronous wrapper for RunAsync().
- Status Run(const Args& args) {
+ virtual Status Run(const Args& args) {
Status ret;
Notification n;
RunAsync(args, [&ret, &n](const Status& s) {
diff --git a/tensorflow/core/framework/dataset.h b/tensorflow/core/framework/dataset.h
index 99433ac..496544f 100644
--- a/tensorflow/core/framework/dataset.h
+++ b/tensorflow/core/framework/dataset.h
@@ -942,7 +942,7 @@
// this iterator has started work.
void RecordStart(IteratorContext* ctx, bool stop_output = false) {
if (collect_resource_usage(ctx)) {
- int64 now_nanos = Env::Default()->NowNanos();
+ int64 now_nanos = EnvTime::NowNanos();
if (stop_output && node_->output()) {
node_->output()->record_stop(now_nanos);
}
@@ -954,7 +954,7 @@
// this iterator has stopped work.
void RecordStop(IteratorContext* ctx, bool start_output = false) {
if (collect_resource_usage(ctx)) {
- int64 now_nanos = Env::Default()->NowNanos();
+ int64 now_nanos = EnvTime::NowNanos();
node_->record_stop(now_nanos);
if (start_output && node_->output()) {
node_->output()->record_start(now_nanos);
diff --git a/tensorflow/core/framework/run_handler.cc b/tensorflow/core/framework/run_handler.cc
index 156f240..73e49bb 100644
--- a/tensorflow/core/framework/run_handler.cc
+++ b/tensorflow/core/framework/run_handler.cc
@@ -948,7 +948,7 @@
CHECK_EQ(handler->tws()->TaskQueueSize(true), 0);
CHECK_EQ(handler->tws()->TaskQueueSize(false), 0);
- uint64 now = tensorflow::Env::Default()->NowMicros();
+ uint64 now = tensorflow::EnvTime::NowMicros();
double elapsed = (now - handler->start_time_us()) / 1000.0;
time_hist_.Add(elapsed);
diff --git a/tensorflow/core/kernels/data/experimental/BUILD b/tensorflow/core/kernels/data/experimental/BUILD
index d845d93..f4ad23a 100644
--- a/tensorflow/core/kernels/data/experimental/BUILD
+++ b/tensorflow/core/kernels/data/experimental/BUILD
@@ -184,6 +184,7 @@
"//tensorflow/core:core_cpu_internal",
"//tensorflow/core:dataset_ops_op_lib",
"//tensorflow/core:framework",
+ "//tensorflow/core:framework_internal",
"//tensorflow/core:lib",
"//tensorflow/core:lib_internal",
"//tensorflow/core:nn_ops_op_lib",
diff --git a/tensorflow/core/kernels/data/experimental/map_and_batch_dataset_op.cc b/tensorflow/core/kernels/data/experimental/map_and_batch_dataset_op.cc
index f765cff..6fbf153 100644
--- a/tensorflow/core/kernels/data/experimental/map_and_batch_dataset_op.cc
+++ b/tensorflow/core/kernels/data/experimental/map_and_batch_dataset_op.cc
@@ -20,6 +20,7 @@
#include "tensorflow/core/common_runtime/function.h"
#include "tensorflow/core/common_runtime/input_colocation_exemption_registry.h"
#include "tensorflow/core/common_runtime/metrics.h"
+#include "tensorflow/core/framework/model.h"
#include "tensorflow/core/framework/partial_tensor_shape.h"
#include "tensorflow/core/framework/stats_aggregator.h"
#include "tensorflow/core/framework/tensor.h"
@@ -170,9 +171,12 @@
num_parallel_calls_(std::make_shared<model::SharedState>(
params.dataset->num_parallel_calls_, mu_, cond_var_)),
max_batch_results_(
- std::min(kMaxBatchResults, (params.dataset->num_parallel_calls_ +
- params.dataset->batch_size_ - 1) /
- params.dataset->batch_size_)) {}
+ params.dataset->num_parallel_calls_ == model::kAutotune
+ ? kMaxBatchResults
+ : std::min(kMaxBatchResults,
+ (params.dataset->num_parallel_calls_ +
+ params.dataset->batch_size_ - 1) /
+ params.dataset->batch_size_)) {}
~Iterator() override {
mutex_lock l(*mu_);
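The max_batch_results change above guards a ceiling division against the autotune sentinel, which would otherwise drive the quotient to zero. A small standalone check of the guarded computation; the sentinel value of -1 and the cap of 16 are assumptions here, not taken from this diff:

#include <algorithm>
#include <cstdint>
#include <iostream>

constexpr int64_t kAutotune = -1;         // sentinel: "let the runtime decide"
constexpr int64_t kMaxBatchResults = 16;  // illustrative cap

// Ceiling division of num_parallel_calls by batch_size, capped, with the
// autotune sentinel handled up front as in the hunk above.
int64_t MaxBatchResults(int64_t num_parallel_calls, int64_t batch_size) {
  if (num_parallel_calls == kAutotune) return kMaxBatchResults;
  return std::min(kMaxBatchResults,
                  (num_parallel_calls + batch_size - 1) / batch_size);
}

int main() {
  std::cout << MaxBatchResults(10, 4) << "\n";         // ceil(10/4) = 3
  // Without the guard, (-1 + 4 - 1) / 4 == 0, i.e., no batch results at all.
  std::cout << MaxBatchResults(kAutotune, 4) << "\n";  // 16
}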
diff --git a/tensorflow/core/kernels/data/experimental/snapshot_dataset_op.cc b/tensorflow/core/kernels/data/experimental/snapshot_dataset_op.cc
index 9a7f558..72c9b0c 100644
--- a/tensorflow/core/kernels/data/experimental/snapshot_dataset_op.cc
+++ b/tensorflow/core/kernels/data/experimental/snapshot_dataset_op.cc
@@ -70,7 +70,9 @@
constexpr char kSeparator[] = "::";
constexpr char kBookkeeping[] = "Bookkeeping";
constexpr char kSnapshotReadElements[] = "snapshot_read_elements";
+constexpr char kSnapshotReadThroughput[] = "snapshot_read_throughput";
constexpr char kSnapshotWrittenElements[] = "snapshot_written_elements";
+constexpr char kSnapshotWriteThroughput[] = "snapshot_write_throughput";
class SnapshotWriter {
public:
@@ -667,7 +669,7 @@
stats_aggregator->AddScalar(
absl::StrCat(dataset()->node_name(), kSeparator,
kSnapshotReadElements),
- static_cast<float>(num_elements_read_), num_elements());
+ static_cast<float>(num_elements_read_), elements_produced_);
}
if (!buffer_.empty()) {
@@ -689,11 +691,18 @@
absl::Duration d = end - start;
time_spent_micros_ += absl::ToInt64Microseconds(d);
kbytes_read_ += static_cast<double>(num_bytes) / 1024.0;
+ float read_throughput =
+ (kbytes_read_ / 1024.0) / (time_spent_micros_ / 1000000.0);
+ if (stats_aggregator) {
+ stats_aggregator->AddScalar(
+ absl::StrCat(dataset()->node_name(), kSeparator,
+ kSnapshotReadThroughput),
+ read_throughput, elements_produced_);
+ }
elements_produced_++;
if (elements_produced_ % 10000 == 0) {
- LOG(INFO) << "Current read throughput (MBPS): "
- << ((kbytes_read_ / 1024.0) /
- (time_spent_micros_ / 1000000.0));
+ LOG(INFO)
+ << "Current read throughput (MBPS): " << read_throughput;
}
}
}
@@ -938,23 +947,32 @@
num_bytes += out_tensor.TotalBytes();
}
- absl::Time end = absl::Now();
- absl::Duration d = end - start;
- time_spent_micros_ += absl::ToInt64Microseconds(d);
- bytes_produced_ += num_bytes;
- elements_produced_++;
-
- if (elements_produced_ % 10000 == 0) {
- LOG(INFO) << "Current write throughput (MBPS): "
- << (bytes_produced_ * 1000000.0) /
- (time_spent_micros_ * 1024.0 * 1024.0);
- }
const auto& stats_aggregator = ctx->stats_aggregator();
if (stats_aggregator) {
stats_aggregator->AddScalar(
absl::StrCat(dataset()->node_name(), kSeparator,
kSnapshotWrittenElements),
- static_cast<float>(num_elements_written_), num_elements());
+ static_cast<float>(num_elements_written_),
+ elements_produced_);
+ }
+
+ absl::Time end = absl::Now();
+ absl::Duration d = end - start;
+ time_spent_micros_ += absl::ToInt64Microseconds(d);
+ bytes_produced_ += num_bytes;
+ float write_throughput = (bytes_produced_ * 1000000.0) /
+ (time_spent_micros_ * 1024.0 * 1024.0);
+ if (stats_aggregator) {
+ stats_aggregator->AddScalar(
+ absl::StrCat(dataset()->node_name(), kSeparator,
+ kSnapshotWriteThroughput),
+ write_throughput, elements_produced_);
+ }
+
+ elements_produced_++;
+ if (elements_produced_ % 10000 == 0) {
+ LOG(INFO) << "Current write throughput (MBPS): "
+ << write_throughput;
}
}
return Status::OK();
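Both new throughput scalars use the same unit conversion as the existing log lines: kilobytes (read path) or bytes (write path) over microseconds, rescaled to MB per second. A quick standalone check that the two formulas agree:

#include <cstdio>

int main() {
  // Read path: (kbytes / 1024 KB-per-MB) / (micros / 1e6 s) -> MB per second.
  double kbytes_read = 2048.0;          // 2 MB
  double time_spent_micros = 500000.0;  // 0.5 s
  double read_mbps = (kbytes_read / 1024.0) / (time_spent_micros / 1000000.0);
  // Write path: bytes * 1e6 / (micros * 1024 * 1024) -> also MB per second.
  double bytes_produced = 2.0 * 1024 * 1024;  // same 2 MB
  double write_mbps = (bytes_produced * 1000000.0) /
                      (time_spent_micros * 1024.0 * 1024.0);
  std::printf("read=%.1f MB/s write=%.1f MB/s\n", read_mbps, write_mbps);  // 4.0 each
}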
diff --git a/tensorflow/core/kernels/data/prefetch_dataset_op.cc b/tensorflow/core/kernels/data/prefetch_dataset_op.cc
index 618e2b1..097f3cd 100644
--- a/tensorflow/core/kernels/data/prefetch_dataset_op.cc
+++ b/tensorflow/core/kernels/data/prefetch_dataset_op.cc
@@ -208,14 +208,17 @@
}
mutex_lock parent_l(*parent_mu_);
- mutex_lock l(*mu_);
- if (stats_aggregator) {
- stats_aggregator->AddScalar(
- stats_utils::BufferSizeScalarName(dataset()->node_name()),
- static_cast<float>(buffer_.size()), num_elements());
- stats_aggregator->AddScalar(
- stats_utils::BufferCapacityScalarName(dataset()->node_name()),
- static_cast<float>(buffer_limit()), num_elements());
+ {
+ mutex_lock l(*mu_);
+ if (stats_aggregator) {
+ stats_aggregator->AddScalar(
+ stats_utils::BufferSizeScalarName(dataset()->node_name()),
+ static_cast<float>(buffer_.size()), num_elements());
+ stats_aggregator->AddScalar(
+ stats_utils::BufferCapacityScalarName(dataset()->node_name()),
+ static_cast<float>(buffer_limit()), num_elements());
+ }
+ // Release mu_
}
return input_impl_->GetNext(ctx, out_tensors, end_of_sequence);
}
@@ -477,6 +480,7 @@
// This mutex is used to ensure exclusivity between multiple threads
// reading/writing this iterator's local state.
+ // Note: We should never call GetNext on the input while holding this.
const std::shared_ptr<mutex> mu_;
// This mutex is used to ensure exclusivity between multiple threads
// accessing the parent iterator. We keep this separate from `mu_` to
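The prefetch change above is purely a scoping fix: the stats update needs mu_, but per the new comment mu_ must not be held across the nested GetNext call. A minimal sketch of the pattern, releasing a lock by closing its block before a potentially blocking call (all names illustrative):

#include <iostream>
#include <mutex>

std::mutex mu;
int buffered = 3;

void RecordStats() { std::cout << "buffer size: " << buffered << "\n"; }
void BlockingGetNext() { std::cout << "getting next element...\n"; }

void GetNext() {
  {
    std::lock_guard<std::mutex> l(mu);  // guards only the stats snapshot
    RecordStats();
  }  // lock released here, before the blocking call
  BlockingGetNext();  // must not run under mu: the producer thread needs it
}

int main() { GetNext(); }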
diff --git a/tensorflow/core/kernels/data/single_threaded_executor.cc b/tensorflow/core/kernels/data/single_threaded_executor.cc
index cd47da0..a6b3167 100644
--- a/tensorflow/core/kernels/data/single_threaded_executor.cc
+++ b/tensorflow/core/kernels/data/single_threaded_executor.cc
@@ -195,10 +195,7 @@
return Status::OK();
}
- // TODO(mrry): Consider specializing the implementation of Executor::Run()
- // instead, to avoid unnecessary atomic operations in the callback when
- // running synchronously.
- void RunAsync(const Args& args, DoneCallback done) override {
+ Status Run(const Args& args) override {
// The inputs to each kernel are stored contiguously in `inputs`.
//
// We use `kernels_[i].input_start_index` and `kernels_[i].num_inputs` to
@@ -275,9 +272,9 @@
const size_t received_args =
args.call_frame ? args.call_frame->num_args() : 0;
if (arg_output_locations_.size() > received_args) {
- done(errors::InvalidArgument("Expected ", arg_output_locations_.size(),
- " arguments, but only received ",
- received_args, "."));
+ return errors::InvalidArgument("Expected ", arg_output_locations_.size(),
+ " arguments, but only received ",
+ received_args, ".");
}
// ArgOp is a relatively expensive OpKernel due to the Tensor
@@ -351,8 +348,7 @@
}
}
}
- done(ctx.status());
- return;
+ return ctx.status();
}
// Free the inputs to the current kernel.
@@ -379,7 +375,11 @@
delete val.tensor;
}
}
- done(Status::OK());
+ return Status::OK();
+ }
+
+ void RunAsync(const Args& args, DoneCallback done) override {
+ done(Run(args));
}
private:
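This inversion pairs with the executor.h change earlier in the diff: the base class derives a synchronous Run from RunAsync by blocking on a notification, while the single-threaded executor now implements Run natively and derives RunAsync from it, skipping the notification round-trip. A self-contained sketch of both adapters, with std::string standing in for Status and std::promise for Notification:

#include <functional>
#include <future>
#include <iostream>
#include <string>

using Status = std::string;  // stand-in for tensorflow::Status
using DoneCallback = std::function<void(const Status&)>;

struct Executor {
  virtual ~Executor() = default;
  virtual void RunAsync(DoneCallback done) = 0;
  // Default synchronous wrapper: block until the async path completes.
  virtual Status Run() {
    std::promise<Status> p;  // plays the role of the Notification
    RunAsync([&p](const Status& s) { p.set_value(s); });
    return p.get_future().get();
  }
};

// A single-threaded executor runs synchronously anyway, so it implements
// Run directly and adapts in the opposite direction for RunAsync.
struct SingleThreadedExecutor : Executor {
  Status Run() override { return "OK"; }
  void RunAsync(DoneCallback done) override { done(Run()); }
};

int main() {
  SingleThreadedExecutor ex;
  std::cout << ex.Run() << "\n";                               // OK
  ex.RunAsync([](const Status& s) { std::cout << s << "\n"; });  // OK
}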
diff --git a/tensorflow/core/kernels/decode_image_op.cc b/tensorflow/core/kernels/decode_image_op.cc
index f89533d..ffa6ca5 100644
--- a/tensorflow/core/kernels/decode_image_op.cc
+++ b/tensorflow/core/kernels/decode_image_op.cc
@@ -67,6 +67,9 @@
// Decode an image (either jpeg, png, or gif). We use a single op so that
// users don't have to care about which format they have.
+// TODO(b/141645641): Separate concerns here: the constructor uses the op name
+// to determine the type of parsing, Compute uses the file's magic bytes, and
+// these might not match.
class DecodeImageOp : public OpKernel {
public:
explicit DecodeImageOp(OpKernelConstruction* context) : OpKernel(context) {
diff --git a/tensorflow/core/lib/core/status.h b/tensorflow/core/lib/core/status.h
index a4c2c72..a863f2f 100644
--- a/tensorflow/core/lib/core/status.h
+++ b/tensorflow/core/lib/core/status.h
@@ -47,7 +47,11 @@
/// Copy the specified status.
Status(const Status& s);
- void operator=(const Status& s);
+ Status& operator=(const Status& s);
+#ifndef SWIG
+ Status(Status&& s) noexcept;
+ Status& operator=(Status&& s) noexcept;
+#endif // SWIG
static Status OK() { return Status(); }
@@ -133,16 +137,28 @@
};
inline Status::Status(const Status& s)
- : state_((s.state_ == NULL) ? NULL : new State(*s.state_)) {}
+ : state_((s.state_ == nullptr) ? nullptr : new State(*s.state_)) {}
-inline void Status::operator=(const Status& s) {
+inline Status& Status::operator=(const Status& s) {
// The following condition catches both aliasing (when this == &s),
// and the common case where both s and *this are ok.
if (state_ != s.state_) {
SlowCopyFrom(s.state_.get());
}
+ return *this;
}
+#ifndef SWIG
+inline Status::Status(Status&& s) noexcept : state_(std::move(s.state_)) {}
+
+inline Status& Status::operator=(Status&& s) noexcept {
+ if (state_ != s.state_) {
+ state_ = std::move(s.state_);
+ }
+ return *this;
+}
+#endif // SWIG
+
inline bool Status::operator==(const Status& x) const {
return (this->state_ == x.state_) || (ToString() == x.ToString());
}
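Because state_ is a pointer-like member where null means OK, these move operations leave the moved-from Status in the OK state rather than an indeterminate one. A standalone sketch of the same idiom, with the State payload reduced to a string (the TF version above additionally guards self-move-assignment via the state_ != s.state_ check):

#include <iostream>
#include <memory>
#include <string>
#include <utility>

class Status {
 public:
  Status() = default;  // null state_ means OK
  explicit Status(std::string msg) : state_(new std::string(std::move(msg))) {}
  Status(Status&& s) noexcept = default;
  Status& operator=(Status&& s) noexcept = default;
  bool ok() const { return state_ == nullptr; }
  std::string ToString() const { return ok() ? "OK" : *state_; }
 private:
  std::unique_ptr<std::string> state_;  // moving it nulls the source
};

int main() {
  Status a("Invalid argument: Invalid");
  Status b(std::move(a));
  std::cout << b.ToString() << "\n";  // Invalid argument: Invalid
  std::cout << a.ToString() << "\n";  // OK: moved-from state_ is null
}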
diff --git a/tensorflow/core/lib/core/status_test.cc b/tensorflow/core/lib/core/status_test.cc
index 52fdfb3..d9a9539 100644
--- a/tensorflow/core/lib/core/status_test.cc
+++ b/tensorflow/core/lib/core/status_test.cc
@@ -58,6 +58,19 @@
ASSERT_EQ(a.ToString(), b.ToString());
}
+TEST(Status, Move) {
+ Status a(errors::InvalidArgument("Invalid"));
+ Status b(std::move(a));
+ ASSERT_EQ("Invalid argument: Invalid", b.ToString());
+}
+
+TEST(Status, MoveAssign) {
+ Status a(errors::InvalidArgument("Invalid"));
+ Status b;
+ b = std::move(a);
+ ASSERT_EQ("Invalid argument: Invalid", b.ToString());
+}
+
TEST(Status, Update) {
Status s;
s.Update(Status::OK());
diff --git a/tensorflow/core/nccl/nccl_manager.cc b/tensorflow/core/nccl/nccl_manager.cc
index c3d6af9..b5c0798 100644
--- a/tensorflow/core/nccl/nccl_manager.cc
+++ b/tensorflow/core/nccl/nccl_manager.cc
@@ -703,6 +703,9 @@
if (p->output) {
recvbuff = const_cast<char*>(p->output->tensor_data().data());
num_elements = p->output->NumElements();
+ } else {
+ // Operate in-place if no output (for the src node).
+ recvbuff = const_cast<void*>(sendbuff);
}
if (num_elements < 0) {
p->done_callback(errors::Internal(
diff --git a/tensorflow/core/platform/cloud/gcs_throttle.cc b/tensorflow/core/platform/cloud/gcs_throttle.cc
index 940d98f..4eeee36 100644
--- a/tensorflow/core/platform/cloud/gcs_throttle.cc
+++ b/tensorflow/core/platform/cloud/gcs_throttle.cc
@@ -19,10 +19,18 @@
namespace tensorflow {
+namespace {
+EnvTime* get_default_env_time() {
+ static EnvTime* default_env_time = new EnvTime;
+ return default_env_time;
+}
+} // namespace
+
GcsThrottle::GcsThrottle(EnvTime* env_time)
- : last_updated_secs_(env_time->NowSeconds()),
+ : last_updated_secs_(env_time ? env_time->GetOverridableNowSeconds()
+ : EnvTime::NowSeconds()),
available_tokens_(0),
- env_time_(env_time) {}
+ env_time_(env_time ? env_time : get_default_env_time()) {}
bool GcsThrottle::AdmitRequest() {
mutex_lock l(mu_);
@@ -44,12 +52,12 @@
mutex_lock l(mu_);
config_ = config;
available_tokens_ = config.initial_tokens;
- last_updated_secs_ = env_time_->NowSeconds();
+ last_updated_secs_ = env_time_->GetOverridableNowSeconds();
}
void GcsThrottle::UpdateState() {
// TODO(b/72643279): Switch to a monotonic clock.
- int64 now = env_time_->NowSeconds();
+ int64 now = env_time_->GetOverridableNowSeconds();
uint64 delta_secs =
std::max(int64{0}, now - static_cast<int64>(last_updated_secs_));
available_tokens_ += delta_secs * config_.token_rate;
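GcsThrottle::UpdateState is a token bucket: tokens accrue at token_rate per elapsed wall-clock second, and admitted requests spend them. A self-contained sketch of that accrual logic with an injectable clock, which is also why the class accepts an EnvTime (the spend-on-admit policy below is an assumption, since that part of GcsThrottle is not shown in this hunk):

#include <algorithm>
#include <cstdint>
#include <functional>
#include <iostream>
#include <utility>

// Minimal token bucket: refill by rate * elapsed seconds, spend per request.
class TokenBucket {
 public:
  TokenBucket(int64_t rate, std::function<int64_t()> now_secs)
      : rate_(rate), now_secs_(std::move(now_secs)), last_(now_secs_()) {}
  bool AdmitRequest(int64_t cost) {
    const int64_t now = now_secs_();
    tokens_ += std::max<int64_t>(0, now - last_) * rate_;  // accrue
    last_ = now;
    if (tokens_ < cost) return false;
    tokens_ -= cost;  // spend
    return true;
  }
 private:
  int64_t rate_;
  std::function<int64_t()> now_secs_;
  int64_t tokens_ = 0;
  int64_t last_;
};

int main() {
  int64_t fake_time = 0;  // injectable clock, like the overridable EnvTime
  TokenBucket bucket(/*rate=*/100, [&] { return fake_time; });
  std::cout << bucket.AdmitRequest(50) << "\n";  // 0: no time has passed yet
  fake_time = 1;                                 // one second later: +100 tokens
  std::cout << bucket.AdmitRequest(50) << "\n";  // 1: admitted, 50 tokens left
}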
diff --git a/tensorflow/core/platform/cloud/gcs_throttle.h b/tensorflow/core/platform/cloud/gcs_throttle.h
index 8c9e2e0..ac5a536 100644
--- a/tensorflow/core/platform/cloud/gcs_throttle.h
+++ b/tensorflow/core/platform/cloud/gcs_throttle.h
@@ -70,7 +70,7 @@
/**
* Constructs a GcsThrottle.
*/
- explicit GcsThrottle(EnvTime* env_time = EnvTime::Default());
+ explicit GcsThrottle(EnvTime* env_time = nullptr);
/**
* AdmitRequest updates the GcsThrottle to record a request will be made.
diff --git a/tensorflow/core/platform/cloud/gcs_throttle_test.cc b/tensorflow/core/platform/cloud/gcs_throttle_test.cc
index 774855a..e8eebc5 100644
--- a/tensorflow/core/platform/cloud/gcs_throttle_test.cc
+++ b/tensorflow/core/platform/cloud/gcs_throttle_test.cc
@@ -24,7 +24,9 @@
class TestTime : public EnvTime {
public:
- uint64 NowNanos() const override { return now_micros_ * kMicrosToNanos; }
+ uint64 GetOverridableNowNanos() const override {
+ return now_micros_ * kMicrosToNanos;
+ }
void SetTime(uint64 now_micros) { now_micros_ = now_micros; }
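The EnvTime change threading through this diff, with NowNanos and friends becoming static while tests override GetOverridableNowNanos, separates a fast static path for hot code from a virtual hook for fakes. A minimal sketch of that two-level clock API (method names mirror the diff; the implementation details are simplified):

#include <chrono>
#include <cstdint>
#include <iostream>

class EnvTime {
 public:
  virtual ~EnvTime() = default;
  // Static fast path: no virtual dispatch, used by hot code paths.
  static uint64_t NowNanos() {
    return std::chrono::duration_cast<std::chrono::nanoseconds>(
               std::chrono::system_clock::now().time_since_epoch())
        .count();
  }
  // Virtual hook: defaults to the real clock, but tests can override it.
  virtual uint64_t GetOverridableNowNanos() const { return NowNanos(); }
};

class TestTime : public EnvTime {
 public:
  uint64_t GetOverridableNowNanos() const override { return now_nanos_; }
  void SetTime(uint64_t now_nanos) { now_nanos_ = now_nanos; }
 private:
  uint64_t now_nanos_ = 0;
};

int main() {
  TestTime t;
  t.SetTime(42);
  std::cout << t.GetOverridableNowNanos() << "\n";  // 42, fully deterministic
  std::cout << (EnvTime::NowNanos() > 0) << "\n";   // 1: real wall clock
}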
diff --git a/tensorflow/core/platform/default/build_config.bzl b/tensorflow/core/platform/default/build_config.bzl
index 5bec011..2b13730 100644
--- a/tensorflow/core/platform/default/build_config.bzl
+++ b/tensorflow/core/platform/default/build_config.bzl
@@ -1,7 +1,7 @@
# Platform-specific build configurations.
load("@com_google_protobuf//:protobuf.bzl", "proto_gen")
-load("//tensorflow:tensorflow.bzl", "if_not_windows")
+load("//tensorflow:tensorflow.bzl", "clean_dep", "if_not_windows")
load("//tensorflow/core/platform:default/build_config_root.bzl", "if_static")
load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
load("@local_config_rocm//rocm:build_defs.bzl", "if_rocm")
@@ -219,7 +219,7 @@
if use_grpc_plugin:
cc_libs += select({
- "//tensorflow:linux_s390x": ["//external:grpc_lib_unsecure"],
+ clean_dep("//tensorflow:linux_s390x"): ["//external:grpc_lib_unsecure"],
"//conditions:default": ["//external:grpc_lib"],
})
@@ -246,6 +246,18 @@
**kargs
)
+ # Temporarily also add an alias with the 'protolib_name'. So far we relied
+ # on copybara to switch dependencies to the _cc dependencies. Now that these
+ # copybara rules are removed, we need to first change the internal BUILD
+ # files to depend on the correct targets instead, then this can be removed.
+ # TODO(b/143648532): Remove this once all reverse dependencies are migrated.
+ if protolib_name != name:
+ native.alias(
+ name = protolib_name,
+ actual = name,
+ visibility = kargs["visibility"],
+ )
+
# Re-defined protocol buffer rule to bring in the change introduced in commit
# https://github.com/google/protobuf/commit/294b5758c373cbab4b72f35f4cb62dc1d8332b68
# which was not part of a stable protobuf release in 04/2018.
@@ -361,6 +373,18 @@
deps = [s + "_genproto" for s in protolib_deps],
)
+ # Temporarily also add an alias with 'name'. So far we relied on
+ # copybara to switch dependencies to the _cc dependencies. Now that these
+ # copybara rules are removed, we need to change the internal BUILD files to
+ # depend on the correct targets instead.
+ # TODO(b/143648532): Remove this once all reverse dependencies are
+ # migrated.
+ native.alias(
+ name = name,
+ actual = cc_name,
+ testonly = testonly,
+ visibility = visibility,
+ )
native.cc_library(
name = cc_name,
deps = cc_deps + ["@com_google_protobuf//:protobuf_headers"] + if_static([name + "_cc_impl"]),
@@ -419,7 +443,7 @@
)
native.py_library(
name = py_name,
- deps = py_deps + ["@com_google_protobuf//:protobuf_python"],
+ deps = py_deps + [clean_dep("@com_google_protobuf//:protobuf_python")],
testonly = testonly,
visibility = visibility,
)
@@ -429,12 +453,12 @@
name = py_name,
testonly = testonly,
srcs = srcs,
- default_runtime = "@com_google_protobuf//:protobuf_python",
+ default_runtime = clean_dep("@com_google_protobuf//:protobuf_python"),
protoc = "@com_google_protobuf//:protoc",
srcs_version = srcs_version,
use_grpc_plugin = use_grpc_plugin,
visibility = visibility,
- deps = deps + py_deps + ["@com_google_protobuf//:protobuf_python"],
+ deps = deps + py_deps + [clean_dep("@com_google_protobuf//:protobuf_python")],
)
def tf_jspb_proto_library(**kwargs):
@@ -494,7 +518,7 @@
posix_set = base_set + ["posix/" + f for f in files]
return select({
- "//tensorflow:windows": native.glob(windows_set),
+ clean_dep("//tensorflow:windows"): native.glob(windows_set),
"//conditions:default": native.glob(posix_set),
})
@@ -508,7 +532,7 @@
"default/posix_file_system.h",
])
return select({
- "//tensorflow:windows": windows_hdrs,
+ clean_dep("//tensorflow:windows"): windows_hdrs,
"//conditions:default": native.glob([
"default/*.h",
"posix/*.h",
@@ -531,7 +555,7 @@
"default/stacktrace_handler.cc",
])
return select({
- "//tensorflow:windows": windows_srcs,
+ clean_dep("//tensorflow:windows"): windows_srcs,
"//conditions:default": native.glob([
"default/*.cc",
"posix/*.cc",
@@ -553,28 +577,28 @@
]
def tf_additional_all_protos():
- return ["//tensorflow/core:protos_all"]
+ return [clean_dep("//tensorflow/core:protos_all")]
def tf_protos_all_impl():
return [
- "//tensorflow/core:autotuning_proto_cc_impl",
- "//tensorflow/core:conv_autotuning_proto_cc_impl",
- "//tensorflow/core:protos_all_cc_impl",
+ clean_dep("//tensorflow/core:autotuning_proto_cc_impl"),
+ clean_dep("//tensorflow/core:conv_autotuning_proto_cc_impl"),
+ clean_dep("//tensorflow/core:protos_all_cc_impl"),
]
def tf_protos_all():
return if_static(
extra_deps = tf_protos_all_impl(),
- otherwise = ["//tensorflow/core:protos_all_cc"],
+ otherwise = [clean_dep("//tensorflow/core:protos_all_cc")],
)
def tf_protos_grappler_impl():
- return ["//tensorflow/core/grappler/costs:op_performance_data_cc_impl"]
+ return [clean_dep("//tensorflow/core/grappler/costs:op_performance_data_cc_impl")]
def tf_protos_grappler():
return if_static(
extra_deps = tf_protos_grappler_impl(),
- otherwise = ["//tensorflow/core/grappler/costs:op_performance_data_cc"],
+ otherwise = [clean_dep("//tensorflow/core/grappler/costs:op_performance_data_cc")],
)
def tf_additional_device_tracer_srcs():
@@ -606,45 +630,45 @@
"@com_google_absl//absl/types:span",
"@com_google_absl//absl/types:optional",
] + if_static(
- ["@nsync//:nsync_cpp"],
- ["@nsync//:nsync_headers"],
+ [clean_dep("@nsync//:nsync_cpp")],
+ [clean_dep("@nsync//:nsync_headers")],
)
def tf_additional_core_deps():
return select({
- "//tensorflow:android": [],
- "//tensorflow:ios": [],
- "//tensorflow:linux_s390x": [],
- "//tensorflow:windows": [],
- "//tensorflow:no_gcp_support": [],
+ clean_dep("//tensorflow:android"): [],
+ clean_dep("//tensorflow:ios"): [],
+ clean_dep("//tensorflow:linux_s390x"): [],
+ clean_dep("//tensorflow:windows"): [],
+ clean_dep("//tensorflow:no_gcp_support"): [],
"//conditions:default": [
"//tensorflow/core/platform/cloud:gcs_file_system",
],
}) + select({
- "//tensorflow:android": [],
- "//tensorflow:ios": [],
- "//tensorflow:linux_s390x": [],
- "//tensorflow:windows": [],
- "//tensorflow:no_hdfs_support": [],
+ clean_dep("//tensorflow:android"): [],
+ clean_dep("//tensorflow:ios"): [],
+ clean_dep("//tensorflow:linux_s390x"): [],
+ clean_dep("//tensorflow:windows"): [],
+ clean_dep("//tensorflow:no_hdfs_support"): [],
"//conditions:default": [
- "//tensorflow/core/platform/hadoop:hadoop_file_system",
+ clean_dep("//tensorflow/core/platform/hadoop:hadoop_file_system"),
],
}) + select({
- "//tensorflow:android": [],
- "//tensorflow:ios": [],
- "//tensorflow:linux_s390x": [],
- "//tensorflow:windows": [],
- "//tensorflow:no_aws_support": [],
+ clean_dep("//tensorflow:android"): [],
+ clean_dep("//tensorflow:ios"): [],
+ clean_dep("//tensorflow:linux_s390x"): [],
+ clean_dep("//tensorflow:windows"): [],
+ clean_dep("//tensorflow:no_aws_support"): [],
"//conditions:default": [
- "//tensorflow/core/platform/s3:s3_file_system",
+ clean_dep("//tensorflow/core/platform/s3:s3_file_system"),
],
})
def tf_lib_proto_parsing_deps():
return [
":protos_all_cc",
- "//third_party/eigen3",
- "//tensorflow/core/platform/default/build_config:proto_parsing",
+ clean_dep("//third_party/eigen3"),
+ clean_dep("//tensorflow/core/platform/default/build_config:proto_parsing"),
]
def tf_py_clif_cc(name, visibility = None, **kwargs):
@@ -660,24 +684,24 @@
native.filegroup(name = name + "_pb2")
def tf_additional_binary_deps():
- return ["@nsync//:nsync_cpp"] + if_cuda(
+ return [clean_dep("@nsync//:nsync_cpp")] + if_cuda(
[
- "//tensorflow/stream_executor:cuda_platform",
+ clean_dep("//tensorflow/stream_executor:cuda_platform"),
],
) + if_rocm(
[
- "//tensorflow/stream_executor:rocm_platform",
- "//tensorflow/core/platform/default/build_config:rocm",
+ clean_dep("//tensorflow/stream_executor:rocm_platform"),
+ clean_dep("//tensorflow/core/platform/default/build_config:rocm"),
],
) + [
# TODO(allenl): Split these out into their own shared objects (they are
# here because they are shared between contrib/ op shared objects and
# core).
- "//tensorflow/core/kernels:lookup_util",
- "//tensorflow/core/util/tensor_bundle",
+ clean_dep("//tensorflow/core/kernels:lookup_util"),
+ clean_dep("//tensorflow/core/util/tensor_bundle"),
] + if_mkl_ml(
[
- "//third_party/mkl:intel_binary_blob",
+ clean_dep("//third_party/mkl:intel_binary_blob"),
],
)
@@ -695,15 +719,15 @@
def tf_protobuf_deps():
return if_static(
[
- "@com_google_protobuf//:protobuf",
+ clean_dep("@com_google_protobuf//:protobuf"),
],
- otherwise = ["@com_google_protobuf//:protobuf_headers"],
+ otherwise = [clean_dep("@com_google_protobuf//:protobuf_headers")],
)
def tf_protobuf_compiler_deps():
return if_static(
[
- "@com_google_protobuf//:protobuf",
+ clean_dep("@com_google_protobuf//:protobuf"),
],
- otherwise = ["@com_google_protobuf//:protobuf_headers"],
+ otherwise = [clean_dep("@com_google_protobuf//:protobuf_headers")],
)
diff --git a/tensorflow/core/platform/default/env_time.cc b/tensorflow/core/platform/default/env_time.cc
index 78f3f74..8618fe8 100644
--- a/tensorflow/core/platform/default/env_time.cc
+++ b/tensorflow/core/platform/default/env_time.cc
@@ -20,25 +20,12 @@
namespace tensorflow {
-namespace {
-
-class PosixEnvTime : public EnvTime {
- public:
- PosixEnvTime() {}
-
- uint64 NowNanos() const override {
- struct timespec ts;
- clock_gettime(CLOCK_REALTIME, &ts);
- return (static_cast<uint64>(ts.tv_sec) * kSecondsToNanos +
- static_cast<uint64>(ts.tv_nsec));
- }
-};
-
-} // namespace
-
-EnvTime* EnvTime::Default() {
- static EnvTime* default_env_time = new PosixEnvTime;
- return default_env_time;
+/* static */
+uint64 EnvTime::NowNanos() {
+ struct timespec ts;
+ clock_gettime(CLOCK_REALTIME, &ts);
+ return (static_cast<uint64>(ts.tv_sec) * kSecondsToNanos +
+ static_cast<uint64>(ts.tv_nsec));
}
} // namespace tensorflow
diff --git a/tensorflow/core/platform/default/logging.cc b/tensorflow/core/platform/default/logging.cc
index ba1c32c..98ee225 100644
--- a/tensorflow/core/platform/default/logging.cc
+++ b/tensorflow/core/platform/default/logging.cc
@@ -242,8 +242,7 @@
void LogMessage::GenerateLogMessage() {
static bool log_thread_id = EmitThreadIdFromEnv();
- static EnvTime* env_time = tensorflow::EnvTime::Default();
- uint64 now_micros = env_time->NowMicros();
+ uint64 now_micros = EnvTime::NowMicros();
time_t now_seconds = static_cast<time_t>(now_micros / 1000000);
int32 micros_remainder = static_cast<int32>(now_micros % 1000000);
const size_t time_buffer_size = 30;
diff --git a/tensorflow/core/platform/env.h b/tensorflow/core/platform/env.h
index 932bffc..be8399c 100644
--- a/tensorflow/core/platform/env.h
+++ b/tensorflow/core/platform/env.h
@@ -270,13 +270,13 @@
// provide a routine to get the absolute time.
/// \brief Returns the number of nano-seconds since the Unix epoch.
- virtual uint64 NowNanos() const { return env_time_->NowNanos(); }
+ virtual uint64 NowNanos() const { return EnvTime::NowNanos(); }
/// \brief Returns the number of micro-seconds since the Unix epoch.
- virtual uint64 NowMicros() const { return env_time_->NowMicros(); }
+ virtual uint64 NowMicros() const { return EnvTime::NowMicros(); }
/// \brief Returns the number of seconds since the Unix epoch.
- virtual uint64 NowSeconds() const { return env_time_->NowSeconds(); }
+ virtual uint64 NowSeconds() const { return EnvTime::NowSeconds(); }
/// Sleeps/delays the thread for the prescribed number of micro-seconds.
virtual void SleepForMicroseconds(int64 micros) = 0;
@@ -346,7 +346,6 @@
private:
std::unique_ptr<FileSystemRegistry> file_system_registry_;
TF_DISALLOW_COPY_AND_ASSIGN(Env);
- EnvTime* env_time_ = EnvTime::Default();
};
/// \brief An implementation of Env that forwards all calls to another Env.
diff --git a/tensorflow/core/platform/env_time.h b/tensorflow/core/platform/env_time.h
index 1d879a0..c09c335 100644
--- a/tensorflow/core/platform/env_time.h
+++ b/tensorflow/core/platform/env_time.h
@@ -36,20 +36,27 @@
EnvTime() = default;
virtual ~EnvTime() = default;
- /// \brief Returns a default impl suitable for the current operating
- /// system.
- ///
- /// The result of Default() belongs to this library and must never be deleted.
- static EnvTime* Default();
-
/// \brief Returns the number of nano-seconds since the Unix epoch.
- virtual uint64 NowNanos() const = 0;
+ static uint64 NowNanos();
/// \brief Returns the number of micro-seconds since the Unix epoch.
- virtual uint64 NowMicros() const { return NowNanos() / kMicrosToNanos; }
+ static uint64 NowMicros() { return NowNanos() / kMicrosToNanos; }
/// \brief Returns the number of seconds since the Unix epoch.
- virtual uint64 NowSeconds() const { return NowNanos() / kSecondsToNanos; }
+ static uint64 NowSeconds() { return NowNanos() / kSecondsToNanos; }
+
+ /// \brief A version of NowNanos() that may be overridden by a subclass.
+ virtual uint64 GetOverridableNowNanos() const { return NowNanos(); }
+
+ /// \brief A version of NowMicros() that may be overridden by a subclass.
+ virtual uint64 GetOverridableNowMicros() const {
+ return GetOverridableNowNanos() / kMicrosToNanos;
+ }
+
+ /// \brief A version of NowSeconds() that may be overridden by a subclass.
+ virtual uint64 GetOverridableNowSeconds() const {
+ return GetOverridableNowNanos() / kSecondsToNanos;
+ }
};
} // namespace tensorflow
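Note: after this split, call sites that only need wall-clock time use the static methods with no object at all, while call sites that need injectable time accept an EnvTime* and go through the overridable variants. A hedged sketch (both helper functions are hypothetical):

    #include "tensorflow/core/platform/env_time.h"

    namespace tensorflow {

    // Plain timestamp: no EnvTime instance required.
    uint64 ElapsedMicros(uint64 start_micros) {
      return EnvTime::NowMicros() - start_micros;
    }

    // Mockable timestamp: a test can pass a subclass that overrides
    // GetOverridableNowNanos() to control the outcome.
    bool DeadlineExpired(const EnvTime* env_time, uint64 deadline_nanos) {
      return env_time->GetOverridableNowNanos() >= deadline_nanos;
    }

    }  // namespace tensorflow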
diff --git a/tensorflow/core/platform/port_test.cc b/tensorflow/core/platform/port_test.cc
index f9693d7..94a9e4d 100644
--- a/tensorflow/core/platform/port_test.cc
+++ b/tensorflow/core/platform/port_test.cc
@@ -83,9 +83,9 @@
mutex m;
m.lock();
time_t start = time(nullptr);
- bool result = m.AwaitWithDeadline(
- Condition(&always_false),
- EnvTime::Default()->NowNanos() + 3 * EnvTime::kSecondsToNanos);
+ bool result =
+ m.AwaitWithDeadline(Condition(&always_false),
+ EnvTime::NowNanos() + 3 * EnvTime::kSecondsToNanos);
time_t finish = time(nullptr);
m.unlock();
EXPECT_EQ(result, false);
@@ -107,9 +107,8 @@
woken = true;
m.unlock();
});
- bool result =
- m.AwaitWithDeadline(Condition(&woken), EnvTime::Default()->NowNanos() +
- 3 * EnvTime::kSecondsToNanos);
+ bool result = m.AwaitWithDeadline(
+ Condition(&woken), EnvTime::NowNanos() + 3 * EnvTime::kSecondsToNanos);
time_t finish = time(nullptr);
m.unlock();
EXPECT_EQ(result, true);
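Note: both tests build an absolute deadline as "now plus three seconds" in nanoseconds, which is the form AwaitWithDeadline expects. With EnvTime's conversion constant this is a one-liner (DeadlineFromNow is a hypothetical helper):

    #include "tensorflow/core/platform/env_time.h"

    namespace tensorflow {

    // Absolute deadline `seconds` from now, in nanoseconds since the Unix epoch.
    uint64 DeadlineFromNow(uint64 seconds) {
      return EnvTime::NowNanos() + seconds * EnvTime::kSecondsToNanos;
    }

    }  // namespace tensorflow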
diff --git a/tensorflow/core/platform/windows/env_time.cc b/tensorflow/core/platform/windows/env_time.cc
index f6d77dc..a08216e 100644
--- a/tensorflow/core/platform/windows/env_time.cc
+++ b/tensorflow/core/platform/windows/env_time.cc
@@ -26,62 +26,46 @@
namespace tensorflow {
namespace {
+typedef VOID(WINAPI* FnGetSystemTimePreciseAsFileTime)(LPFILETIME);
+}
-class WindowsEnvTime : public EnvTime {
- public:
- WindowsEnvTime() : GetSystemTimePreciseAsFileTime_(NULL) {
- // GetSystemTimePreciseAsFileTime function is only available in the latest
- // versions of Windows. For that reason, we try to look it up in
- // kernel32.dll at runtime and use an alternative option if the function
- // is not available.
+uint64 EnvTime::NowNanos() {
+ static FnGetSystemTimePreciseAsFileTime precise_time_function =
+ []() -> FnGetSystemTimePreciseAsFileTime {
HMODULE module = GetModuleHandleW(L"kernel32.dll");
if (module != NULL) {
- auto func = (FnGetSystemTimePreciseAsFileTime)GetProcAddress(
+ return (FnGetSystemTimePreciseAsFileTime)GetProcAddress(
module, "GetSystemTimePreciseAsFileTime");
- GetSystemTimePreciseAsFileTime_ = func;
+ } else {
+ return NULL;
}
+ }();
+
+ if (precise_time_function != NULL) {
+ // The GetSystemTimePreciseAsFileTime function is only available in the
+ // latest versions of Windows, so we check for its existence here.
+ // All std::chrono clocks on Windows proved to return values that may
+ // repeat, which is not good enough for some uses.
+ constexpr int64_t kUnixEpochStartTicks = 116444736000000000i64;
+
+ // This interface needs to return system time and not just any time
+ // because it is often used as an argument to TimedWait() on a condition
+ // variable.
+ FILETIME system_time;
+ precise_time_function(&system_time);
+
+ LARGE_INTEGER li;
+ li.LowPart = system_time.dwLowDateTime;
+ li.HighPart = system_time.dwHighDateTime;
+ // Subtract unix epoch start
+ li.QuadPart -= kUnixEpochStartTicks;
+
+ constexpr int64_t kFtToNanoSec = 100;
+ li.QuadPart *= kFtToNanoSec;
+ return li.QuadPart;
}
-
- uint64 NowNanos() const override {
- if (GetSystemTimePreciseAsFileTime_ != NULL) {
- // GetSystemTimePreciseAsFileTime function is only available in latest
- // versions of Windows, so we need to check for its existence here.
- // All std::chrono clocks on Windows proved to return values that may
- // repeat, which is not good enough for some uses.
- constexpr int64_t kUnixEpochStartTicks = 116444736000000000i64;
-
- // This interface needs to return system time and not just any time
- // because it is often used as an argument to TimedWait() on condition
- // variable.
- FILETIME system_time;
- GetSystemTimePreciseAsFileTime_(&system_time);
-
- LARGE_INTEGER li;
- li.LowPart = system_time.dwLowDateTime;
- li.HighPart = system_time.dwHighDateTime;
- // Subtract unix epoch start
- li.QuadPart -= kUnixEpochStartTicks;
-
- constexpr int64_t kFtToNanoSec = 100;
- li.QuadPart *= kFtToNanoSec;
- return li.QuadPart;
- }
- return duration_cast<nanoseconds>(system_clock::now().time_since_epoch())
- .count();
- }
-
- void SleepForMicroseconds(int64 micros) { Sleep(micros / 1000); }
-
- private:
- typedef VOID(WINAPI* FnGetSystemTimePreciseAsFileTime)(LPFILETIME);
- FnGetSystemTimePreciseAsFileTime GetSystemTimePreciseAsFileTime_;
-};
-
-} // namespace
-
-EnvTime* EnvTime::Default() {
- static EnvTime* default_time_env = new WindowsEnvTime;
- return default_time_env;
+ return duration_cast<nanoseconds>(system_clock::now().time_since_epoch())
+ .count();
}
} // namespace tensorflow
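Note: the rewrite replaces the member pointer with a function-local static initialized by an immediately invoked lambda; since C++11 such initialization is guaranteed to run exactly once and to be thread-safe. The same idiom in miniature (ResolveOptionalApi stands in for the GetProcAddress lookup and is hypothetical):

    #include <cstdio>

    using ApiFn = int (*)();

    int FallbackImpl() { return 0; }

    // Hypothetical resolver; returns nullptr when the API is unavailable.
    ApiFn ResolveOptionalApi() { return nullptr; }

    int CallApi() {
      // Resolved once, thread-safely, on first call.
      static ApiFn fn = []() -> ApiFn { return ResolveOptionalApi(); }();
      return fn != nullptr ? fn() : FallbackImpl();
    }

    int main() { std::printf("%d\n", CallApi()); }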
diff --git a/tensorflow/core/profiler/convert/BUILD b/tensorflow/core/profiler/convert/BUILD
index a423eab..51af083 100644
--- a/tensorflow/core/profiler/convert/BUILD
+++ b/tensorflow/core/profiler/convert/BUILD
@@ -14,3 +14,19 @@
"@com_google_absl//absl/algorithm:container",
],
)
+
+cc_library(
+ name = "op_stats_to_tf_stats",
+ srcs = ["op_stats_to_tf_stats.cc"],
+ hdrs = ["op_stats_to_tf_stats.h"],
+ deps = [
+ "//tensorflow/core/profiler/convert:op_metrics_to_record",
+ "//tensorflow/core/profiler/protobuf:op_metrics_proto_cc",
+ "//tensorflow/core/profiler/protobuf:op_stats_proto_cc",
+ "//tensorflow/core/profiler/protobuf:tf_stats_proto_cc",
+ "//tensorflow/core/profiler/utils:op_metrics_db_utils",
+ "//tensorflow/core/profiler/utils:tf_op_utils",
+ "//tensorflow/core/profiler/utils:time_utils",
+ "@com_google_absl//absl/container:flat_hash_set",
+ ],
+)
diff --git a/tensorflow/core/profiler/convert/op_stats_to_tf_stats.cc b/tensorflow/core/profiler/convert/op_stats_to_tf_stats.cc
new file mode 100644
index 0000000..08e73b2
--- /dev/null
+++ b/tensorflow/core/profiler/convert/op_stats_to_tf_stats.cc
@@ -0,0 +1,104 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/profiler/convert/op_stats_to_tf_stats.h"
+
+#include "absl/container/flat_hash_set.h"
+#include "tensorflow/core/profiler/convert/op_metrics_to_record.h"
+#include "tensorflow/core/profiler/protobuf/op_metrics.pb.h"
+#include "tensorflow/core/profiler/protobuf/op_stats.pb.h"
+#include "tensorflow/core/profiler/protobuf/tf_stats.pb.h"
+#include "tensorflow/core/profiler/utils/op_metrics_db_utils.h"
+#include "tensorflow/core/profiler/utils/tf_op_utils.h"
+#include "tensorflow/core/profiler/utils/time_utils.h"
+
+namespace tensorflow {
+namespace profiler {
+namespace {
+
+TfStatsRecord ConvertOpMetricsToTfStatsRecord(
+ bool on_device, const OpMetrics& metrics,
+ double ridge_point_operational_intensity) {
+ TfStatsRecord record;
+ record.set_host_or_device(on_device ? "Device" : "Host");
+ record.set_op_type(metrics.category());
+ record.set_op_name(metrics.name());
+ SetExecutionTimes(metrics, &record);
+ SetRooflineMetrics(metrics, ridge_point_operational_intensity, &record);
+ return record;
+}
+
+TfStatsTable GenerateTfStatsTable(const OpMetricsDb& host_tf_metrics_db,
+ const OpMetricsDb& device_tf_metrics_db,
+ double ridge_point, bool exclude_idle) {
+ TfStatsTable tf_stats_table;
+ TfStatsRecord sentinel;
+ sentinel.set_rank(0);
+ sentinel.set_device_cumulative_total_self_time_as_fraction(0.0);
+ sentinel.set_host_cumulative_total_self_time_as_fraction(0.0);
+ const TfStatsRecord* prev_record = &sentinel;
+
+ // Sets device-side TF stats.
+ uint64 total_device_time_ps = device_tf_metrics_db.total_time_ps();
+ if (exclude_idle) {
+ total_device_time_ps -= IdleTimePs(device_tf_metrics_db);
+ }
+ double total_device_time_us = PicosToMicros(total_device_time_ps);
+ for (const OpMetrics* metrics : SortedOpMetricsDb(device_tf_metrics_db)) {
+ if (exclude_idle && metrics->category() == "IDLE") continue;
+ TfStatsRecord* record = tf_stats_table.add_tf_stats_record();
+ *record = ConvertOpMetricsToTfStatsRecord(
+ /*on_device=*/true, *metrics, ridge_point);
+ SetRankAndDeviceTimeFractions(total_device_time_us, *prev_record, record);
+ prev_record = record;
+ }
+
+ // Sets host-side TF stats.
+ uint64 total_host_time_ps = host_tf_metrics_db.total_time_ps();
+ if (exclude_idle) {
+ total_host_time_ps -= IdleTimePs(host_tf_metrics_db);
+ }
+ double total_host_time_us = PicosToMicros(total_host_time_ps);
+ for (const OpMetrics* metrics :
+ tensorflow::profiler::SortedOpMetricsDb(host_tf_metrics_db)) {
+ if (exclude_idle && metrics->category() == "IDLE") continue;
+ TfStatsRecord* record = tf_stats_table.add_tf_stats_record();
+ *record = ConvertOpMetricsToTfStatsRecord(
+ /*on_device=*/false, *metrics, ridge_point);
+ SetRankAndHostTimeFractions(total_host_time_us, *prev_record, record);
+ prev_record = record;
+ }
+ return tf_stats_table;
+}
+
+} // namespace
+
+TfStatsDatabase ConvertOpStatsToTfStats(const OpStats& op_stats) {
+ const OpMetricsDb& host_tf_metrics_db = op_stats.host_op_metrics_db();
+ OpMetricsDb device_tf_metrics_db =
+ CreateTfMetricsDbFromHloMetricsDb(op_stats.device_op_metrics_db());
+ double ridge_point = op_stats.perf_env().ridge_point();
+ TfStatsDatabase tf_stats_db;
+ *tf_stats_db.mutable_with_idle() =
+ GenerateTfStatsTable(host_tf_metrics_db, device_tf_metrics_db,
+ ridge_point, /*exclude_idle=*/false);
+ *tf_stats_db.mutable_without_idle() =
+ GenerateTfStatsTable(host_tf_metrics_db, device_tf_metrics_db,
+ ridge_point, /*exclude_idle=*/true);
+ return tf_stats_db;
+}
+
+} // namespace profiler
+} // namespace tensorflow
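Note: GenerateTfStatsTable seeds prev_record with a zeroed sentinel so the first real record needs no special case: rank and cumulative time fraction always chain off the previous record. Stripped of the proto types, and assuming SetRank*TimeFractions adds each record's own fraction to the previous cumulative value, the accumulation looks roughly like this:

    #include <cstdio>
    #include <vector>

    int main() {
      // Self-times sorted descending, as SortedOpMetricsDb would return them.
      const std::vector<double> self_time_us = {50.0, 30.0, 20.0};
      const double total_us = 100.0;
      int prev_rank = 0;             // the sentinel's rank
      double prev_cumulative = 0.0;  // the sentinel's cumulative fraction
      for (double t : self_time_us) {
        const int rank = prev_rank + 1;
        const double cumulative = prev_cumulative + t / total_us;
        std::printf("rank=%d cumulative=%.2f\n", rank, cumulative);
        prev_rank = rank;
        prev_cumulative = cumulative;
      }
      return 0;
    }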
diff --git a/tensorflow/core/profiler/convert/op_stats_to_tf_stats.h b/tensorflow/core/profiler/convert/op_stats_to_tf_stats.h
new file mode 100644
index 0000000..3b8a06e
--- /dev/null
+++ b/tensorflow/core/profiler/convert/op_stats_to_tf_stats.h
@@ -0,0 +1,30 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef TENSORFLOW_CORE_PROFILER_CONVERT_OP_STATS_TO_TF_STATS_H_
+#define TENSORFLOW_CORE_PROFILER_CONVERT_OP_STATS_TO_TF_STATS_H_
+
+#include "tensorflow/core/profiler/protobuf/op_stats.pb.h"
+#include "tensorflow/core/profiler/protobuf/tf_stats.pb.h"
+
+namespace tensorflow {
+namespace profiler {
+
+TfStatsDatabase ConvertOpStatsToTfStats(const OpStats& op_stats);
+
+} // namespace profiler
+} // namespace tensorflow
+
+#endif // TENSORFLOW_CORE_PROFILER_CONVERT_OP_STATS_TO_TF_STATS_H_
diff --git a/tensorflow/core/profiler/internal/gpu/cupti_tracer.cc b/tensorflow/core/profiler/internal/gpu/cupti_tracer.cc
index 68d2444..34d6427 100644
--- a/tensorflow/core/profiler/internal/gpu/cupti_tracer.cc
+++ b/tensorflow/core/profiler/internal/gpu/cupti_tracer.cc
@@ -410,7 +410,6 @@
}
void AddKernelActivityEvent(CuptiTraceCollector *collector,
- AnnotationMap *annotation_map,
const CUpti_ActivityKernel4 *kernel) {
CuptiTracerEvent event;
event.type = CuptiTracerEventType::Kernel;
@@ -422,8 +421,8 @@
event.context_id = kernel->contextId;
event.stream_id = kernel->streamId;
event.correlation_id = kernel->correlationId;
- event.annotation =
- annotation_map->LookUp(event.device_id, event.correlation_id);
+ event.annotation = collector->annotation_map()->LookUp(event.device_id,
+ event.correlation_id);
event.kernel_info.registers_per_thread = kernel->registersPerThread;
event.kernel_info.static_shared_memory_usage = kernel->staticSharedMemory;
event.kernel_info.dynamic_shared_memory_usage = kernel->dynamicSharedMemory;
@@ -437,7 +436,6 @@
}
void AddMemcpyActivityEvent(CuptiTraceCollector *collector,
- AnnotationMap *annotation_map,
const CUpti_ActivityMemcpy *memcpy) {
CuptiTracerEvent event;
switch (memcpy->copyKind) {
@@ -469,8 +467,8 @@
event.context_id = memcpy->contextId;
event.stream_id = memcpy->streamId;
event.correlation_id = memcpy->correlationId;
- event.annotation =
- annotation_map->LookUp(event.device_id, event.correlation_id);
+ event.annotation = collector->annotation_map()->LookUp(event.device_id,
+ event.correlation_id);
event.memcpy_info.kind = memcpy->copyKind;
event.memcpy_info.num_bytes = memcpy->bytes;
event.memcpy_info.destination = memcpy->deviceId;
@@ -480,7 +478,6 @@
// Invokes callback upon peer-2-peer memcpy between different GPU devices.
void AddMemcpy2ActivityEvent(CuptiTraceCollector *collector,
- AnnotationMap *annotation_map,
const CUpti_ActivityMemcpy2 *memcpy2) {
CuptiTracerEvent event;
event.type = CuptiTracerEventType::MemcpyP2P;
@@ -492,8 +489,8 @@
event.context_id = memcpy2->contextId;
event.stream_id = memcpy2->streamId;
event.correlation_id = memcpy2->correlationId;
- event.annotation =
- annotation_map->LookUp(event.device_id, event.correlation_id);
+ event.annotation = collector->annotation_map()->LookUp(event.device_id,
+ event.correlation_id);
event.memcpy_info.kind = CUPTI_ACTIVITY_MEMCPY_KIND_PTOP;
event.memcpy_info.num_bytes = memcpy2->bytes;
event.memcpy_info.destination = memcpy2->dstDeviceId;
@@ -587,12 +584,10 @@
public:
CuptiDriverApiHookWithActivityApi(const CuptiTracerOptions &option,
CuptiInterface *cupti_interface,
- CuptiTraceCollector *collector,
- AnnotationMap *annotation_map)
+ CuptiTraceCollector *collector)
: option_(option),
cupti_interface_(cupti_interface),
- collector_(collector),
- annotation_map_(annotation_map) {}
+ collector_(collector) {}
Status OnDriverApiEnter(int device_id, CUpti_CallbackDomain domain,
CUpti_CallbackId cbid,
@@ -622,7 +617,6 @@
const CuptiTracerOptions option_;
CuptiInterface *cupti_interface_;
CuptiTraceCollector *collector_;
- AnnotationMap *annotation_map_;
TF_DISALLOW_COPY_AND_ASSIGN(CuptiDriverApiHookWithActivityApi);
};
@@ -983,11 +977,9 @@
public:
CuptiDriverApiHookWithCudaEvent(const CuptiTracerOptions &option,
CuptiInterface *cupti_interface,
- CuptiTraceCollector *collector,
- AnnotationMap *annotation_map)
+ CuptiTraceCollector *collector)
: option_(option),
cupti_interface_(cupti_interface),
- annotation_map_(annotation_map),
collector_(collector) {
int num_gpus = CuptiTracer::NumGpus();
cuda_event_recorders_.reserve(num_gpus);
@@ -1040,7 +1032,8 @@
if (!dev_id) return errors::Internal("Invalid CUDA stream");
// Because annotations are per device, we need to populate the
// annotation for each device involved.
- annotation_map_->Add(*dev_id, cbdata->correlationId, annotation);
+ collector_->annotation_map()->Add(*dev_id, cbdata->correlationId,
+ annotation);
record_indices.push_back(
cuda_event_recorders_[*dev_id]->StartKernel<CUDA_LAUNCH_PARAMS>(
"CooperativeKernelMultiDevice", *context,
@@ -1162,7 +1155,7 @@
TF_RETURN_IF_ERROR(recorder->Stop());
}
for (auto &recorder : cuda_event_recorders_) {
- TF_RETURN_IF_ERROR(recorder->Flush(annotation_map_));
+ TF_RETURN_IF_ERROR(recorder->Flush(collector_->annotation_map()));
}
return Status::OK();
}
@@ -1230,7 +1223,6 @@
const CuptiTracerOptions option_;
CuptiInterface *cupti_interface_;
- AnnotationMap *annotation_map_;
CuptiTraceCollector *collector_;
std::set<CuptiApiCallbackContext *> callback_contexts_;
std::vector<std::unique_ptr<CudaEventRecorder>> cuda_event_recorders_;
@@ -1372,15 +1364,13 @@
CuptiTraceCollector *collector) {
option_ = option;
collector_ = collector;
- annotation_map_.emplace(option.max_annotation_strings, NumGpus());
-
if (option_->enable_event_based_activity) {
option_->enable_activity_api = false;
cupti_driver_api_hook_.reset(new CuptiDriverApiHookWithCudaEvent(
- option, cupti_interface_, collector, &*annotation_map_));
+ option, cupti_interface_, collector));
} else {
cupti_driver_api_hook_.reset(new CuptiDriverApiHookWithActivityApi(
- option, cupti_interface_, collector, &*annotation_map_));
+ option, cupti_interface_, collector));
}
EnableApiTracing().IgnoreError();
@@ -1401,7 +1391,6 @@
collector_ = nullptr;
option_.reset();
cupti_driver_api_hook_.reset();
- annotation_map_.reset();
}
Status CuptiTracer::EnableApiTracing() {
@@ -1534,7 +1523,8 @@
// Set up the map from correlation id to annotation string.
const auto &annotation = AnnotationStack::Get();
if (!annotation.empty()) {
- annotation_map_->Add(device_id, cbdata->correlationId, annotation);
+ collector_->annotation_map()->Add(device_id, cbdata->correlationId,
+ annotation);
}
TF_RETURN_IF_ERROR(cupti_driver_api_hook_->OnDriverApiExit(
@@ -1593,18 +1583,15 @@
case CUPTI_ACTIVITY_KIND_KERNEL: // sequential
case CUPTI_ACTIVITY_KIND_CONCURRENT_KERNEL:
AddKernelActivityEvent(
- collector_, &*annotation_map_,
- reinterpret_cast<CUpti_ActivityKernel4 *>(record));
+ collector_, reinterpret_cast<CUpti_ActivityKernel4 *>(record));
break;
case CUPTI_ACTIVITY_KIND_MEMCPY:
AddMemcpyActivityEvent(
- collector_, &*annotation_map_,
- reinterpret_cast<CUpti_ActivityMemcpy *>(record));
+ collector_, reinterpret_cast<CUpti_ActivityMemcpy *>(record));
break;
case CUPTI_ACTIVITY_KIND_MEMCPY2:
AddMemcpy2ActivityEvent(
- collector_, &*annotation_map_,
- reinterpret_cast<CUpti_ActivityMemcpy2 *>(record));
+ collector_, reinterpret_cast<CUpti_ActivityMemcpy2 *>(record));
break;
case CUPTI_ACTIVITY_KIND_OVERHEAD:
AddCuptiOverheadActivityEvent(
diff --git a/tensorflow/core/profiler/internal/gpu/cupti_tracer.h b/tensorflow/core/profiler/internal/gpu/cupti_tracer.h
index 23d0e5c..bcfe1c2 100644
--- a/tensorflow/core/profiler/internal/gpu/cupti_tracer.h
+++ b/tensorflow/core/profiler/internal/gpu/cupti_tracer.h
@@ -132,8 +132,6 @@
bool enable_event_based_activity = false;
bool required_callback_api_events = true;
- // Maximum number of annotation strings that we can accommodate.
- uint64 max_annotation_strings = 1024 * 1024;
// The callback ids that will be enabled and monitored; if empty, all
// callback ids will be enabled using the Callback API.
// We only care about the CUPTI_CB_DOMAIN_DRIVER_API domain for now. It is kind of
@@ -154,24 +152,10 @@
uint64 max_callback_api_events = 2 * 1024 * 1024;
// Maximum number of events to collect from activity API; if -1, no limit.
uint64 max_activity_api_events = 2 * 1024 * 1024;
-};
-
-class CuptiTraceCollector {
- public:
- explicit CuptiTraceCollector(const CuptiTracerCollectorOptions& options)
- : options_(options) {}
- virtual ~CuptiTraceCollector() {}
-
- virtual void AddEvent(CuptiTracerEvent&& event) = 0;
- virtual void OnEventsDropped(const std::string& reason,
- uint32 num_events) = 0;
- virtual void Flush() = 0;
-
- protected:
- CuptiTracerCollectorOptions options_;
-
- private:
- TF_DISALLOW_COPY_AND_ASSIGN(CuptiTraceCollector);
+ // Maximum number of annotation strings that we can accommodate.
+ uint64 max_annotation_strings = 1024 * 1024;
+ // Number of GPUs involved.
+ uint32 num_gpus;
};
class AnnotationMap {
@@ -198,6 +182,29 @@
TF_DISALLOW_COPY_AND_ASSIGN(AnnotationMap);
};
+class CuptiTraceCollector {
+ public:
+ explicit CuptiTraceCollector(const CuptiTracerCollectorOptions& options)
+ : options_(options),
+ annotation_map_(options.max_annotation_strings, options.num_gpus) {}
+ virtual ~CuptiTraceCollector() {}
+
+ virtual void AddEvent(CuptiTracerEvent&& event) = 0;
+ virtual void OnEventsDropped(const std::string& reason,
+ uint32 num_events) = 0;
+ virtual void Flush() = 0;
+
+ AnnotationMap* annotation_map() { return &annotation_map_; }
+
+ protected:
+ CuptiTracerCollectorOptions options_;
+
+ private:
+ AnnotationMap annotation_map_;
+
+ TF_DISALLOW_COPY_AND_ASSIGN(CuptiTraceCollector);
+};
+
class CuptiDriverApiHook {
public:
virtual ~CuptiDriverApiHook() {}
@@ -259,7 +266,6 @@
absl::optional<CuptiTracerOptions> option_;
CuptiInterface* cupti_interface_ = nullptr;
CuptiTraceCollector* collector_ = nullptr;
- absl::optional<AnnotationMap> annotation_map_;
bool api_tracing_enabled_ = false;
// Cupti handle for driver or runtime API callbacks. Cupti permits a single
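Note: with the AnnotationMap owned by CuptiTraceCollector, producers and consumers share one map through the collector instead of threading a separate pointer through every hook. A minimal collector against the interface above (CountingCollector is hypothetical; annotation_map() is inherited and already sized from options):

    // Counts events; relies only on the declarations shown in this header.
    class CountingCollector : public CuptiTraceCollector {
     public:
      explicit CountingCollector(const CuptiTracerCollectorOptions& options)
          : CuptiTraceCollector(options) {}

      void AddEvent(CuptiTracerEvent&& event) override { ++num_events_; }
      void OnEventsDropped(const std::string& reason,
                           uint32 num_events) override {}
      void Flush() override {}

     private:
      int num_events_ = 0;
    };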
diff --git a/tensorflow/core/profiler/internal/gpu/device_tracer.cc b/tensorflow/core/profiler/internal/gpu/device_tracer.cc
index 118e2b8..1388fab 100644
--- a/tensorflow/core/profiler/internal/gpu/device_tracer.cc
+++ b/tensorflow/core/profiler/internal/gpu/device_tracer.cc
@@ -45,26 +45,14 @@
class StepStatsCuptiTracerAdaptor : public CuptiTraceCollector {
public:
StepStatsCuptiTracerAdaptor(const CuptiTracerCollectorOptions& option,
- const std::string prefix, int num_gpus,
- uint64 start_walltime_ns, uint64 start_gpu_ns,
- StepStatsCollector* trace_collector)
+ uint64 start_walltime_ns, uint64 start_gpu_ns)
: CuptiTraceCollector(option),
- trace_collector_(trace_collector),
num_callback_events_(0),
num_activity_events_(0),
start_walltime_ns_(start_walltime_ns),
start_gpu_ns_(start_gpu_ns),
- num_gpus_(num_gpus),
- per_device_adaptor_(num_gpus) {
- for (int i = 0; i < num_gpus; ++i) { // for each device id.
- per_device_adaptor_[i].stream_device =
- strings::StrCat(prefix, "/device:GPU:", i, "/stream:");
- per_device_adaptor_[i].memcpy_device =
- strings::StrCat(prefix, "/device:GPU:", i, "/memcpy");
- per_device_adaptor_[i].sync_device =
- strings::StrCat(prefix, "/device:GPU:", i, "/sync");
- }
- }
+ num_gpus_(option.num_gpus),
+ per_device_adaptor_(option.num_gpus) {}
void AddEvent(CuptiTracerEvent&& event) override {
if (event.device_id >= num_gpus_) return;
@@ -84,18 +72,18 @@
per_device_adaptor_[event.device_id].AddEvent(std::move(event));
}
void OnEventsDropped(const std::string& reason, uint32 num_events) override {}
- void Flush() override {
+ void Flush() override {}
+ void Export(StepStatsCollector* trace_collector) {
LOG(INFO) << " GpuTracer has collected " << num_callback_events_
<< " callback api events and " << num_activity_events_
<< " activity events.";
for (int i = 0; i < num_gpus_; ++i) {
- per_device_adaptor_[i].Flush(trace_collector_, start_walltime_ns_,
+ per_device_adaptor_[i].Flush(trace_collector, i, start_walltime_ns_,
start_gpu_ns_);
}
}
private:
- StepStatsCollector* trace_collector_;
std::atomic<int> num_callback_events_;
std::atomic<int> num_activity_events_;
uint64 start_walltime_ns_;
@@ -125,9 +113,12 @@
events.emplace_back(std::move(event));
}
}
- void Flush(StepStatsCollector* collector, uint64 start_walltime_ns,
- uint64 start_gpu_ns) {
+ void Flush(StepStatsCollector* collector, int32 device_ordinal,
+ uint64 start_walltime_ns, uint64 start_gpu_ns) {
absl::MutexLock lock(&mutex);
+ stream_device = absl::StrCat("/device:GPU:", device_ordinal, "/stream:");
+ memcpy_device = absl::StrCat("/device:GPU:", device_ordinal, "/memcpy");
+ sync_device = absl::StrCat("/device:GPU:", device_ordinal, "/sync");
for (auto& event : events) {
NodeExecStats* ns = new NodeExecStats;
ns->set_all_start_micros(
@@ -224,7 +215,7 @@
class GpuTracer : public profiler::ProfilerInterface {
public:
GpuTracer(CuptiTracer* cupti_tracer, CuptiInterface* cupti_interface)
- : cupti_tracer_(cupti_tracer), trace_collector_(&step_stats_) {
+ : cupti_tracer_(cupti_tracer) {
VLOG(1) << "GpuTracer created.";
}
~GpuTracer() override {}
@@ -254,7 +245,6 @@
CuptiTracer* cupti_tracer_;
CuptiTracerOptions options_;
StepStats step_stats_;
- StepStatsCollector trace_collector_;
std::unique_ptr<StepStatsCuptiTracerAdaptor> step_stats_cupti_adaptor_;
};
@@ -313,12 +303,11 @@
#endif
CuptiTracerCollectorOptions collector_options;
+ collector_options.num_gpus = cupti_tracer_->NumGpus();
uint64 start_gputime_ns = CuptiTracer::GetTimestamp();
- uint64 start_walltime_ns = tensorflow::EnvTime::Default()->NowNanos();
- int num_gpus = cupti_tracer_->NumGpus();
+ uint64 start_walltime_ns = tensorflow::EnvTime::NowNanos();
step_stats_cupti_adaptor_ = absl::make_unique<StepStatsCuptiTracerAdaptor>(
- collector_options, "", num_gpus, start_walltime_ns, start_gputime_ns,
- &trace_collector_);
+ collector_options, start_walltime_ns, start_gputime_ns);
AnnotationStack::Enable(true);
cupti_tracer_->Enable(options_, step_stats_cupti_adaptor_.get());
@@ -365,7 +354,11 @@
return Status::OK();
case State::kStoppedOk: {
// Input run_metadata is shared by profiler interfaces, we need append.
- trace_collector_.Finalize();
+ StepStatsCollector trace_collector(&step_stats_);
+ if (step_stats_cupti_adaptor_) {
+ step_stats_cupti_adaptor_->Export(&trace_collector);
+ }
+ trace_collector.Finalize();
for (auto& dev_stats : *step_stats_.mutable_dev_stats()) {
run_metadata->mutable_step_stats()->add_dev_stats()->Swap(&dev_stats);
}
diff --git a/tensorflow/core/profiler/lib/traceme.cc b/tensorflow/core/profiler/lib/traceme.cc
index 7d02cfa..a267f24 100644
--- a/tensorflow/core/profiler/lib/traceme.cc
+++ b/tensorflow/core/profiler/lib/traceme.cc
@@ -32,14 +32,14 @@
absl::string_view activity_name) {
uint64 activity_id = NewActivityId();
TraceMeRecorder::Record({activity_id, string(activity_name),
- /*start_time=*/EnvTime::Default()->NowNanos(),
+ /*start_time=*/EnvTime::NowNanos(),
/*end_time=*/0});
return activity_id;
}
/* static */ void TraceMe::ActivityEndImpl(uint64 activity_id) {
TraceMeRecorder::Record({activity_id, /*name=*/"", /*start_time=*/0,
- /*end_time=*/EnvTime::Default()->NowNanos()});
+ /*end_time=*/EnvTime::NowNanos()});
}
} // namespace profiler
diff --git a/tensorflow/core/profiler/lib/traceme.h b/tensorflow/core/profiler/lib/traceme.h
index 4721795..1573a06 100644
--- a/tensorflow/core/profiler/lib/traceme.h
+++ b/tensorflow/core/profiler/lib/traceme.h
@@ -82,7 +82,7 @@
DCHECK_GE(level, 1);
if (TF_PREDICT_FALSE(TraceMeRecorder::Active(level))) {
new (&no_init_.name) string(activity_name);
- start_time_ = EnvTime::Default()->NowNanos();
+ start_time_ = EnvTime::NowNanos();
} else {
start_time_ = kUntracedActivity;
}
@@ -97,7 +97,7 @@
DCHECK_GE(level, 1);
if (TF_PREDICT_FALSE(TraceMeRecorder::Active(level))) {
new (&no_init_.name) string(std::move(activity_name));
- start_time_ = EnvTime::Default()->NowNanos();
+ start_time_ = EnvTime::NowNanos();
} else {
start_time_ = kUntracedActivity;
}
@@ -127,7 +127,7 @@
DCHECK_GE(level, 1);
if (TF_PREDICT_FALSE(TraceMeRecorder::Active(level))) {
new (&no_init_.name) string(name_generator());
- start_time_ = EnvTime::Default()->NowNanos();
+ start_time_ = EnvTime::NowNanos();
} else {
start_time_ = kUntracedActivity;
}
@@ -148,7 +148,7 @@
if (TF_PREDICT_FALSE(start_time_ != kUntracedActivity)) {
if (TF_PREDICT_TRUE(TraceMeRecorder::Active())) {
TraceMeRecorder::Record({kCompleteActivity, std::move(no_init_.name),
- start_time_, EnvTime::Default()->NowNanos()});
+ start_time_, EnvTime::NowNanos()});
}
no_init_.name.~string();
start_time_ = kUntracedActivity;
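Note: every call site above now reads the clock through the static EnvTime::NowNanos(). For reference, the two usual ways to use TraceMe (a sketch assuming the public ActivityStart/ActivityEnd wrappers around the Impl functions changed in traceme.cc):

    #include "tensorflow/core/profiler/lib/traceme.h"

    void RunStep() {
      // RAII form: records the start on construction and the end on destruction.
      tensorflow::profiler::TraceMe trace("RunStep");
      // ... work ...
    }

    // Split form for activities that start and end in different scopes.
    tensorflow::uint64 BeginTransfer() {
      return tensorflow::profiler::TraceMe::ActivityStart("Transfer");
    }

    void EndTransfer(tensorflow::uint64 id) {
      tensorflow::profiler::TraceMe::ActivityEnd(id);
    }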
diff --git a/tensorflow/core/profiler/utils/BUILD b/tensorflow/core/profiler/utils/BUILD
index 189dfb1..edba7b3 100644
--- a/tensorflow/core/profiler/utils/BUILD
+++ b/tensorflow/core/profiler/utils/BUILD
@@ -30,6 +30,20 @@
)
cc_library(
+ name = "op_metrics_db_utils",
+ srcs = ["op_metrics_db_utils.cc"],
+ hdrs = ["op_metrics_db_utils.h"],
+ deps = [
+ ":math_utils",
+ ":tf_op_utils",
+ "//tensorflow/core:tflite_portable_logging",
+ "//tensorflow/core/profiler/protobuf:op_metrics_proto_cc",
+ "@com_google_absl//absl/container:flat_hash_map",
+ "@com_google_absl//absl/strings",
+ ],
+)
+
+cc_library(
name = "tf_op_utils",
srcs = ["tf_op_utils.cc"],
hdrs = ["tf_op_utils.h"],
diff --git a/tensorflow/core/profiler/utils/op_metrics_db_utils.cc b/tensorflow/core/profiler/utils/op_metrics_db_utils.cc
new file mode 100644
index 0000000..47bc798
--- /dev/null
+++ b/tensorflow/core/profiler/utils/op_metrics_db_utils.cc
@@ -0,0 +1,111 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/profiler/utils/op_metrics_db_utils.h"
+
+#include "absl/strings/string_view.h"
+#include "tensorflow/core/platform/logging.h"
+#include "tensorflow/core/profiler/protobuf/op_metrics.pb.h"
+#include "tensorflow/core/profiler/utils/math_utils.h"
+#include "tensorflow/core/profiler/utils/tf_op_utils.h"
+
+namespace tensorflow {
+namespace profiler {
+namespace {
+
+class DeviceTfOpMetricsDbBuilder : public OpMetricsDbBuilder {
+ public:
+ explicit DeviceTfOpMetricsDbBuilder(OpMetricsDb* db)
+ : OpMetricsDbBuilder(db) {}
+
+ void UpdateTfOpMetricsWithHloOpMetrics(absl::string_view tf_op_name,
+ absl::string_view tf_op_type,
+ const OpMetrics& hlo_op_metrics) {
+ OpMetrics* tf_op_metrics = OpMetricsDbBuilder::LookupOrInsertNewOpMetrics(
+ /*hlo_module_id=*/0, tf_op_name);
+ if (tf_op_metrics->category().empty())
+ tf_op_metrics->set_category(tf_op_type.data(), tf_op_type.size());
+ // The occurrence count of a TF-op is the maximum among the occurrence
+ // counts of all HLO-ops that it contains.
+ tf_op_metrics->set_occurrences(
+ std::max(tf_op_metrics->occurrences(), hlo_op_metrics.occurrences()));
+ tf_op_metrics->set_time_ps(tf_op_metrics->time_ps() +
+ hlo_op_metrics.time_ps());
+ tf_op_metrics->set_self_time_ps(tf_op_metrics->self_time_ps() +
+ hlo_op_metrics.self_time_ps());
+ tf_op_metrics->set_flops(tf_op_metrics->flops() + hlo_op_metrics.flops());
+ tf_op_metrics->set_bytes_accessed(tf_op_metrics->bytes_accessed() +
+ hlo_op_metrics.bytes_accessed());
+ }
+};
+
+} // namespace
+
+OpMetricsDbBuilder::OpMetricsDbBuilder(OpMetricsDb* db)
+ : db_(db) {
+ DCHECK_NE(db_, nullptr);
+ DCHECK_EQ(db_->metrics_db_size(), 0);
+}
+
+OpMetrics* OpMetricsDbBuilder::LookupOrInsertNewOpMetrics(
+ uint64 hlo_module_id, absl::string_view name) {
+ OpMetrics*& op_metrics = op_metrics_map_[hlo_module_id][name];
+ if (op_metrics == nullptr) {
+ op_metrics = db_->add_metrics_db();
+ op_metrics->set_hlo_module_id(hlo_module_id);
+ op_metrics->set_name(name.data(), name.size());
+ }
+ return op_metrics;
+}
+
+double IdleTimeRatio(const OpMetricsDb& metrics_db) {
+ return 1.0 -
+ SafeDivide(metrics_db.total_op_time_ps(), metrics_db.total_time_ps());
+}
+
+uint64 IdleTimePs(const OpMetricsDb& metrics_db) {
+ return metrics_db.total_time_ps() - metrics_db.total_op_time_ps();
+}
+
+void AddIdleOp(OpMetricsDb* db) {
+ uint64 idle_time_ps = IdleTimePs(*db);
+ OpMetrics* metrics = db->add_metrics_db();
+ metrics->set_name("IDLE");
+ metrics->set_category("IDLE");
+ metrics->set_occurrences(1);
+ metrics->set_time_ps(idle_time_ps);
+ metrics->set_self_time_ps(idle_time_ps);
+}
+
+OpMetricsDb CreateTfMetricsDbFromHloMetricsDb(
+ const OpMetricsDb& hlo_metrics_db) {
+ OpMetricsDb tf_op_metrics_db;
+ DeviceTfOpMetricsDbBuilder builder(&tf_op_metrics_db);
+ for (const auto& hlo_op_metrics : hlo_metrics_db.metrics_db()) {
+ if (!hlo_op_metrics.provenance().empty()) {
+ TfOp tf_op = ParseTfOpFullname(hlo_op_metrics.provenance());
+ builder.UpdateTfOpMetricsWithHloOpMetrics(tf_op.name, tf_op.type,
+ hlo_op_metrics);
+ } else {
+ DCHECK_EQ(hlo_op_metrics.name(), "IDLE");
+ builder.UpdateTfOpMetricsWithHloOpMetrics("IDLE", "IDLE", hlo_op_metrics);
+ }
+ }
+ tf_op_metrics_db.set_total_op_time_ps(hlo_metrics_db.total_op_time_ps());
+ tf_op_metrics_db.set_total_time_ps(hlo_metrics_db.total_time_ps());
+ return tf_op_metrics_db;
+}
+} // namespace profiler
+} // namespace tensorflow
diff --git a/tensorflow/core/profiler/utils/op_metrics_db_utils.h b/tensorflow/core/profiler/utils/op_metrics_db_utils.h
new file mode 100644
index 0000000..52b895e
--- /dev/null
+++ b/tensorflow/core/profiler/utils/op_metrics_db_utils.h
@@ -0,0 +1,76 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef TENSORFLOW_CORE_PROFILER_UTILS_OP_METRICS_DB_UTILS_H_
+#define TENSORFLOW_CORE_PROFILER_UTILS_OP_METRICS_DB_UTILS_H_
+
+#include <string>
+
+#include "absl/container/flat_hash_map.h"
+#include "absl/strings/string_view.h"
+#include "tensorflow/core/platform/types.h"
+#include "tensorflow/core/profiler/protobuf/op_metrics.pb.h"
+
+namespace tensorflow {
+namespace profiler {
+// Helps build an op metrics database (which it borrows rather than owns).
+// Enables fast lookup of existing ops and prevents the creation of duplicate
+// ops. It is the user's responsibility to ensure an op metrics database
+// outlives its builder, and that no ops are added to the database outside of
+// the builder.
+class OpMetricsDbBuilder {
+ public:
+ // Create with a borrowed op database.
+ // REQUIRED: The op database must be empty.
+ explicit OpMetricsDbBuilder(OpMetricsDb* db);
+
+ protected:
+ // Looks up the given op name. If it is already in the database,
+ // returns its OpMetrics; otherwise, inserts a new one.
+ OpMetrics* LookupOrInsertNewOpMetrics(uint64 hlo_module_id,
+ absl::string_view name);
+
+ OpMetricsDb* db() { return db_; }
+
+ private:
+ // Map op (hlo_module_id, name) to the corresponding metrics in the op
+ // database.
+ absl::flat_hash_map<uint64 /*hlo_module_id*/,
+ absl::flat_hash_map<std::string /*name*/, OpMetrics*>>
+ op_metrics_map_;
+
+ // The op database.
+ OpMetricsDb* db_;
+};
+
+// Returns the ratio of time that is idle (no op execution) over total time.
+double IdleTimeRatio(const OpMetricsDb& metrics_db);
+
+// Returns the idle time in picoseconds.
+uint64 IdleTimePs(const OpMetricsDb& metrics_db);
+
+// Adds an op representing idle time, i.e., the amount of time spent without any
+// op execution.
+// REQUIRED: All ops must have been added to the database and the total time
+// must have been set.
+void AddIdleOp(OpMetricsDb* db);
+
+// Converts from Hlo-op metrics to Tf-op metrics.
+OpMetricsDb CreateTfMetricsDbFromHloMetricsDb(
+ const OpMetricsDb& hlo_metrics_db);
+} // namespace profiler
+} // namespace tensorflow
+
+#endif // TENSORFLOW_CORE_PROFILER_UTILS_OP_METRICS_DB_UTILS_H_
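Note: the idle-time helpers encode two one-line formulas: IdleTimePs = total_time_ps - total_op_time_ps, and IdleTimeRatio = 1 - total_op_time_ps / total_time_ps. A worked check with made-up numbers:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    int main() {
      const uint64_t total_time_ps = 100, total_op_time_ps = 60;
      const uint64_t idle_ps = total_time_ps - total_op_time_ps;  // IdleTimePs
      const double idle_ratio =
          1.0 - static_cast<double>(total_op_time_ps) / total_time_ps;
      assert(idle_ps == 40);
      assert(std::abs(idle_ratio - 0.4) < 1e-12);
      return 0;
    }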
diff --git a/tensorflow/examples/android/BUILD b/tensorflow/examples/android/BUILD
index 8baf622..506d3be 100644
--- a/tensorflow/examples/android/BUILD
+++ b/tensorflow/examples/android/BUILD
@@ -66,7 +66,6 @@
srcs = glob([
"src/**/*.java",
]),
- aapt_version = "aapt2",
# Package assets from assets dir as well as all model targets. Remove undesired models
# (and corresponding Activities in source) to reduce APK size.
assets = [
diff --git a/tensorflow/lite/BUILD b/tensorflow/lite/BUILD
index 4a112af..2783a38 100644
--- a/tensorflow/lite/BUILD
+++ b/tensorflow/lite/BUILD
@@ -215,7 +215,7 @@
"//tensorflow/lite/c:c_api_internal",
"//tensorflow/lite/core/api",
"//tensorflow/lite/delegates/nnapi:nnapi_delegate",
- "//tensorflow/lite/experimental/resource_variable",
+ "//tensorflow/lite/experimental/resource",
"//tensorflow/lite/nnapi:nnapi_implementation",
"//tensorflow/lite/schema:schema_fbs",
],
diff --git a/tensorflow/lite/c/builtin_op_data.h b/tensorflow/lite/c/builtin_op_data.h
index aa9e4b7..6fc7e1b 100644
--- a/tensorflow/lite/c/builtin_op_data.h
+++ b/tensorflow/lite/c/builtin_op_data.h
@@ -23,6 +23,10 @@
extern "C" {
#endif // __cplusplus
+// TfLiteReshapeParams can't have dynamic data, so we fix the maximum possible
+// number of dimensions.
+#define TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT 8
+
// TODO(aselle): Consider using "if this then that" for testing.
// Useful placeholder to put in otherwise empty structs to avoid size warnings.
@@ -266,7 +270,7 @@
typedef struct {
// TODO(ahentz): We can't have dynamic data in this struct, at least not yet.
// For now we will fix the maximum possible number of dimensions.
- int shape[8];
+ int shape[TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT];
int num_dimensions;
} TfLiteReshapeParams;
diff --git a/tensorflow/lite/core/subgraph.cc b/tensorflow/lite/core/subgraph.cc
index 6ed93f7..8d69523 100644
--- a/tensorflow/lite/core/subgraph.cc
+++ b/tensorflow/lite/core/subgraph.cc
@@ -158,13 +158,13 @@
Subgraph::Subgraph(ErrorReporter* error_reporter,
TfLiteExternalContext** external_contexts,
std::vector<std::unique_ptr<Subgraph>>* subgraphs,
- ResourceVariableMap* resource_variables)
+ resource::ResourceMap* resources)
: external_contexts_(external_contexts),
error_reporter_(error_reporter),
next_execution_plan_index_to_prepare_(0),
next_execution_plan_index_to_plan_allocation_(0),
subgraphs_(subgraphs),
- resource_variables_(resource_variables) {
+ resources_(resources) {
context_.impl_ = static_cast<void*>(this);
context_.ResizeTensor = ResizeTensor;
context_.ReportError = ReportErrorC;
diff --git a/tensorflow/lite/core/subgraph.h b/tensorflow/lite/core/subgraph.h
index 688cffe..e679626 100644
--- a/tensorflow/lite/core/subgraph.h
+++ b/tensorflow/lite/core/subgraph.h
@@ -24,7 +24,7 @@
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/core/api/profiler.h"
#include "tensorflow/lite/delegates/nnapi/nnapi_delegate.h"
-#include "tensorflow/lite/experimental/resource_variable/resource_variable.h"
+#include "tensorflow/lite/experimental/resource/resource_base.h"
#include "tensorflow/lite/memory_planner.h"
#include "tensorflow/lite/util.h"
@@ -40,7 +40,7 @@
Subgraph(ErrorReporter* error_reporter,
TfLiteExternalContext** external_contexts,
std::vector<std::unique_ptr<Subgraph>>* subgraphs,
- ResourceVariableMap* resource_variables);
+ resource::ResourceMap* resources);
Subgraph(const Subgraph&) = delete;
@@ -166,7 +166,7 @@
// WARNING: Experimental interface, subject to change.
// TODO(ycling): Move this function to an external context interface.
- ResourceVariableMap& resource_variables() { return *resource_variables_; }
+ resource::ResourceMap& resources() { return *resources_; }
size_t tensors_size() const { return tensors_.size(); }
@@ -635,9 +635,8 @@
// `check_cancelled_func_`.
void* cancellation_data_ = nullptr;
- // A map of resource variables. Owned by interpreter and shared by multiple
- // subgraphs.
- ResourceVariableMap* resource_variables_ = nullptr;
+ // A map of resources. Owned by interpreter and shared by multiple subgraphs.
+ resource::ResourceMap* resources_ = nullptr;
};
} // namespace tflite
diff --git a/tensorflow/lite/delegates/flex/buffer_map_test.cc b/tensorflow/lite/delegates/flex/buffer_map_test.cc
index 6b09b69..72c49b1 100644
--- a/tensorflow/lite/delegates/flex/buffer_map_test.cc
+++ b/tensorflow/lite/delegates/flex/buffer_map_test.cc
@@ -34,7 +34,7 @@
template <typename T>
UniqueTfLiteTensor MakeLiteTensor(const std::vector<int>& shape,
const std::vector<T>& data) {
- auto tensor = UniqueTfLiteTensor(new TfLiteTensor, [](TfLiteTensor* t) {
+ auto tensor = UniqueTfLiteTensor(new TfLiteTensor(), [](TfLiteTensor* t) {
TfLiteTensorDataFree(t);
TfLiteIntArrayFree(t->dims);
delete t;
@@ -42,9 +42,6 @@
tensor->allocation_type = kTfLiteDynamic;
tensor->type = typeToTfLiteType<T>();
tensor->dims = ConvertVectorToTfLiteIntArray(shape);
- tensor->data.raw = nullptr;
- tensor->is_variable = false;
- memset(&tensor->quantization, 0, sizeof(TfLiteQuantization));
TfLiteTensorRealloc(data.size() * sizeof(T), tensor.get());
memcpy(tensor->data.raw, data.data(), data.size() * sizeof(T));
return tensor;
@@ -53,7 +50,7 @@
template <>
UniqueTfLiteTensor MakeLiteTensor<string>(const std::vector<int>& shape,
const std::vector<string>& data) {
- auto tensor = UniqueTfLiteTensor(new TfLiteTensor, [](TfLiteTensor* t) {
+ auto tensor = UniqueTfLiteTensor(new TfLiteTensor(), [](TfLiteTensor* t) {
TfLiteTensorDataFree(t);
TfLiteIntArrayFree(t->dims);
delete t;
@@ -61,9 +58,6 @@
tensor->allocation_type = kTfLiteDynamic;
tensor->type = typeToTfLiteType<string>();
tensor->dims = ConvertVectorToTfLiteIntArray(shape);
- tensor->data.raw = nullptr;
- tensor->is_variable = false;
- memset(&tensor->quantization, 0, sizeof(TfLiteQuantization));
TfLiteTensorRealloc(data.size() * sizeof(string), tensor.get());
DynamicBuffer b;
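Note: switching `new TfLiteTensor` to `new TfLiteTensor()` is what makes the deleted memset/nullptr lines safe to drop. For a struct with no user-provided constructor, the empty parentheses request value-initialization, which zeroes every member; without them the members are left indeterminate. In miniature:

    #include <cassert>

    struct Pod {
      int n;
      void* p;
    };

    int main() {
      Pod* a = new Pod;    // default-initialized: n and p are indeterminate
      Pod* b = new Pod();  // value-initialized: n == 0, p == nullptr
      assert(b->n == 0 && b->p == nullptr);
      delete a;
      delete b;
      return 0;
    }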
diff --git a/tensorflow/lite/delegates/gpu/api.h b/tensorflow/lite/delegates/gpu/api.h
index cf3cd26..ae54cd4 100644
--- a/tensorflow/lite/delegates/gpu/api.h
+++ b/tensorflow/lite/delegates/gpu/api.h
@@ -268,6 +268,30 @@
virtual Status Run() = 0;
};
+// Encapsulated compilation/runtime tradeoffs.
+enum class InferenceUsage {
+ UNKNOWN,
+
+ // InferenceRunner will be used only once. Therefore, it is important to
+ // minimize bootstrap time as well.
+ FAST_SINGLE_ANSWER,
+
+ // Prefer maximizing the throughput. The same inference runner will be
+ // used repeatedly on different inputs.
+ SUSTAINED_SPEED,
+};
+
+// Defines aspects to control while instantiating a runner.
+enum class InferencePriority {
+ UNKNOWN,
+
+ MIN_LATENCY,
+
+ MAX_PRECISION,
+
+ MIN_MEMORY_USAGE,
+};
+
} // namespace gpu
} // namespace tflite
diff --git a/tensorflow/lite/delegates/gpu/cl/BUILD b/tensorflow/lite/delegates/gpu/cl/BUILD
index 6eb475c..95a1d84 100644
--- a/tensorflow/lite/delegates/gpu/cl/BUILD
+++ b/tensorflow/lite/delegates/gpu/cl/BUILD
@@ -272,6 +272,7 @@
"//tensorflow/lite:kernel_api",
"//tensorflow/lite/c:c_api_internal",
"//tensorflow/lite/delegates/gpu:api",
+ "//tensorflow/lite/delegates/gpu:delegate",
"//tensorflow/lite/delegates/gpu/common:model",
"//tensorflow/lite/delegates/gpu/common:model_builder",
"//tensorflow/lite/delegates/gpu/common:model_transformer",
diff --git a/tensorflow/lite/delegates/gpu/cl/api.h b/tensorflow/lite/delegates/gpu/cl/api.h
index b4579b0..ec47c3b 100644
--- a/tensorflow/lite/delegates/gpu/cl/api.h
+++ b/tensorflow/lite/delegates/gpu/cl/api.h
@@ -48,24 +48,6 @@
namespace gpu {
namespace cl {
-enum class InferenceUsage {
- // InferenceRunner will be used only once. Therefore, it is important to
- // minimize bootstrap time as well.
- FAST_SINGLE_ANSWER,
-
- // Prefer maximizing the throughput. Same inference runner will be used
- // repeatedly on different inputs.
- SUSTAINED_SPEED,
-};
-
-enum class InferencePriority {
- MIN_LATENCY,
-
- MAX_PRECISION,
-
- MIN_MEMORY_USAGE,
-};
-
struct InferenceOptions {
InferenceUsage usage = InferenceUsage::SUSTAINED_SPEED;
diff --git a/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.h b/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.h
index 1b44262..5cb3225 100644
--- a/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.h
+++ b/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.h
@@ -21,6 +21,7 @@
#include <EGL/egl.h>
#include <GLES3/gl31.h>
#include "tensorflow/lite/c/c_api_internal.h"
+#include "tensorflow/lite/delegates/gpu/delegate.h"
#ifdef SWIG
#define TFL_CAPI_EXPORT
@@ -40,11 +41,6 @@
extern "C" {
#endif // __cplusplus
-enum TfLiteGpuInferencePriority {
- TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION = 0,
- TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY = 1,
-};
-
// Shader compilation options.
typedef struct {
// When set to zero, computations are carried out in 32-bit floating point.
diff --git a/tensorflow/lite/delegates/gpu/delegate.cc b/tensorflow/lite/delegates/gpu/delegate.cc
index ef69dcf..7c899fb 100644
--- a/tensorflow/lite/delegates/gpu/delegate.cc
+++ b/tensorflow/lite/delegates/gpu/delegate.cc
@@ -37,6 +37,36 @@
namespace gpu {
namespace {
+InferencePriority ToPriority(int32_t priority) {
+ switch (priority) {
+ case TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION:
+ return InferencePriority::MAX_PRECISION;
+ case TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY:
+ return InferencePriority::MIN_LATENCY;
+ case TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE:
+ return InferencePriority::MIN_MEMORY_USAGE;
+ }
+ return InferencePriority::UNKNOWN;
+}
+
+InferenceUsage ToUsage(int32_t usage) {
+ switch (usage) {
+ case TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER:
+ return InferenceUsage::FAST_SINGLE_ANSWER;
+ case TFLITE_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED:
+ return InferenceUsage::SUSTAINED_SPEED;
+ }
+ return InferenceUsage::UNKNOWN;
+}
+
+int GetPriorityPosition(const TfLiteGpuDelegateOptionsV2& options,
+ InferencePriority p) {
+ if (ToPriority(options.inference_priority1) == p) return 1;
+ if (ToPriority(options.inference_priority2) == p) return 2;
+ if (ToPriority(options.inference_priority3) == p) return 3;
+ return 4; // least important
+}
+
// Forward declarations.
TfLiteStatus DelegatePrepare(TfLiteContext* context, TfLiteDelegate* delegate);
@@ -148,21 +178,25 @@
RETURN_IF_ERROR(cl::NewInferenceEnvironment(env_options, &cl_environment_,
&properties));
cl::InferenceOptions options;
- if (options_.is_precision_loss_allowed == 0) {
- options.priority1 = cl::InferencePriority::MAX_PRECISION;
- options.priority2 = cl::InferencePriority::MIN_MEMORY_USAGE;
- options.priority3 = cl::InferencePriority::MIN_LATENCY;
+ // If is_precision_loss_allowed == -1, just use the priorities instead of
+ // paying attention to the is_precision_loss_allowed value.
+ if (options_.is_precision_loss_allowed == -1) {
+ options.priority1 = ToPriority(options_.inference_priority1);
+ options.priority2 = ToPriority(options_.inference_priority2);
+ options.priority3 = ToPriority(options_.inference_priority3);
} else {
- options.priority1 = cl::InferencePriority::MIN_LATENCY;
- options.priority2 = cl::InferencePriority::MIN_MEMORY_USAGE;
- options.priority3 = cl::InferencePriority::MAX_PRECISION;
+ // The user set is_precision_loss_allowed explicitly, so use that value.
+ if (options_.is_precision_loss_allowed == 0) {
+ options.priority1 = InferencePriority::MAX_PRECISION;
+ options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
+ options.priority3 = InferencePriority::MIN_LATENCY;
+ } else {
+ options.priority1 = InferencePriority::MIN_LATENCY;
+ options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
+ options.priority3 = InferencePriority::MAX_PRECISION;
+ }
}
- if (options_.inference_preference ==
- TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER) {
- options.usage = cl::InferenceUsage::FAST_SINGLE_ANSWER;
- } else {
- options.usage = cl::InferenceUsage::SUSTAINED_SPEED;
- }
+ options.usage = ToUsage(options_.inference_preference);
RETURN_IF_ERROR(cl_environment_->NewInferenceBuilder(
options, std::move(*graph), builder));
TFLITE_LOG_PROD_ONCE(tflite::TFLITE_LOG_INFO,
@@ -177,7 +211,13 @@
RETURN_IF_ERROR(
NewInferenceEnvironment(env_options, &gl_environment_, &properties));
gl::InferenceOptions options;
- options.allow_precision_loss = options_.is_precision_loss_allowed != 0;
+ if (options_.is_precision_loss_allowed == -1) {
+ // DEFAULT
+ options.allow_precision_loss =
+ GetPriorityPosition(options_, InferencePriority::MAX_PRECISION) > 1;
+ } else {
+ options.allow_precision_loss = options_.is_precision_loss_allowed != 0;
+ }
options.fuse_operations =
options_.inference_preference !=
TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER;
@@ -276,9 +316,13 @@
TfLiteGpuDelegateOptionsV2 TfLiteGpuDelegateOptionsV2Default() {
TfLiteGpuDelegateOptionsV2 options;
- options.is_precision_loss_allowed = 0;
+  // Set it to -1 so that we can detect whether the user adjusted it later.
+ options.is_precision_loss_allowed = -1;
options.inference_preference =
TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER;
+ options.inference_priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION;
+ options.inference_priority2 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY;
+ options.inference_priority3 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE;
return options;
}
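With the new sentinel default in place, a caller that never touches is_precision_loss_allowed gets behavior driven entirely by the ordered priorities. A hedged end-to-end usage sketch follows; TfLiteGpuDelegateV2Create/TfLiteGpuDelegateV2Delete are assumed to be the create/destroy entry points declared in delegate.h (they are not shown in this hunk):

```
#include "tensorflow/lite/delegates/gpu/delegate.h"
#include "tensorflow/lite/interpreter.h"

// Hedged usage sketch, not part of the patch.
void ApplyGpuDelegate(tflite::Interpreter* interpreter) {
  TfLiteGpuDelegateOptionsV2 options = TfLiteGpuDelegateOptionsV2Default();
  options.inference_preference = TFLITE_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED;
  // Leave is_precision_loss_allowed at its -1 default and let the ordered
  // priorities decide: MAX_PRECISION is not first, so FP16 is permitted.
  options.inference_priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY;
  options.inference_priority2 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE;
  options.inference_priority3 = TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION;

  TfLiteDelegate* delegate = TfLiteGpuDelegateV2Create(&options);  // assumed API
  if (interpreter->ModifyGraphWithDelegate(delegate) != kTfLiteOk) {
    // Creation or graph modification failed; fall back to CPU execution.
  }
  // The delegate must outlive the interpreter; destroy it later with
  // TfLiteGpuDelegateV2Delete(delegate).
}
```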
diff --git a/tensorflow/lite/delegates/gpu/delegate.h b/tensorflow/lite/delegates/gpu/delegate.h
index 5b947be..023ce3c 100644
--- a/tensorflow/lite/delegates/gpu/delegate.h
+++ b/tensorflow/lite/delegates/gpu/delegate.h
@@ -38,8 +38,8 @@
extern "C" {
#endif // __cplusplus
-// Encapsulated precision/compilation/runtime tradeoffs.
-enum TfLiteGpuInferencePreference {
+// Encapsulated compilation/runtime tradeoffs.
+enum TfLiteGpuInferenceUsage {
// The delegate will be used only once; therefore, bootstrap/init time should
// be taken into account.
TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER = 0,
@@ -49,6 +49,12 @@
TFLITE_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED = 1,
};
+enum TfLiteGpuInferencePriority {
+ TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION = 0,
+ TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY = 1,
+ TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE = 2,
+};
+
// IMPORTANT: Always use TfLiteGpuDelegateOptionsV2Default() to create a new
// instance of TfLiteGpuDelegateOptionsV2; otherwise, every newly added option
// may break inference.
@@ -57,15 +63,32 @@
// precision. Otherwise, the GPU may quantize tensors, downcast values, or
// process in FP16 to increase performance. For most models the precision
// loss is acceptable.
+ // [OBSOLETE]: to be removed
int32_t is_precision_loss_allowed;
// Preference is defined in TfLiteGpuInferenceUsage.
int32_t inference_preference;
+
+  // Ordered priorities provide better control over the desired semantics:
+  // priority(n) is more important than priority(n+1), so whenever the
+  // inference engine needs to make a decision, it consults the ordered
+  // priorities to do so.
+  // For example:
+  // MAX_PRECISION at priority1 would not allow precision to be decreased,
+  // but moving it to priority2 or priority3 would allow FP16 computation.
+ //
+ // Priority is defined in TfLiteGpuInferencePriority.
+ int32_t inference_priority1;
+ int32_t inference_priority2;
+ int32_t inference_priority3;
} TfLiteGpuDelegateOptionsV2;
// Populates TfLiteGpuDelegateOptionsV2 as follows:
// is_precision_loss_allowed = -1
// inference_preference = TFLITE_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER
+// priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION
+// priority2 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY
+// priority3 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE
TFL_CAPI_EXPORT TfLiteGpuDelegateOptionsV2 TfLiteGpuDelegateOptionsV2Default();
// Creates a new delegate instance that needs to be destroyed with
diff --git a/tensorflow/lite/delegates/gpu/gl/converters/bhwc_to_phwc4.cc b/tensorflow/lite/delegates/gpu/gl/converters/bhwc_to_phwc4.cc
index 8b42ded..3b37ba2 100644
--- a/tensorflow/lite/delegates/gpu/gl/converters/bhwc_to_phwc4.cc
+++ b/tensorflow/lite/delegates/gpu/gl/converters/bhwc_to_phwc4.cc
@@ -86,7 +86,7 @@
if (shape.b != 1) {
return UnimplementedError("BhwcToPhwc4: Batch size is not equal to 1.");
}
- uint3 workload = uint3(shape.w, shape.h, shape.c);
+ uint3 workload = uint3(shape.w, shape.h, IntegralDivideRoundUp(shape.c, 4));
uint3 num_workgroups = IntegralDivideRoundUp(workload, workgroup_size_);
RETURN_IF_ERROR(program_.SetParameter(
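The workload fix accounts for the PHWC4 layout, which packs four channels into each texel: the z-extent of the compute grid is the number of 4-channel slices, not the raw channel count. A minimal sketch of the ceil division, assuming IntegralDivideRoundUp is ordinary integer ceil division:

```
// Sketch of the presumed IntegralDivideRoundUp semantics.
int IntegralDivideRoundUp(int n, int divisor) {
  return (n + divisor - 1) / divisor;
}
// e.g. shape.c = 10 channels -> IntegralDivideRoundUp(10, 4) == 3 slices,
// whereas the old code dispatched 10 workgroup columns along z.
```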
diff --git a/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_lstm_test.py b/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_lstm_test.py
index de29580..f04a265 100644
--- a/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_lstm_test.py
+++ b/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_lstm_test.py
@@ -88,8 +88,8 @@
"""
# Weights and biases for output softmax layer.
out_weights = tf.Variable(
- tf.random_normal([self.num_units * 2, self.n_classes]))
- out_bias = tf.Variable(tf.random_normal([self.n_classes]))
+ tf.random.normal([self.num_units * 2, self.n_classes]))
+ out_bias = tf.Variable(tf.random.normal([self.n_classes]))
# input image placeholder
x = tf.placeholder(
diff --git a/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_rnn_test.py b/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_rnn_test.py
index aa2cbc8..606f969 100644
--- a/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_rnn_test.py
+++ b/tensorflow/lite/experimental/examples/lstm/bidirectional_sequence_rnn_test.py
@@ -92,8 +92,8 @@
"""
# Weights and biases for output softmax layer.
out_weights = tf.Variable(
- tf.random_normal([self.num_units * 2, self.n_classes]))
- out_bias = tf.Variable(tf.random_normal([self.n_classes]))
+ tf.random.normal([self.num_units * 2, self.n_classes]))
+ out_bias = tf.Variable(tf.random.normal([self.n_classes]))
batch_size = self.batch_size
if is_inference:
diff --git a/tensorflow/lite/experimental/examples/lstm/g3doc/README.md b/tensorflow/lite/experimental/examples/lstm/g3doc/README.md
index 20179d1..87c37d3 100644
--- a/tensorflow/lite/experimental/examples/lstm/g3doc/README.md
+++ b/tensorflow/lite/experimental/examples/lstm/g3doc/README.md
@@ -71,7 +71,7 @@
+ tf.lite.experimental.nn.TFLiteLSTMCell(
self.num_lstm_units, forget_bias=0))
# Weights and biases for output softmax layer.
- out_weights = tf.Variable(tf.random_normal([self.units, self.num_class]))
+ out_weights = tf.Variable(tf.random.normal([self.units, self.num_class]))
@@ -67,7 +67,7 @@ class MnistLstmModel(object):
lstm_cells = tf.nn.rnn_cell.MultiRNNCell(lstm_layers)
# Note here, we use `tf.lite.experimental.nn.dynamic_rnn` and `time_major`
@@ -170,7 +170,7 @@
tf.lite.experimental.nn.TFLiteLSTMCell(
self.num_lstm_units, forget_bias=0))
# Weights and biases for output softmax layer.
- out_weights = tf.Variable(tf.random_normal([self.units, self.num_class]))
+ out_weights = tf.Variable(tf.random.normal([self.units, self.num_class]))
out_bias = tf.Variable(tf.zeros([self.num_class]))
# Transpose input x to make it time major.
diff --git a/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_lstm_test.py b/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_lstm_test.py
index 5d96eb9..d937a11 100644
--- a/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_lstm_test.py
+++ b/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_lstm_test.py
@@ -85,8 +85,8 @@
"""
# Weights and biases for output softmax layer.
out_weights = tf.Variable(
- tf.random_normal([self.num_units, self.n_classes]))
- out_bias = tf.Variable(tf.random_normal([self.n_classes]))
+ tf.random.normal([self.num_units, self.n_classes]))
+ out_bias = tf.Variable(tf.random.normal([self.n_classes]))
# input image placeholder
x = tf.placeholder(
diff --git a/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_rnn_test.py b/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_rnn_test.py
index a9158d2..a3859e1 100644
--- a/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_rnn_test.py
+++ b/tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_rnn_test.py
@@ -81,8 +81,8 @@
"""
# Weights and biases for output softmax layer.
out_weights = tf.Variable(
- tf.random_normal([self.num_units, self.n_classes]))
- out_bias = tf.Variable(tf.random_normal([self.n_classes]))
+ tf.random.normal([self.num_units, self.n_classes]))
+ out_bias = tf.Variable(tf.random.normal([self.n_classes]))
# input image placeholder
x = tf.placeholder(
diff --git a/tensorflow/lite/experimental/ios/BUILD.apple b/tensorflow/lite/experimental/ios/BUILD.apple
index 58a57c0..7a8171d 100644
--- a/tensorflow/lite/experimental/ios/BUILD.apple
+++ b/tensorflow/lite/experimental/ios/BUILD.apple
@@ -1,5 +1,6 @@
# TensorFlow Lite for iOS
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
load("//tensorflow/lite/experimental/ios:ios.bzl", "TFL_MINIMUM_OS_VERSION")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_static_framework")
@@ -69,3 +70,12 @@
"//tensorflow/lite/experimental/c:c_api",
],
)
+
+# Used for building the TensorFlowLiteC framework on TAP.
+build_test(
+ name = "framework_build_test",
+ targets = [
+ ":TensorFlowLiteC_framework",
+ ":TensorFlowLiteCWithSelectTfOps_framework",
+ ],
+)
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/BUILD b/tensorflow/lite/experimental/micro/examples/hello_world/BUILD
index 8b381d1..98d0b54 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/BUILD
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/BUILD
@@ -1,8 +1,5 @@
# Description:
# TensorFlow Lite for Microcontrollers "hello world" example.
-
-package(default_visibility = ["//visibility:public"])
-
licenses(["notice"]) # Apache 2.0
load(
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/README.md b/tensorflow/lite/experimental/micro/examples/hello_world/README.md
index 933cccc..b79d29a 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/README.md
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/README.md
@@ -18,6 +18,7 @@
- [Deploy to Arduino](#deploy-to-arduino)
- [Deploy to SparkFun Edge](#deploy-to-sparkfun-edge)
- [Deploy to STM32F746](#deploy-to-STM32F746)
+- [Deploy to Adafruit devices](#deploy-to-adafruit)
- [Run the tests on a development machine](#run-the-tests-on-a-development-machine)
## Understand the model
@@ -346,11 +347,6 @@
Copying the file will initiate the flashing process. Once this is complete, you
should see an animation on the device's screen.
-
-```
-screen /dev/tty.usbmodem14403 9600
-```
-
In addition to this animation, debug information is logged by the board while
the program is running. To view it, establish a serial connection to the board
using a baud rate of `9600`. On macOS and Linux, the following command should
@@ -373,6 +369,16 @@
To stop viewing the debug output with `screen`, hit `Ctrl+A`, immediately
followed by the `K` key, then hit the `Y` key.
+## Deploy to Adafruit devices <a name="deploy-to-adafruit"></a>
+
+This sample has been tested with the following Adafruit devices. To deploy to
+each device, read the accompanying guide on Adafruit's website.
+
+| Device | Guide |
+|--------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------|
+| [Adafruit EdgeBadge](https://www.adafruit.com/product/4400) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+| [Adafruit TensorFlow Lite for Microcontrollers Kit](https://www.adafruit.com/product/4317) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+
### Run the tests on a development machine
To compile and test this example on a desktop Linux or macOS machine, first
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/arduino/output_handler.cc b/tensorflow/lite/experimental/micro/examples/hello_world/arduino/output_handler.cc
index 3dbbf24..bbe8c5d 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/arduino/output_handler.cc
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/arduino/output_handler.cc
@@ -18,20 +18,17 @@
#include "Arduino.h"
#include "tensorflow/lite/experimental/micro/examples/hello_world/constants.h"
-// The pin of the Arduino's built-in LED
-int led = LED_BUILTIN;
-
-// Track whether the function has run at least once
-bool initialized = false;
-
-// Animates a dot across the screen to represent the current x and y values
+// Adjusts brightness of an LED to represent the current y value
void HandleOutput(tflite::ErrorReporter* error_reporter, float x_value,
float y_value) {
+ // Track whether the function has run at least once
+ static bool is_initialized = false;
+
// Do this only once
- if (!initialized) {
+ if (!is_initialized) {
// Set the LED pin to output
- pinMode(led, OUTPUT);
- initialized = true;
+ pinMode(LED_BUILTIN, OUTPUT);
+ is_initialized = true;
}
// Calculate the brightness of the LED such that y=-1 is fully off
@@ -40,7 +37,7 @@
// Set the brightness of the LED. If the specified pin does not support PWM,
// this will result in the LED being on when brightness > 127, off otherwise.
- analogWrite(led, brightness);
+ analogWrite(LED_BUILTIN, brightness);
// Log the current brightness value for display in the Arduino plotter
error_reporter->Report("%d\n", brightness);
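For context, the brightness computation elided between these hunks maps y_value in [-1, 1] onto the 8-bit PWM range; a hedged sketch (the exact expression in the source may differ):

```
// Sketch: map y_value in [-1, 1] linearly onto [0, 255], so that y = -1 is
// fully off and y = 1 is fully bright.
int brightness = (int)(127.5f * (y_value + 1));
analogWrite(LED_BUILTIN, brightness);
```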
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/create_sine_model.ipynb b/tensorflow/lite/experimental/micro/examples/hello_world/create_sine_model.ipynb
index d5cd852..f776a33 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/create_sine_model.ipynb
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/create_sine_model.ipynb
@@ -4,14 +4,13 @@
"metadata": {
"colab": {
"name": "create_sine_model.ipynb",
- "version": "0.3.2",
"provenance": [],
"collapsed_sections": [],
"toc_visible": true
},
"kernelspec": {
- "name": "python2",
- "display_name": "Python 2"
+ "name": "python3",
+ "display_name": "Python 3"
}
},
"cells": [
@@ -84,16 +83,11 @@
"metadata": {
"id": "53PBJBv1jEtJ",
"colab_type": "code",
- "outputId": "9b035753-60e5-43db-a78d-284ea9de9513",
- "colab": {
- "base_uri": "https://localhost:8080/",
- "height": 479
- }
+ "colab": {}
},
"source": [
"# TensorFlow is an open source machine learning library\n",
- "# Note: The following line is temporary to use v2\n",
- "!pip install tensorflow==2.0.0-beta0\n",
+ "!pip install tensorflow==2.0\n",
"import tensorflow as tf\n",
"# Numpy is a math library\n",
"import numpy as np\n",
@@ -127,7 +121,7 @@
"metadata": {
"id": "uKjg7QeMDsDx",
"colab_type": "code",
- "outputId": "b17a43c6-eba1-4cc7-8807-14fcf5918d01",
+ "outputId": "0387a48d-286d-4ae5-b5c1-b0f2c2b41472",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 269
@@ -138,8 +132,10 @@
"SAMPLES = 1000\n",
"\n",
"# Set a \"seed\" value, so we get the same random numbers each time we run this\n",
- "# notebook\n",
- "np.random.seed(1337)\n",
+ "# notebook. Any number can be used here.\n",
+ "SEED = 1337\n",
+ "np.random.seed(SEED)\n",
+ "tf.random.set_seed(SEED)\n",
"\n",
"# Generate a uniformly distributed set of random numbers in the range from\n",
"# 0 to 2π, which covers a complete sine wave oscillation\n",
@@ -160,7 +156,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAD8CAYAAABzTgP2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzt3X2UVPWd5/H3F1pU1ASRjhLhgDNy\nJpJJgrOVZioa4yQGNJsjzE7iqvRKcpwpH+Im2TkrrZNzNg8ziTSZGcnOEUNHozCgxjUjYtZZMEYH\nZyyBZgYThSgswRFWpBWZaFSQ5rt/3NtD3apb/VQPt27V53VOna77rVvd3/ahvv17NndHRERkwJik\nExARkcaiwiAiIhEqDCIiEqHCICIiESoMIiISocIgIiIRKgwiIhKhwiAiIhEqDCIiEtGWdAKjMWnS\nJJ8+fXrSaYiIpMqWLVtedff2oe5LZWGYPn06vb29SachIpIqZvbicO5TV5KIiESoMIiISIQKg4iI\nRKgwiIhIhAqDiIhEVKUwmNkPzWy/mT1b5nUzs/9pZjvN7Odm9nsFry00sx3hY2E18hERkdGrVovh\nbuDiQV6/BJgRPnLA7QBmNhH4OjAb6AC+bmanViknGYXZs6GtDU45BcaPB7PgMXYsnHsu5PNJZygi\ntVaVwuDuG4ADg9wyD1jpgaeBCWY2GZgLPOruB9z9deBRBi8wUkX5PHziE0EROP74oABs2gT9/fDm\nm/D228fuPXoUtm6Fj33sWKE45RTo6koufxGpjXqNMZwJvFRwvSeMlYuXMLOcmfWaWW9fX1/NEm0V\nM2cGH/IbNgRF4PDhkb3/6NHgfUuWwJgxMGkS9PTUJlcRqa/UDD67e4+7Z9w9094+5IpuidHVBe97\nHxx3HGzfXr3v6w6vvQbXXBO0PDo7q/e9RaT+6lUY9gJTC66nhLFycamifB6mTg3+uu/rgyNHhn6P\nGZx44sh/1uHDsHp1ME6hbiaRdKpXYVgLXBXOTvp94N/c/WVgHTDHzE4NB53nhDGpgp4e+OAHgy6j\nPXsGv7etLegSMoOOjqCr6K23gtbAwGPOnGBsAYL7BtPfHxQijUOIpE+1pqveC+SB3zGzPWZ2tZld\na2bXhrc8AuwCdgI/AK4HcPcDwJ8Dm8PHt8KYVKirK+ja2bZt8PsmTYKnnoJ33w0+zI8ehY0b4+9d\nty5obbgH9y1fDhMnDl4kBsYh1L0kkh7m7knnMGKZTMa1u2p5c+fC+vXlXx8/Pvjrf9EiyGar8zN7\neuDLX4ZDh8rf094ODz1UvZ8pIiNjZlvcPTPUfakZfJah5fPBh365ovC+9wXF4De/gQcfrO4HdC4H\n77wDCxbAuHHx9/T1Bd1amr0k0thUGJpET0/woVu49qDQokXwyivQ3V3bPFatCloNy5eXv+faa1Uc\nRBpZKg/qkajp0+HFMsdvTJwIt9wS/EVfTwM/75prSl9zh+uui94nIo1DLYaUO+20wYvCa68l9+Gb\nywUD2xMmlL529GhQNDQoLdJ4VBhSbPZsOFBmDte0aUFRSFo2C6+/HnRlxVm9WsVBpNGoMKTU3LnB\nvkZxFi2C3bvrms6QuruD1sNJJ5W+ds892pxPpJGoMKTQ7NnxM49OPDH48K31APNoZbPwpS+Vxt3h\nggvUchBpFBp8TplyA80dHeUXpjWSgaK1bFmw+G3AkSNBt9KOHen4PUSamVoMKVKuKMyZk64P0+5u\neOONoJgV27QpaBGJSHJUGFKis7N8S2FdSneXuvrq+PimTcG24CKSDBWGFOjsDLpZik2blq6WQrFc\nLlgIN7AxX6Ht2zXmIJIUFYYGN3dufFGYMKHxZh6NRi4XjDfEWb1aK6RFkqDC0MDy+fjZR2PHwiOP\n1D+fWsnlyq9zuOYaTWUVqTcVhgZ22WWlsZNPhiefbL4dSru7y++v9JnP1DcXkVanwtCgpk+PP1zn\nr/6q+YrCgFwumGFV7OBBmDy5/vmItKpqHdRzsZk9b2Y7zeymmNdvNbOt4eMFMztY8Fp/wWtrq5FP\n2s2dGz8DacGC5t90bt26+Gms+/ZpGqtIvVRcGMxsLHAbcAkwE7jCzCKTDd39v7n7LHefBfwN8HcF\nL7898Jq7X1ppPmlXblyhoyPY0roVbNwYzLgqtmmTjgkVqYdqtBg6gJ3uvsvdDwP3AfMGuf8K4N4q\n/NymtGRJaSzt01JHY/fu+H2V7rqr7qmItJxqFIYzgZcKrveEsRJmNg04C/hZQfgEM+s1s6fNbH4V\n8kmtmTNhzZpobMaM5piWOhqPPloae+01zVISqbV6Dz5fDjzg7v0FsWnhGaRXAkvN7Lfj3mhmubCA\n9Pb19dUj17qaOTNY1FVo7FhYsSKZfBpBNls6U+no0fhWlYhUTzUKw15gasH1lDAW53KKupHcfW/4\ndRfwBHBu3BvdvcfdM+6eaW9vrzTnhtLVVVoUIFj41awzkIZrYHX0mIL/Utes0ViDSC1VozBsBmaY\n2VlmNo7gw79kdpGZfQA4FcgXxE41s+PD55OA84BtVcgpNXp64v8CPuec5p+BNFy5HGQy0diSJSoO\nIrVScWFw9yPADcA6YDtwv7s/Z2bfMrPCWUaXA/e5uxfEzgF6zewZ4HFgsbu3VGG4+ebS2EknwbaW\n+qcwtLgN91QcRGrDop/T6ZDJZLy3tzfpNCrW0xNs+VBs+XK1FuJ0dcW3rp56Sl1uIsNhZlvCMd1B\naeVzgm65pTQ2Z46KQjnd3fF7Ki1cWP9cRJqZCkNCOjtLp6GefXZ6z1aol+7uYLuQQjt2qEtJpJpU\nGBLQ01O6lbYZrFyZTD5pEzcu873vaYtukWpRYUjAl79cGrvxRvWTD1fcZnuHDgXjNSoOIpVTYaiz\nuXODD7FCY8YEXSQyfOvWwQUXlMbjxm1EZGRUGOqoqyt+g7yLLqp/Ls1g8eKgC67Q7t1qNYhUSoWh\nTvJ5+O53S+MTJmjAebSyWZgXs12jWg0ilVFhqJOVK6F4yYhZcx3RmYRFi+JbDZqlJDJ6KgwJmTYN\n/umfNOBcqWwWvv/90viSJdqFVWS0VBjqoKsLfvKTYJDZDMaNg3vvVVGollwu6JIrdv319c9FpBmo\nMNTYwDYOe/YEW0Z//OPwxBMqCtUWt1p869ZgFpiIjIwKQ43dfXf0etcuFYVa6O4OzsQutn69ZimJ\njJQKQw11dcH+/dHYb/1WMrm0glWrYPLk0rhmKYmMjApDjcSds2AWzL2X2vnGN0pjL71UGhOR8lQY\namTp0tLY97+vbqRay+WCzQgL9fdrrEFkJFQYauTFF6PX06drO+16iduM8Gc/q38eImlVlcJgZheb\n2fNmttPMbop5/Qtm1mdmW8PHHxe8ttDMdoSPpthZf+ZMeOutaCxuR1CpjWwWOjqisSNHYPbsZPIR\nSZuKC4OZjQVuAy4BZgJXmNnMmFt/5
O6zwscd4XsnAl8HZgMdwNfN7NRKc0pSTw9s3x6NHXecWgv1\ntnEjjB8fjW3aFJyDISKDq0aLoQPY6e673P0wcB8Qs4NNrLnAo+5+wN1fBx4FLq5CTomJaxn8wR/U\nPw+BG24oja1erRXRIkOpRmE4Eyic97EnjBX7IzP7uZk9YGZTR/jeVOjshAMHorHx47VJXlK6u+E9\n7ymN60AkkcHVa/D5YWC6u3+YoFWwYqTfwMxyZtZrZr19fX1VT7BS+XzpqWwAt95a/1zkmLgdbfft\nq38eImlSjcKwF5hacD0ljP07d3/N3QeOp7kD+A/DfW/B9+hx94y7Z9rb26uQdnXdVDLkDjNmaGwh\nablc6Q6sDz+s1dAig6lGYdgMzDCzs8xsHHA5sLbwBjMrXI96KTAwPLsOmGNmp4aDznPCWKr09MCG\nDaXxFSNuF0ktdHcHx34O6O+Ha6/VWINIORUXBnc/AtxA8IG+Hbjf3Z8zs2+Z2aXhbV82s+fM7Bng\ny8AXwvceAP6coLhsBr4VxlLlO98pjS1frsVsjeSqq6KtBne47rrk8hFpZObFp8ekQCaT8d7e3qTT\nAI7tnlpo0SKd4dyITjopur7khBPg7beTy0ek3sxsi7tnhrpPK58rVNxXPWGCikKj+sM/jF6/845O\nehOJo8JQga4uOHgwGvvwh5PJRYa2ahWccUY09pd/qbEGkWIqDKOUz5d2IYF2T2103/xm9Pro0fgZ\nZSKtTIVhlOIWSV1wgQacG93A9NVCGzaoS0mkkArDKP30p9FrnbWQHt3dpWdEa12DyDEqDKPQ1QU7\nd0Zj8+aptZAmxYXh4EEVB5EBKgyjEPcBUtw9IY0tbrPD667TQLQIqDCMWNxMpFmz1FpIm1wuODyp\n0NGjcP31iaQj0lBUGEYgn4/flG3ZsvrnIpWLazVs3aozG0RUGEbgiSeCrRQKTZ+u1kJa5XLB1iVj\niv4vePjhZPIRaRQqDCNw4YXBaWyFdGRnuuVypYsSTzklmVxEGoUKwzB1dsJnPxucxjZ/fnCm8PLl\n2la7GSxbFt1gb+9edSdJa1NhGIbOzuAQngMHYP36YDO2jRtVFJpFNgsf/Wg0tnq1pq9K61JhGIZ7\n741er1mTTB5SO1dfXRq788765yHSCFQYhjB3bjCNsZD6oJtPLgdz5kRjmzZpXYO0JhWGIRRvfQGl\nG7FJc7jwwtLYwoV1T0MkcVUpDGZ2sZk9b2Y7zaxkr0oz+1Mz22ZmPzezx8xsWsFr/Wa2NXysLX5v\nkrq6SlsL48drbKFZXXhhdBAaYMcOjTVI66m4MJjZWOA24BJgJnCFmc0suu1fgIy7fxh4ACjcsPpt\nd58VPi6lgdx1V2ns1lvrn4fURzYLV15ZGteUZGk11WgxdAA73X2Xux8G7gPmFd7g7o+7+8Chik8D\nU6rwc2sqn4e+vmjs7LPVWmh2q1bBaadFYwcOaFtuaS3VKAxnAi8VXO8JY+VcDfx9wfUJZtZrZk+b\n2fxybzKzXHhfb1/xJ3YNXHZZaSzuDAZpPt/5Tmnsnnvqn4dIUuo6+GxmnUAGKNxxaFp4OPWVwFIz\n++2497p7j7tn3D3T3t5e0zy7umDPnmisvV1bX7SKuBlKL7+sGUrSOqpRGPYCUwuup4SxCDO7CPga\ncKm7HxqIu/ve8Osu4Ang3CrkVJG4vw6/+MX65yHJWbcuOJFvQH9//FGuIs2oGoVhMzDDzM4ys3HA\n5UBkdpGZnQssJygK+wvip5rZ8eHzScB5wLYq5DRqPT2lrYWOjuDUL2ktixdDW9ux6zVrNENJWkPF\nhcHdjwA3AOuA7cD97v6cmX3LzAZmGX0XOBn4X0XTUs8Bes3sGeBxYLG7J1YY8nm49tpo7Mwzg+0v\npPVkszBpUjQWN/4g0mzahr5laO7+CPBIUex/FDy/qMz7ngI+VI0cqmHlytJttaW1Fa9j2bcvmTxE\n6kkrnwv8+MelsQUL6p+HNI4vfCF6fehQsE2KSDNTYQjNnl26bmHBAo0ttLrubjj++Ghs/XrNUJLm\npsIQ2rw5em0WLHYS+dznSmM3lWz8ItI8VBgI/vorHlvQDqoyYNUqmDgxGnvySbUapHmpMBA/P/27\n3y2NSeu65ZbotbtWwkvzavnCkM/D2qI9XS+4QHsiSVQuB4sWRXdf/cEP1GqQ5tTyhWHlyuiUxDFj\ngoVNIsW6u+HjHz923d8P11+fXD4itdLyheHpp6PXl16qPZGkvHfeiV5v3arV0NJ8WrowTJ8e/I89\nYMyYoLtApJy4s6G//vX65yFSSy1bGObOhRdfjMbe/361FmRwuRzMmhWN7dun8xqkubRsYXj88dJY\n3OldIsWWLSuNqTtJmklLFoZ8Ht59NxqbMEGrnGV4stnSVsPBgyoO0jxasjDErVp95JHSmEg5ca2G\npUvrn4dILbRcYejqgg0bjl2bwfLlGluQkclmSycqbN+uVoM0B/MU7jOdyWS8t7d3VO89/XTYv//Y\n9fveB6+8UqXEpOVMnhzdinvmTHjuueTyERmMmW0Jj1IeVFVaDGZ2sZk9b2Y7zayko8bMjjezH4Wv\nbzSz6QWv3RzGnzezmm5onM9HiwLABz5Qy58oze7UU6PXOq9BmkHFhcHMxgK3AZcAM4ErzGxm0W1X\nA6+7+9nArUB3+N6ZBEeBfhC4GFgWfr+aiBtb0CpnqcRXvxq9PnAAOjuTyUWkWqrRYugAdrr7Lnc/\nDNwHzCu6Zx6wInz+APApM7Mwfp+7H3L3XwE7w+9Xdfl8sCNmoXPO0diCVCaXC7ojC61erT2UpPry\n+WAzx3r8t1WNwnAm8FLB9Z4wFntPeEb0vwGnDfO9VRF3bGfxX3sio1F8yhvAddfVPQ1pYvk8XHgh\nfO1rwddaF4fUzEoys5yZ9ZpZb1/xUWujoB1UpVq6u6Gt6PT0Z55Rq0GqZ8kSOHw4+OP28OHab/le\njcKwF5hacD0ljMXeY2ZtwHuB14b5XgDcvcfdM+6eaW9vH3GSV10F48YF01PHjdPYglTXySeXxuLO\n+RAZqXwe1qwpjdVSNQrDZmCGmZ1lZuMIBpOLTjhgLbAwfP454GcezJNdC1wezlo6C5gBbKpCTiWy\nWXjiCfj2t4OvGluQaoprfa5dq1aDVC7uD4xXX63tz2wb+pbBufsRM7sBWAeMBX7o7s+Z2beAXndf\nC9wJ/K2Z7QQOEBQPwvvuB7YBR4AvuXt/pTmVk82qIEhtdHfD+vXR3XqPHg2a/PpvTipR+N/UgAUL\navszW26Bm0it5PNw/vnRg5/mz4cHH0wuJ0m3rq7SFsOMGfDCC6P7fnVd4CYiQcvg9tuDcz0GrFmj\nLblldHp6SouCGaxYEX9/NakwiFRRLhe0GgotWaI9lGTkvve90ti8efXpmlRhEKmy4uM/Ae68s/55\nSHrl87BtW2m8XidMqjCIVFnc8Z8nnFD/PCS94mYizZ9fv4kMKgwiVZbLBQsoC736qqauyvDErV
sw\nq+959CoMIjWweHF0NfS2bcHYg4qDDCWutVCvsYUBKgwiNZDNwh//cTR29Kj2UJKhPf109LrerQVQ\nYRCpmauuKo398pf1z0PSo6ur9EyPG2+s/yJJFQaRGslmg8VIhQ4d0tRVKe/226PXJ58crKqvNxUG\nkRqKW4x03XUaa5BSXV3wxhvR2KRJyeSiwiBSQ9lsMM2w0NGj2nlVovL5+P8mbr65/rmACoNIzS1a\nFN0mA7TzqkTFHTs8a1ZyZ8aoMIjU2MAeSmbHYmo1SKHNm0tjy5bVP48BKgwidZDLBXPRC61Zo1aD\nBGMLb78djc2alex27SoMInUSNxf9+uvrn4c0lrhZakm2FkCFQaRuslk47rho7LnnkslFGkM+DwcP\nRmNnnJH84U4VFQYzm2hmj5rZjvDrqTH3zDKzvJk9Z2Y/N7P/XPDa3Wb2KzPbGj5mVZKPSKM77bTo\n9bvv6ryGVrZyZWnsm9+sfx7FKm0x3AQ85u4zgMfC62JvAVe5+weBi4GlZjah4PUb3X1W+Ig5xE6k\necT9T6/zGlpTPg8/+MGx64GtL5KaiVSo0sIwDxhYwrMCmF98g7u/4O47wuf/D9gPtFf4c0VSKW7n\nVYAf/7j+uUiybroJ+gtOuP/4x5NZ5Ryn0sJwuru/HD7fB5w+2M1m1gGMA/5vQfjbYRfTrWZ2fIX5\niDS8xYth7NhorF1/KrWUfB6efDIaizvgKSlDFgYz+6mZPRvziEy+c3cHfJDvMxn4W+CL7j5wXPrN\nwAeAjwITgbK9rWaWM7NeM+vt6+sb+jcTaVDZLPzJn0Rj99+vqaut5KabwIs+LeMOeErKkIXB3S9y\n99+NeTwEvBJ+4A988O+P+x5m9h7gfwNfc/enC773yx44BNwFdAySR4+7Z9w9064/ryTlrroqel7D\nkSPxA5HSfOJaC9OmNcbYwoBKu5LWAgvD5wuBh4pvMLNxwIPASnd/oOi1gaJiBOMTz1aYj0gqZLNw\n223HupTcg4FIDUI3vyeeKI392Z/VPY1BVVoYFgOfNrMdwEXhNWaWMbM7wnsuAy4AvhAzLXW1mf0C\n+AUwCfiLCvMRSY1cLuhSGtgqo78frr1WXUrN7sILgzPAzYI9tBplJlIh8+KOrhTIZDLe29ubdBoi\nFcvn4bzzov3NF1wA//APyeUktdPTE8xAmzULJkwIikQ9F7OZ2RZ3zwx1X9tQN4hI7WSz8N73Rle/\n6pS35tTVdWzjxPXrYfny5Fc4l6MtMUQS9uEPR69PPFHdSc0m7ryF730vmVyGQ4VBJGHF6xpefDFY\n7KTi0Dzizlto5F58FQaRhGWzwfTFadOOxfr7tfNqM9m1qzT21a/WP4/hUmEQaQDZbHR7BICtW9Vq\naAb5fOnZzXPmNN5MpEIqDCIN4sorS2Of/3z985Dqyefh/PODIg/BFNUFC2DdumTzGooKg0iD6O4u\n3UNp714tekuzhQuDY1wHuMMHP5hcPsOlwiDSQD71qdLY9derSymNenpgx45ozCxYu9DoVBhEGsi6\nddBRtGNYf7/2UUqjO+8sjV15ZeOuXSikwiDSYDZuDFbGFtq2LZlcZHTyedi0KRo75xxYtSqZfEZK\nhUGkAY0bF71upL36ZWjFi9kApk6tfx6jpcIg0oCK9+Z/4w0NQqfJCy+Uxv7oj+qfx2ipMIg0oFwu\n2Etn5szgevt2uOYa6OxMNi8ZWmdnadffggWNvW6hmAqDSIPK5eDkk6Ox1avVcmhknZ3Bv6NC8+en\nZ2xhgAqDSAN7//tLY428+Vory+dLiwIE5y2kjQqDSAOL+1DZtk3rGhpR3JTiWbPSMT21WEWFwcwm\nmtmjZrYj/Hpqmfv6C05vW1sQP8vMNprZTjP7UXgMqIiEstlgrKHYZZfVPxcZ3NNPl8aWLat/HtVQ\naYvhJuAxd58BPBZex3nb3WeFj0sL4t3Are5+NvA6cHX820VaVy4H7e3R2J49wcEv0hi6uo7thzRg\n/vx0thag8sIwD1gRPl8BzB/uG83MgE8CD4zm/SKt5ItfLI0tXVr/PKRU3CE8ZukcWxhQaWE43d1f\nDp/vA04vc98JZtZrZk+b2cCH/2nAQXc/El7vAc4s94PMLBd+j96+vr4K0xZJl+7u0kVvhw+r1dAI\nFi4sjd14Y3pbCzCMwmBmPzWzZ2Me8wrvc3cHyp1JNC08gPpKYKmZ/fZIE3X3HnfPuHumvbhdLdIC\n4g52ufvuuqchBbq6SjfKmzAhKORpNmRhcPeL3P13Yx4PAa+Y2WSA8Ov+Mt9jb/h1F/AEcC7wGjDB\nzNrC26YAeyv+jUSaVHd36QZ7+/drXUNS4rqQIF0L2cqptCtpLTDQkFoIPFR8g5mdambHh88nAecB\n28IWxuPA5wZ7v4gcs3EjnHFGNHbLLcnk0uriZoadfXb6WwtQeWFYDHzazHYAF4XXmFnGzO4I7zkH\n6DWzZwgKwWJ3H1gw3gX8qZntJBhziNmoVkQK/f7vR69371arod7y+WBmWLFm2R7dgj/c0yWTyXhv\nb2/SaYgkIp+H884LTgMb0NERtCakPqZMCU7XK4699FIy+QyXmW0Jx3sHpZXPIimTzQazXgpt3qwZ\nSvWSz5cWBYD7769/LrWiwiCSQt3dwQKqAe7BQKi6lGrvpphlvB0d6Z6eWkyFQSSlFi2CMUX/B8cd\nJynV09UFGzZEY83YjafCIJJS2Sycf3409qtfqdVQS8X/bCdMaL6iACoMIqm2eDG0tR277usLDvRR\ncai+nh44eDAamzAhmVxqTYVBJMWy2aBrY8qUaPwb30gknaaVz8P115fGb765/rnUgwqDSMpls5Ap\nmoD48sswfXoi6TSlJUugvz8aW7SoOVY5x1FhEGkCcTt5vvgizJ1b/1yaTU8PrFkTjc2f3xwrnMtR\nYRBpAtlscOB8scceq38uzSSfh2uvjcbSvqX2cKgwiDSJVatKB0P7+4MD6mV0Vq6MrjAHOOec5lqz\nEEeFQaSJPPJIaWz1ap0RXU1f+UrSGdSeCoNIE8lmgwPoizXL5m711NkJ99xzbBHhmDHNPeBcSIVB\npMnEHUD/k59obcNITJ4ctLR+/Ws4ejQotv/4j8094FxIhUGkyWSzsHw5jB17LLZnjxa+Ddfs2bBv\nXzT2r//a/OMKhVQYRJpQLgdPPqmFbyPV1QWbNpXGL7mk/rkkSYVBpEmVW/imtQ3xenrij+o8+eRg\nxlcrqagwmNlEM3vUzHaEX0+NuecPzGxrweMdM5sfvna3mf2q4LWYYTMRGa24+fbr1+vshjjFZ1wM\nWL++vnk0gkpbDDcBj7n7DOCx8DrC3R9391nuPgv4JPAWUPiP+saB1919a4X5iEiBcrOUlizRFNZC\nnZ3BQHOx5ctba2xhQKWFYR6wIny+Apg/yL0AnwP+3t3fqvDnisgwxc1SgvjD7FtRPh/MQCq2YEFr\nTE2NU2lhON3dXw6f7wNOH+L+y4F7i2LfNrOfm9mtZ
nZ8uTeaWc7Mes2st6+vr4KURVpLNhvfpbRn\nj8YbIH5coaOj9cYVCg1ZGMzsp2b2bMxjXuF97u6Al/k2mNlk4EPAuoLwzcAHgI8CE4GyPZ/u3uPu\nGXfPtLe3D5W2iBTo7oY5c0rj69e39hTWnh546KFo7CMfac7Dd0aibagb3P2icq+Z2StmNtndXw4/\n+PcP8q0uAx5093cLvvdAa+OQmd0F/Pdh5i0iI7RuXTBHv3g65pe+BB/6UOv1pefzcN110b2QxoyB\n229PLqdGUWlX0lpgYfh8IfDQIPdeQVE3UlhMMDMjGJ94tsJ8RGQQGzfC+PHR2JEj8LGPtdZgdE9P\nsHX20aPR+KWXtl6BjFNpYVgMfNrMdgAXhdeYWcbM7hi4ycymA1OBfyh6/2oz+wXwC2AS8BcV5iMi\nQ7jhhvh43AllzainJ1gFvr+of2NgLyQB8+I9ZVMgk8l4b29v0mmIpNbMmbB9ezR20knw5pvJ5FNP\nkyeXbnlhBt//fvPPQjKzLe6eGeo+rXwWaUHbtsG0adHYb34TfGg282B0XFGA1igKI6HCINKidu8u\nXfy2b1/zbrZ32mnxRaGV1yuUo8Ig0sKWLQu6UYpde21zbZvR1QUHDpTGP/KR1l6vUI4Kg0gLy2bh\nyitL4+7Bwq9mKA7lNscDTU0tR4VBpMWtWhW/+A3grrvqm0u1DcxAKnbiifDUU5qaWo4Kg4iwbl2w\nDUSxvr7gTIc0rnEoVxQmTIDPqs3qAAAHPElEQVS33lJRGIwKg4gAweK3BQuOnXE8YO/e9C2AK1cU\nQAPNw6HCICL/btWq8v3uF16YjtlKnZ3li0JHR+uc21wJFQYRicjl4ruVDh8OPnBnz65/TsM1d278\nFtoQ/E6tvjnecKkwiEiJjRtLF8AN2LSp8YpDPg/nnlv+tLUFC1QURkKFQURi7d4d33KAoDi0tzfG\nuENPTzAGsrXM+Y/Ll2utwkipMIhIWRs3Bh+sx8ccofXqq8EHcpJrHQYbZD7ttGBKqgabR06FQUQG\nlcvB44+Xf33JkqBw1LtAzJ1bviiMHQsPP6wpqaOlwiAiQ8pmg5ZDOYcPBwWis7O2eeTzQReWWfnx\nhEmT4MknVRQqocIgIsOSywVdMyefXP6e1avhlFOq33oYGFz+2MeCLqxyOjqCRXkqCpWpqDCY2efN\n7DkzO2pmZff4NrOLzex5M9tpZjcVxM8ys41h/EdmNq6SfESktrJZeOONYJbP2LHx97z5ZtB6GDsW\nPvGJygaoe3rgve8dfHAZ4Oyzg6KlmUfVUWmL4VngPwEbyt1gZmOB24BLgJnAFWY2M3y5G7jV3c8G\nXgeurjAfEamDVauCI0HLzVqC4NjMDRuCD/XjjgsekyYNvkiuszMYNJ4+HdragjGEX/+6/P1jxgRF\nascOtRKqqaLC4O7b3f35IW7rAHa6+y53PwzcB8wLz3n+JPBAeN8KgnOfRSQlBmYtnXHG4PcdORI8\nXnst+LA3O/Y48USYOjV4vnp1sD32iy9Cf//g33PixOAeTUWtvnqMMZwJvFRwvSeMnQYcdPcjRXER\nSZFcDl5+OTgvua1t5O9/5x3Ys2f497e1BT/rtddG/rNkeIYsDGb2UzN7NuYxrx4JFuSRM7NeM+vt\n6+ur548WkWHo7oZ33w228I47/KdSbW1Bt9G772q/o1obsr67+0UV/oy9wNSC6ylh7DVggpm1ha2G\ngXi5PHqAHoBMJuMV5iQiNbJuXfC1qwuWLg0+yMeMGbprqNjYscHj859Xd1G91aMraTMwI5yBNA64\nHFjr7g48DnwuvG8h8FAd8hGROujuhkOHgkHoI0eCsYiJE0tbEyecEJz50NYG48fDzJnBvUeOBO9X\nUag/Cz6fR/lmsz8E/gZoBw4CW919rpm9H7jD3T8T3vcZYCkwFvihu387jP8WwWD0ROBfgE53PzTU\nz81kMt7b2zvqvEVEWpGZbXH3sksL/v2+SgpDUlQYRERGbriFQSufRUQkQoVBREQiVBhERCRChUFE\nRCJUGEREJCKVs5LMrA94cZRvnwQMsnFvw0t7/pD+3yHt+UP6f4e05w/J/A7T3L19qJtSWRgqYWa9\nw5mu1ajSnj+k/3dIe/6Q/t8h7flDY/8O6koSEZEIFQYREYloxcIwyDEhqZD2/CH9v0Pa84f0/w5p\nzx8a+HdouTEGEREZXCu2GEREZBAtUxjM7GIze97MdprZTUnnM1Jm9kMz229mzyady2iY2VQze9zM\ntpnZc2b2laRzGikzO8HMNpnZM+Hv8M2kcxoNMxtrZv9iZj9JOpfRMLPdZvYLM9tqZqnbTdPMJpjZ\nA2b2SzPbbmYNd1p1S3QlmdlY4AXg0wRHiG4GrnD3bYkmNgJmdgHwJrDS3X836XxGyswmA5Pd/Z/N\n7BRgCzA/Zf8ODDjJ3d80s+OAfwS+4u5PJ5zaiJjZnwIZ4D3u/tmk8xkpM9sNZNw9lesYzGwF8KS7\n3xGeUTPe3Q8mnVehVmkxdAA73X2Xux8mOAOirkeTVsrdNwAHks5jtNz9ZXf/5/D5G8B2UnbGtwfe\nDC+PCx+p+svKzKYA/xG4I+lcWpGZvRe4ALgTwN0PN1pRgNYpDGcCLxVc7yFlH0rNxMymA+cCG5PN\nZOTCbpitwH7gUXdP2++wFFgEHE06kQo4sN7MtphZLulkRugsoA+4K+zOu8PMTko6qWKtUhikQZjZ\nycCPga+6+6+Tzmek3L3f3WcRnFHeYWap6dYzs88C+919S9K5VOh8d/894BLgS2E3a1q0Ab8H3O7u\n5wK/ARpuzLNVCsNeYGrB9ZQwJnUU9sv/GFjt7n+XdD6VCJv/jwMXJ53LCJwHXBr20d8HfNLMUnei\nsrvvDb/uBx4k6CpOiz3AnoKW5gMEhaKhtEph2AzMMLOzwsGey4G1CefUUsKB2zuB7e7+10nnMxpm\n1m5mE8LnJxJMZvhlslkNn7vf7O5T3H06wf8DP3P3zoTTGhEzOymcvEDYBTMHSM1MPXffB7xkZr8T\nhj4FNNwEjLakE6gHdz9iZjcA64CxwA/d/bmE0xoRM7sXuBCYZGZ7gK+7+53JZjUi5wH/BfhF2EcP\n8Gfu/kiCOY3UZGBFOMttDHC/u6dyymeKnQ48GPydQRtwj7v/n2RTGrH/CqwO/0jdBXwx4XxKtMR0\nVRERGb5W6UoSEZFhUmEQEZEIFQYREYlQYRARkQgVBhERiVBhEBGRCBUGERGJUGEQEZGI/w/w1xWP\nb+vxVQAAAABJRU5ErkJggg==\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAD8CAYAAABzTgP2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X2UVPWd5/H3F1pU1ASRjhLhgDNy\nJpJJgrOVZioa4yQGNJsjzE7iqvRKcpwpH+Im2TkrrZNzNg8ziTSZGcnOEUNHozCgxjUjYtZZMEYH\nZyyBZgYThSgswRFWpBWZaFSQ5rt/3NtD3apb/VQPt27V53VOna77rVvd3/ahvv17NndHRERkwJik\nExARkcaiwiAiIhEqDCIiEqHCICIiESoMIiISocIgIiIRKgwiIhKhwiAiIhEqDCIiEtGWdAKjMWnS\nJJ8+fXrSaYiIpMqWLVtedff2oe5LZWGYPn06vb29SachIpIqZvbicO5TV5KIiESoMIiISIQKg4iI\nRKgwiIhIhAqDiIhEVKUwmNkPzWy/mT1b5nUzs/9pZjvN7Odm9nsFry00sx3hY2E18hERkdGrVovh\nbuDiQV6/BJgRPnLA7QBmNhH4OjAb6AC+bmanViknGYXZs6GtDU45BcaPB7PgMXYsnHsu5PNJZygi\ntVaVwuDuG4ADg9wyD1jpgaeBCWY2GZgLPOruB9z9deBRBi8wUkX5PHziE0EROP74oABs2gT9/fDm\nm/D228fuPXoUtm6Fj33sWKE45RTo6koufxGpjXqNMZwJvFRwvSeMlYuXMLOcmfWaWW9fX1/NEm0V\nM2cGH/IbNgRF4PDhkb3/6NHgfUuWwJgxMGkS9PTUJlcRqa/UDD67e4+7Z9w9094+5IpuidHVBe97\nHxx3HGzfXr3v6w6vvQbXXBO0PDo7q/e9RaT+6lUY9gJTC66nhLFycamifB6mTg3+uu/rgyNHhn6P\nGZx44sh/1uHDsHp1ME6hbiaRdKpXYVgLXBXOTvp94N/c/WVgHTDHzE4NB53nhDGpgp4e+OAHgy6j\nPXsGv7etLegSMoOOjqCr6K23gtbAwGPOnGBsAYL7BtPfHxQijUOIpE+1pqveC+SB3zGzPWZ2tZld\na2bXhrc8AuwCdgI/AK4HcPcDwJ8Dm8PHt8KYVKirK+ja2bZt8PsmTYKnnoJ33w0+zI8ehY0b4+9d\nty5obbgH9y1fDhMnDl4kBsYh1L0kkh7m7knnMGKZTMa1u2p5c+fC+vXlXx8/Pvjrf9EiyGar8zN7\neuDLX4ZDh8rf094ODz1UvZ8pIiNjZlvcPTPUfakZfJah5fPBh365ovC+9wXF4De/gQcfrO4HdC4H\n77wDCxbAuHHx9/T1Bd1amr0k0thUGJpET0/woVu49qDQokXwyivQ3V3bPFatCloNy5eXv+faa1Uc\nRBpZKg/qkajp0+HFMsdvTJwIt9wS/EVfTwM/75prSl9zh+uui94nIo1DLYaUO+20wYvCa68l9+Gb\nywUD2xMmlL529GhQNDQoLdJ4VBhSbPZsOFBmDte0aUFRSFo2C6+/HnRlxVm9WsVBpNGoMKTU3LnB\nvkZxFi2C3bvrms6QuruD1sNJJ5W+ds892pxPpJGoMKTQ7NnxM49OPDH48K31APNoZbPwpS+Vxt3h\nggvUchBpFBp8TplyA80dHeUXpjWSgaK1bFmw+G3AkSNBt9KOHen4PUSamVoMKVKuKMyZk64P0+5u\neOONoJgV27QpaBGJSHJUGFKis7N8S2FdSneXuvrq+PimTcG24CKSDBWGFOjsDLpZik2blq6WQrFc\nLlgIN7AxX6Ht2zXmIJIUFYYGN3dufFGYMKHxZh6NRi4XjDfEWb1aK6RFkqDC0MDy+fjZR2PHwiOP\n1D+fWsnlyq9zuOYaTWUVqTcVhgZ22WWlsZNPhiefbL4dSru7y++v9JnP1DcXkVanwtCgpk+PP1zn\nr/6q+YrCgFwumGFV7OBBmDy5/vmItKpqHdRzsZk9b2Y7zeymmNdvNbOt4eMFMztY8Fp/wWtrq5FP\n2s2dGz8DacGC5t90bt26+Gms+/ZpGqtIvVRcGMxsLHAbcAkwE7jCzCKTDd39v7n7LHefBfwN8HcF\nL7898Jq7X1ppPmlXblyhoyPY0roVbNwYzLgqtmmTjgkVqYdqtBg6gJ3uvsvdDwP3AfMGuf8K4N4q\n/NymtGRJaSzt01JHY/fu+H2V7rqr7qmItJxqFIYzgZcKrveEsRJmNg04C/hZQfgEM+s1s6fNbH4V\n8kmtmTNhzZpobMaM5piWOhqPPloae+01zVISqbV6Dz5fDjzg7v0FsWnhGaRXAkvN7Lfj3mhmubCA\n9Pb19dUj17qaOTNY1FVo7FhYsSKZfBpBNls6U+no0fhWlYhUTzUKw15gasH1lDAW53KKupHcfW/4\ndRfwBHBu3BvdvcfdM+6eaW9vrzTnhtLVVVoUIFj41awzkIZrYHX0mIL/Utes0ViDSC1VozBsBmaY\n2VlmNo7gw79kdpGZfQA4FcgXxE41s+PD55OA84BtVcgpNXp64v8CPuec5p+BNFy5HGQy0diSJSoO\nIrVScWFw9yPADcA6YDtwv7s/Z2bfMrPCWUaXA/e5uxfEzgF6zewZ4HFgsbu3VGG4+ebS2EknwbaW\n+qcwtLgN91QcRGrDop/T6ZDJZLy3tzfpNCrW0xNs+VBs+XK1FuJ0dcW3rp56Sl1uIsNhZlvCMd1B\naeVzgm65pTQ2Z46KQjnd3fF7Ki1cWP9cRJqZCkNCOjtLp6GefXZ6z1aol+7uYLuQQjt2qEtJpJpU\nGBLQ01O6lbYZrFyZTD5pEzcu873vaYtukWpRYUjAl79cGrvxRvWTD1fcZnuHDgXjNSoOIpVTYaiz\nuXODD7FCY8YEXSQyfOvWwQUXlMbjxm1EZGRUGOqoqyt+g7yLLqp/Ls1g8eKgC67Q7t1qNYhUSoWh\nTvJ5+O53S+MTJmjAebSyWZgXs12jWg0ilVFhqJOVK6F4yYhZcx3RmYRFi+JbDZqlJDJ6KgwJmTYN\n/umfNOBcqWwWvv/90viSJdqFVWS0VBjqoKsLfvKTYJDZDMaNg3vvVVGollwu6JIrdv319c9FpBmo\nMNTYwDYOe/YEW0Z//OPwxBMqCtUWt1p869ZgFpiIjIwKQ43dfXf0etcuFYVa6O4OzsQutn69ZimJ\njJQKQw11dcH+/dHYb/1WMrm0glWrYPLk0rhmKYmMjApDjcSds2AWzL2X2vnGN0pjL71UGhOR8lQY\namTp0tLY97+vbqRay+WCzQgL9fdrrEFkJFQYauTFF6PX06drO+16iduM8Gc/q38eImlVlcJgZheb\n2fNmttPMbop5/Qtm1mdmW8PHHxe8ttDMdoSPpthZf+ZMeOutaCxuR1CpjWwWOjqisSNHYPbsZPIR\nSZuKC4OZjQVuAy4BZgJXmNnMmFt/5
O6zwscd4XsnAl8HZgMdwNfN7NRKc0pSTw9s3x6NHXecWgv1\ntnEjjB8fjW3aFJyDISKDq0aLoQPY6e673P0wcB8Qs4NNrLnAo+5+wN1fBx4FLq5CTomJaxn8wR/U\nPw+BG24oja1erRXRIkOpRmE4Eyic97EnjBX7IzP7uZk9YGZTR/jeVOjshAMHorHx47VJXlK6u+E9\n7ymN60AkkcHVa/D5YWC6u3+YoFWwYqTfwMxyZtZrZr19fX1VT7BS+XzpqWwAt95a/1zkmLgdbfft\nq38eImlSjcKwF5hacD0ljP07d3/N3QeOp7kD+A/DfW/B9+hx94y7Z9rb26uQdnXdVDLkDjNmaGwh\nablc6Q6sDz+s1dAig6lGYdgMzDCzs8xsHHA5sLbwBjMrXI96KTAwPLsOmGNmp4aDznPCWKr09MCG\nDaXxFSNuF0ktdHcHx34O6O+Ha6/VWINIORUXBnc/AtxA8IG+Hbjf3Z8zs2+Z2aXhbV82s+fM7Bng\ny8AXwvceAP6coLhsBr4VxlLlO98pjS1frsVsjeSqq6KtBne47rrk8hFpZObFp8ekQCaT8d7e3qTT\nAI7tnlpo0SKd4dyITjopur7khBPg7beTy0ek3sxsi7tnhrpPK58rVNxXPWGCikKj+sM/jF6/845O\nehOJo8JQga4uOHgwGvvwh5PJRYa2ahWccUY09pd/qbEGkWIqDKOUz5d2IYF2T2103/xm9Pro0fgZ\nZSKtTIVhlOIWSV1wgQacG93A9NVCGzaoS0mkkArDKP30p9FrnbWQHt3dpWdEa12DyDEqDKPQ1QU7\nd0Zj8+aptZAmxYXh4EEVB5EBKgyjEPcBUtw9IY0tbrPD667TQLQIqDCMWNxMpFmz1FpIm1wuODyp\n0NGjcP31iaQj0lBUGEYgn4/flG3ZsvrnIpWLazVs3aozG0RUGEbgiSeCrRQKTZ+u1kJa5XLB1iVj\niv4vePjhZPIRaRQqDCNw4YXBaWyFdGRnuuVypYsSTzklmVxEGoUKwzB1dsJnPxucxjZ/fnCm8PLl\n2la7GSxbFt1gb+9edSdJa1NhGIbOzuAQngMHYP36YDO2jRtVFJpFNgsf/Wg0tnq1pq9K61JhGIZ7\n741er1mTTB5SO1dfXRq788765yHSCFQYhjB3bjCNsZD6oJtPLgdz5kRjmzZpXYO0JhWGIRRvfQGl\nG7FJc7jwwtLYwoV1T0MkcVUpDGZ2sZk9b2Y7zaxkr0oz+1Mz22ZmPzezx8xsWsFr/Wa2NXysLX5v\nkrq6SlsL48drbKFZXXhhdBAaYMcOjTVI66m4MJjZWOA24BJgJnCFmc0suu1fgIy7fxh4ACjcsPpt\nd58VPi6lgdx1V2ns1lvrn4fURzYLV15ZGteUZGk11WgxdAA73X2Xux8G7gPmFd7g7o+7+8Chik8D\nU6rwc2sqn4e+vmjs7LPVWmh2q1bBaadFYwcOaFtuaS3VKAxnAi8VXO8JY+VcDfx9wfUJZtZrZk+b\n2fxybzKzXHhfb1/xJ3YNXHZZaSzuDAZpPt/5Tmnsnnvqn4dIUuo6+GxmnUAGKNxxaFp4OPWVwFIz\n++2497p7j7tn3D3T3t5e0zy7umDPnmisvV1bX7SKuBlKL7+sGUrSOqpRGPYCUwuup4SxCDO7CPga\ncKm7HxqIu/ve8Osu4Ang3CrkVJG4vw6/+MX65yHJWbcuOJFvQH9//FGuIs2oGoVhMzDDzM4ys3HA\n5UBkdpGZnQssJygK+wvip5rZ8eHzScB5wLYq5DRqPT2lrYWOjuDUL2ktixdDW9ux6zVrNENJWkPF\nhcHdjwA3AOuA7cD97v6cmX3LzAZmGX0XOBn4X0XTUs8Bes3sGeBxYLG7J1YY8nm49tpo7Mwzg+0v\npPVkszBpUjQWN/4g0mzahr5laO7+CPBIUex/FDy/qMz7ngI+VI0cqmHlytJttaW1Fa9j2bcvmTxE\n6kkrnwv8+MelsQUL6p+HNI4vfCF6fehQsE2KSDNTYQjNnl26bmHBAo0ttLrubjj++Ghs/XrNUJLm\npsIQ2rw5em0WLHYS+dznSmM3lWz8ItI8VBgI/vorHlvQDqoyYNUqmDgxGnvySbUapHmpMBA/P/27\n3y2NSeu65ZbotbtWwkvzavnCkM/D2qI9XS+4QHsiSVQuB4sWRXdf/cEP1GqQ5tTyhWHlyuiUxDFj\ngoVNIsW6u+HjHz923d8P11+fXD4itdLyheHpp6PXl16qPZGkvHfeiV5v3arV0NJ8WrowTJ8e/I89\nYMyYoLtApJy4s6G//vX65yFSSy1bGObOhRdfjMbe/361FmRwuRzMmhWN7dun8xqkubRsYXj88dJY\n3OldIsWWLSuNqTtJmklLFoZ8Ht59NxqbMEGrnGV4stnSVsPBgyoO0jxasjDErVp95JHSmEg5ca2G\npUvrn4dILbRcYejqgg0bjl2bwfLlGluQkclmSycqbN+uVoM0B/MU7jOdyWS8t7d3VO89/XTYv//Y\n9fveB6+8UqXEpOVMnhzdinvmTHjuueTyERmMmW0Jj1IeVFVaDGZ2sZk9b2Y7zayko8bMjjezH4Wv\nbzSz6QWv3RzGnzezmm5onM9HiwLABz5Qy58oze7UU6PXOq9BmkHFhcHMxgK3AZcAM4ErzGxm0W1X\nA6+7+9nArUB3+N6ZBEeBfhC4GFgWfr+aiBtb0CpnqcRXvxq9PnAAOjuTyUWkWqrRYugAdrr7Lnc/\nDNwHzCu6Zx6wInz+APApM7Mwfp+7H3L3XwE7w+9Xdfl8sCNmoXPO0diCVCaXC7ojC61erT2UpPry\n+WAzx3r8t1WNwnAm8FLB9Z4wFntPeEb0vwGnDfO9VRF3bGfxX3sio1F8yhvAddfVPQ1pYvk8XHgh\nfO1rwddaF4fUzEoys5yZ9ZpZb1/xUWujoB1UpVq6u6Gt6PT0Z55Rq0GqZ8kSOHw4+OP28OHab/le\njcKwF5hacD0ljMXeY2ZtwHuB14b5XgDcvcfdM+6eaW9vH3GSV10F48YF01PHjdPYglTXySeXxuLO\n+RAZqXwe1qwpjdVSNQrDZmCGmZ1lZuMIBpOLTjhgLbAwfP454GcezJNdC1wezlo6C5gBbKpCTiWy\nWXjiCfj2t4OvGluQaoprfa5dq1aDVC7uD4xXX63tz2wb+pbBufsRM7sBWAeMBX7o7s+Z2beAXndf\nC9wJ/K2Z7QQOEBQPwvvuB7YBR4AvuXt/pTmVk82qIEhtdHfD+vXR3XqPHg2a/PpvTipR+N/UgAUL\navszW26Bm0it5PNw/vnRg5/mz4cHH0wuJ0m3rq7SFsOMGfDCC6P7fnVd4CYiQcvg9tuDcz0GrFmj\nLblldHp6SouCGaxYEX9/NakwiFRRLhe0GgotWaI9lGTkvve90ti8efXpmlRhEKmy4uM/Ae68s/55\nSHrl87BtW2m8XidMqjCIVFnc8Z8nnFD/PCS94mYizZ9fv4kMKgwiVZbLBQsoC736qqauyvDErV
sw\nq+959CoMIjWweHF0NfS2bcHYg4qDDCWutVCvsYUBKgwiNZDNwh//cTR29Kj2UJKhPf109LrerQVQ\nYRCpmauuKo398pf1z0PSo6ur9EyPG2+s/yJJFQaRGslmg8VIhQ4d0tRVKe/226PXJ58crKqvNxUG\nkRqKW4x03XUaa5BSXV3wxhvR2KRJyeSiwiBSQ9lsMM2w0NGj2nlVovL5+P8mbr65/rmACoNIzS1a\nFN0mA7TzqkTFHTs8a1ZyZ8aoMIjU2MAeSmbHYmo1SKHNm0tjy5bVP48BKgwidZDLBXPRC61Zo1aD\nBGMLb78djc2alex27SoMInUSNxf9+uvrn4c0lrhZakm2FkCFQaRuslk47rho7LnnkslFGkM+DwcP\nRmNnnJH84U4VFQYzm2hmj5rZjvDrqTH3zDKzvJk9Z2Y/N7P/XPDa3Wb2KzPbGj5mVZKPSKM77bTo\n9bvv6ryGVrZyZWnsm9+sfx7FKm0x3AQ85u4zgMfC62JvAVe5+weBi4GlZjah4PUb3X1W+Ig5xE6k\necT9T6/zGlpTPg8/+MGx64GtL5KaiVSo0sIwDxhYwrMCmF98g7u/4O47wuf/D9gPtFf4c0VSKW7n\nVYAf/7j+uUiybroJ+gtOuP/4x5NZ5Ryn0sJwuru/HD7fB5w+2M1m1gGMA/5vQfjbYRfTrWZ2fIX5\niDS8xYth7NhorF1/KrWUfB6efDIaizvgKSlDFgYz+6mZPRvziEy+c3cHfJDvMxn4W+CL7j5wXPrN\nwAeAjwITgbK9rWaWM7NeM+vt6+sb+jcTaVDZLPzJn0Rj99+vqaut5KabwIs+LeMOeErKkIXB3S9y\n99+NeTwEvBJ+4A988O+P+x5m9h7gfwNfc/enC773yx44BNwFdAySR4+7Z9w9064/ryTlrroqel7D\nkSPxA5HSfOJaC9OmNcbYwoBKu5LWAgvD5wuBh4pvMLNxwIPASnd/oOi1gaJiBOMTz1aYj0gqZLNw\n223HupTcg4FIDUI3vyeeKI392Z/VPY1BVVoYFgOfNrMdwEXhNWaWMbM7wnsuAy4AvhAzLXW1mf0C\n+AUwCfiLCvMRSY1cLuhSGtgqo78frr1WXUrN7sILgzPAzYI9tBplJlIh8+KOrhTIZDLe29ubdBoi\nFcvn4bzzov3NF1wA//APyeUktdPTE8xAmzULJkwIikQ9F7OZ2RZ3zwx1X9tQN4hI7WSz8N73Rle/\n6pS35tTVdWzjxPXrYfny5Fc4l6MtMUQS9uEPR69PPFHdSc0m7ryF730vmVyGQ4VBJGHF6xpefDFY\n7KTi0Dzizlto5F58FQaRhGWzwfTFadOOxfr7tfNqM9m1qzT21a/WP4/hUmEQaQDZbHR7BICtW9Vq\naAb5fOnZzXPmNN5MpEIqDCIN4sorS2Of/3z985Dqyefh/PODIg/BFNUFC2DdumTzGooKg0iD6O4u\n3UNp714tekuzhQuDY1wHuMMHP5hcPsOlwiDSQD71qdLY9derSymNenpgx45ozCxYu9DoVBhEGsi6\nddBRtGNYf7/2UUqjO+8sjV15ZeOuXSikwiDSYDZuDFbGFtq2LZlcZHTyedi0KRo75xxYtSqZfEZK\nhUGkAY0bF71upL36ZWjFi9kApk6tfx6jpcIg0oCK9+Z/4w0NQqfJCy+Uxv7oj+qfx2ipMIg0oFwu\n2Etn5szgevt2uOYa6OxMNi8ZWmdnadffggWNvW6hmAqDSIPK5eDkk6Ox1avVcmhknZ3Bv6NC8+en\nZ2xhgAqDSAN7//tLY428+Vory+dLiwIE5y2kjQqDSAOL+1DZtk3rGhpR3JTiWbPSMT21WEWFwcwm\nmtmjZrYj/Hpqmfv6C05vW1sQP8vMNprZTjP7UXgMqIiEstlgrKHYZZfVPxcZ3NNPl8aWLat/HtVQ\naYvhJuAxd58BPBZex3nb3WeFj0sL4t3Are5+NvA6cHX820VaVy4H7e3R2J49wcEv0hi6uo7thzRg\n/vx0thag8sIwD1gRPl8BzB/uG83MgE8CD4zm/SKt5ItfLI0tXVr/PKRU3CE8ZukcWxhQaWE43d1f\nDp/vA04vc98JZtZrZk+b2cCH/2nAQXc/El7vAc4s94PMLBd+j96+vr4K0xZJl+7u0kVvhw+r1dAI\nFi4sjd14Y3pbCzCMwmBmPzWzZ2Me8wrvc3cHyp1JNC08gPpKYKmZ/fZIE3X3HnfPuHumvbhdLdIC\n4g52ufvuuqchBbq6SjfKmzAhKORpNmRhcPeL3P13Yx4PAa+Y2WSA8Ov+Mt9jb/h1F/AEcC7wGjDB\nzNrC26YAeyv+jUSaVHd36QZ7+/drXUNS4rqQIF0L2cqptCtpLTDQkFoIPFR8g5mdambHh88nAecB\n28IWxuPA5wZ7v4gcs3EjnHFGNHbLLcnk0uriZoadfXb6WwtQeWFYDHzazHYAF4XXmFnGzO4I7zkH\n6DWzZwgKwWJ3H1gw3gX8qZntJBhziNmoVkQK/f7vR69371arod7y+WBmWLFm2R7dgj/c0yWTyXhv\nb2/SaYgkIp+H884LTgMb0NERtCakPqZMCU7XK4699FIy+QyXmW0Jx3sHpZXPIimTzQazXgpt3qwZ\nSvWSz5cWBYD7769/LrWiwiCSQt3dwQKqAe7BQKi6lGrvpphlvB0d6Z6eWkyFQSSlFi2CMUX/B8cd\nJynV09UFGzZEY83YjafCIJJS2Sycf3409qtfqdVQS8X/bCdMaL6iACoMIqm2eDG0tR277usLDvRR\ncai+nh44eDAamzAhmVxqTYVBJMWy2aBrY8qUaPwb30gknaaVz8P115fGb765/rnUgwqDSMpls5Ap\nmoD48sswfXoi6TSlJUugvz8aW7SoOVY5x1FhEGkCcTt5vvgizJ1b/1yaTU8PrFkTjc2f3xwrnMtR\nYRBpAtlscOB8scceq38uzSSfh2uvjcbSvqX2cKgwiDSJVatKB0P7+4MD6mV0Vq6MrjAHOOec5lqz\nEEeFQaSJPPJIaWz1ap0RXU1f+UrSGdSeCoNIE8lmgwPoizXL5m711NkJ99xzbBHhmDHNPeBcSIVB\npMnEHUD/k59obcNITJ4ctLR+/Ws4ejQotv/4j8094FxIhUGkyWSzsHw5jB17LLZnjxa+Ddfs2bBv\nXzT2r//a/OMKhVQYRJpQLgdPPqmFbyPV1QWbNpXGL7mk/rkkSYVBpEmVW/imtQ3xenrij+o8+eRg\nxlcrqagwmNlEM3vUzHaEX0+NuecPzGxrweMdM5sfvna3mf2q4LWYYTMRGa24+fbr1+vshjjFZ1wM\nWL++vnk0gkpbDDcBj7n7DOCx8DrC3R9391nuPgv4JPAWUPiP+saB1919a4X5iEiBcrOUlizRFNZC\nnZ3BQHOx5ctba2xhQKWFYR6wIny+Apg/yL0AnwP+3t3fqvDnisgwxc1SgvjD7FtRPh/MQCq2YEFr\nTE2NU2lhON3dXw6f7wNOH+L+y4F7i2LfNrOfm9mtZ
nZ8uTeaWc7Mes2st6+vr4KURVpLNhvfpbRn\nj8YbIH5coaOj9cYVCg1ZGMzsp2b2bMxjXuF97u6Al/k2mNlk4EPAuoLwzcAHgI8CE4GyPZ/u3uPu\nGXfPtLe3D5W2iBTo7oY5c0rj69e39hTWnh546KFo7CMfac7Dd0aibagb3P2icq+Z2StmNtndXw4/\n+PcP8q0uAx5093cLvvdAa+OQmd0F/Pdh5i0iI7RuXTBHv3g65pe+BB/6UOv1pefzcN110b2QxoyB\n229PLqdGUWlX0lpgYfh8IfDQIPdeQVE3UlhMMDMjGJ94tsJ8RGQQGzfC+PHR2JEj8LGPtdZgdE9P\nsHX20aPR+KWXtl6BjFNpYVgMfNrMdgAXhdeYWcbM7hi4ycymA1OBfyh6/2oz+wXwC2AS8BcV5iMi\nQ7jhhvh43AllzainJ1gFvr+of2NgLyQB8+I9ZVMgk8l4b29v0mmIpNbMmbB9ezR20knw5pvJ5FNP\nkyeXbnlhBt//fvPPQjKzLe6eGeo+rXwWaUHbtsG0adHYb34TfGg282B0XFGA1igKI6HCINKidu8u\nXfy2b1/zbrZ32mnxRaGV1yuUo8Ig0sKWLQu6UYpde21zbZvR1QUHDpTGP/KR1l6vUI4Kg0gLy2bh\nyitL4+7Bwq9mKA7lNscDTU0tR4VBpMWtWhW/+A3grrvqm0u1DcxAKnbiifDUU5qaWo4Kg4iwbl2w\nDUSxvr7gTIc0rnEoVxQmTIDPqs3qAAAHPElEQVS33lJRGIwKg4gAweK3BQuOnXE8YO/e9C2AK1cU\nQAPNw6HCICL/btWq8v3uF16YjtlKnZ3li0JHR+uc21wJFQYRicjl4ruVDh8OPnBnz65/TsM1d278\nFtoQ/E6tvjnecKkwiEiJjRtLF8AN2LSp8YpDPg/nnlv+tLUFC1QURkKFQURi7d4d33KAoDi0tzfG\nuENPTzAGsrXM+Y/Ll2utwkipMIhIWRs3Bh+sx8ccofXqq8EHcpJrHQYbZD7ttGBKqgabR06FQUQG\nlcvB44+Xf33JkqBw1LtAzJ1bviiMHQsPP6wpqaOlwiAiQ8pmg5ZDOYcPBwWis7O2eeTzQReWWfnx\nhEmT4MknVRQqocIgIsOSywVdMyefXP6e1avhlFOq33oYGFz+2MeCLqxyOjqCRXkqCpWpqDCY2efN\n7DkzO2pmZff4NrOLzex5M9tpZjcVxM8ys41h/EdmNq6SfESktrJZeOONYJbP2LHx97z5ZtB6GDsW\nPvGJygaoe3rgve8dfHAZ4Oyzg6KlmUfVUWmL4VngPwEbyt1gZmOB24BLgJnAFWY2M3y5G7jV3c8G\nXgeurjAfEamDVauCI0HLzVqC4NjMDRuCD/XjjgsekyYNvkiuszMYNJ4+HdragjGEX/+6/P1jxgRF\nascOtRKqqaLC4O7b3f35IW7rAHa6+y53PwzcB8wLz3n+JPBAeN8KgnOfRSQlBmYtnXHG4PcdORI8\nXnst+LA3O/Y48USYOjV4vnp1sD32iy9Cf//g33PixOAeTUWtvnqMMZwJvFRwvSeMnQYcdPcjRXER\nSZFcDl5+OTgvua1t5O9/5x3Ys2f497e1BT/rtddG/rNkeIYsDGb2UzN7NuYxrx4JFuSRM7NeM+vt\n6+ur548WkWHo7oZ33w228I47/KdSbW1Bt9G772q/o1obsr67+0UV/oy9wNSC6ylh7DVggpm1ha2G\ngXi5PHqAHoBMJuMV5iQiNbJuXfC1qwuWLg0+yMeMGbprqNjYscHj859Xd1G91aMraTMwI5yBNA64\nHFjr7g48DnwuvG8h8FAd8hGROujuhkOHgkHoI0eCsYiJE0tbEyecEJz50NYG48fDzJnBvUeOBO9X\nUag/Cz6fR/lmsz8E/gZoBw4CW919rpm9H7jD3T8T3vcZYCkwFvihu387jP8WwWD0ROBfgE53PzTU\nz81kMt7b2zvqvEVEWpGZbXH3sksL/v2+SgpDUlQYRERGbriFQSufRUQkQoVBREQiVBhERCRChUFE\nRCJUGEREJCKVs5LMrA94cZRvnwQMsnFvw0t7/pD+3yHt+UP6f4e05w/J/A7T3L19qJtSWRgqYWa9\nw5mu1ajSnj+k/3dIe/6Q/t8h7flDY/8O6koSEZEIFQYREYloxcIwyDEhqZD2/CH9v0Pa84f0/w5p\nzx8a+HdouTEGEREZXCu2GEREZBAtUxjM7GIze97MdprZTUnnM1Jm9kMz229mzyady2iY2VQze9zM\ntpnZc2b2laRzGikzO8HMNpnZM+Hv8M2kcxoNMxtrZv9iZj9JOpfRMLPdZvYLM9tqZqnbTdPMJpjZ\nA2b2SzPbbmYNd1p1S3QlmdlY4AXg0wRHiG4GrnD3bYkmNgJmdgHwJrDS3X836XxGyswmA5Pd/Z/N\n7BRgCzA/Zf8ODDjJ3d80s+OAfwS+4u5PJ5zaiJjZnwIZ4D3u/tmk8xkpM9sNZNw9lesYzGwF8KS7\n3xGeUTPe3Q8mnVehVmkxdAA73X2Xux8mOAOirkeTVsrdNwAHks5jtNz9ZXf/5/D5G8B2UnbGtwfe\nDC+PCx+p+svKzKYA/xG4I+lcWpGZvRe4ALgTwN0PN1pRgNYpDGcCLxVc7yFlH0rNxMymA+cCG5PN\nZOTCbpitwH7gUXdP2++wFFgEHE06kQo4sN7MtphZLulkRugsoA+4K+zOu8PMTko6qWKtUhikQZjZ\nycCPga+6+6+Tzmek3L3f3WcRnFHeYWap6dYzs88C+919S9K5VOh8d/894BLgS2E3a1q0Ab8H3O7u\n5wK/ARpuzLNVCsNeYGrB9ZQwJnUU9sv/GFjt7n+XdD6VCJv/jwMXJ53LCJwHXBr20d8HfNLMUnei\nsrvvDb/uBx4k6CpOiz3AnoKW5gMEhaKhtEph2AzMMLOzwsGey4G1CefUUsKB2zuB7e7+10nnMxpm\n1m5mE8LnJxJMZvhlslkNn7vf7O5T3H06wf8DP3P3zoTTGhEzOymcvEDYBTMHSM1MPXffB7xkZr8T\nhj4FNNwEjLakE6gHdz9iZjcA64CxwA/d/bmE0xoRM7sXuBCYZGZ7gK+7+53JZjUi5wH/BfhF2EcP\n8Gfu/kiCOY3UZGBFOMttDHC/u6dyymeKnQ48GPydQRtwj7v/n2RTGrH/CqwO/0jdBXwx4XxKtMR0\nVRERGb5W6UoSEZFhUmEQEZEIFQYREYlQYRARkQgVBhERiVBhEBGRCBUGERGJUGEQEZGI/w/w1xWP\nb+vxVQAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -191,7 +187,7 @@
"metadata": {
"id": "i0FJe3Y-Gkac",
"colab_type": "code",
- "outputId": "60b19cdd-c69c-469e-9446-b738a79c1f51",
+ "outputId": "481dad2e-1bfe-427c-a9ef-c345a821dfb9",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 269
@@ -210,7 +206,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJztnX+YVOV597/3mdkdeNNa0tGWKFIS\njUlsuMIKEqc2uqlEg41K3bfVxPddC8j6A4jEq1Jtk5S3MZIQo5ugIqvAyzaaNC0JQoJFMW6kYRoE\nwdKgxh9NEH9Usr7UpGGX3Znn/ePeu89zzpyzO7MzuzNz5v5c116zM/OcmTP74/vc5/5JxhgoiqIo\njYVX7RNQFEVRxh8Vf0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUBUfFXFEVp\nQFT8FUVRGpBktU8gihNPPNFMmzat2qehKIpSV+zdu/cXxpiTRlpXs+I/bdo07Nmzp9qnoSiKUlcQ\n0c+LWaduH0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUBUfFvQLJZYOVKvlUU\npTGp2Tx/ZWzo6gKWLAFyOSCVAh5/HMhkijs2mwV6eoDW1uKPURSlNlHxbyCyWWDxYmBwkO/397OY\nFyPk2SxwwQXA8eNAc3Npm4aiKLWHun0aiJ4eIJ+394nYig8S5hbq6WHhz+X4tqdnbM9VUZSxRS3/\nmCIumnQa6O1lkW9tBZJJFm8AMAY4cMBvwUe5hVpb2eIXyz9s01AUpX5Q8Y8h4qLp72dL3/OskC9Y\nAKxdy8KfzwM33MDH9PbyRhHlFspk+Hj1+StKPFDxrzOKCbqKi0ZcPPk83+/u5vtELP4AW/iyAXge\n3xcSCb+FL5uAoij1j4p/HVFM0DWbBQ4dYveOWPeex0K+YQNb9SL8ggi+MbxOjrn7bhV7RYkrKv51\nRFjQ1RVnd3NIJICODqClhV06hw4B99/vt+xdxDXU2WljBCr8ihJfVPxriJFcOlFBVznu0CG7OQDA\n1KnA9On8XEuLPTaR4CsAcQsRAbNmASefDDz4IHDkCLB7NzB3rm4EihJXyAR9ADXCrFmzTCMNcyk2\njz6YxZNOA8uW8XHi6snl+DU6O+1ziQRw8cX8GpMnAyecANx5J28ATU18OzAQfm6pFPDEE7oBKEo9\nQER7jTGzRlqnln+NMJJLR5DHZKOQIK1Y8YsWscWfTgObNgF9fXZD2LyZ1xDxRnHTTcCkSWzly3Nh\n9PcDq1YBs2fbqwCt9lWU+kbFv0YoJo8+zL1jDG8ARHxcezuvveACK/xBjGEr/847gXvuAb7//ZHP\nb+tW/mpuBpYutVcNbi2AbgiKUj+o+NcIYXn0rpgC/mBucug3J+4d1ze/ciWvG8mjl88D69ZFu3sA\n3lSIbByhvx+44w57pSG1AO75FeO20g1CUapLRcSfiNYD+ASAN40xHwx5ngB8DcDFAH4N4M+NMU9X\n4r3jhJtHH4wBXH21P5jrundc4ZdUz0TCpnqKgBvj3xCSSWDv3ujz8TwOBO/b528L4b4GEb9fd7ff\nbdXdXSjy2h9IUWqHSln+/xfA3QC6I56fC+C9Q18fBrBm6FYJIZsFVqywFbrSjsF1C7nuHXnMDfAm\nk8Cll7JLZ3CQNwOx4JNJYP584I03wn39slmkUsBZZwFu3F0CxLkcr/E8TiH1PP8GITUFrshHxTX0\nakBRxp+KiL8x5kkimjbMkssAdBtOLfoXIppERO8yxrxeifePE2GtGUTs29v9IrlypV3X388BXvfq\n4Ne/5ufkCuCSS/ixtjauAbj++vBzkDjC0qXA22+zyLvk83w8YGsH3PoBCUAbw+e1YgV/tbbaIjKp\nHtarAUWpDuPV1fMUAK849w8PPeaDiDqIaA8R7Tly5Mg4nVpt4bZm8Dxgzhy26MWvfuutVhyPHvW3\ncJgxgwU0keDbtjZ7P5kEtm1jcV22jEW3vZ2t+zDyeQ7qilXvbgC5HPD001w7kBzBfMjngUcfZYE/\ncMC+Ti7H97VbqKJUh5oK+BpjugB0AZznX+XTqQrBrJ+2Nr8rZ/58Fu0DBzjwKngep20+/rjt4TN9\nug0iuxW+fX02dfPGG60A/+u/2s3E82yDN2OAyy7jbB+x8HfvBp55hgvBhksTFfr6+MpkYMCmni5e\nzNlGwSwndQMpytgzXuL/KoBTnftThh5TAgSzfnp6rGsnl+OOnOvX+3P7AX8Tto0bWUw3buTXuvVW\nFtQNG2x66ObNhaItcYLJk4Ff/pKrfQF+nzPO4CDzfffZ9ceP89qJE/kc3YByWKbRjBnAjh32fj7P\nwWr38wLqBlKU8WC83D5bALQTcw6A/1R/fzSZjHXvpNOFmTYDA4U9ev7sz1hA3awb8bdns/xa8+cX\n+u9dcjm+Glizhls8uPT0cIDYxRv66+nsBG67jTeGiRN5I0ok/GtPPx342tf8nyWVstb9rbfyYxLo\nVjeQoowtlUr1/CaAVgAnEtFhAH8DoAkAjDH3AdgGTvN8EZzqOb8S79sI9Pb6WzAT2Wwb2QCIgL//\ne9uV0+3ouWMH8MMfsvC3tAATJgDHjoW/lzG82QBspT/6qH1u717gqaf86+VKZMIEDg5v3gycfTZw\n5pnsmtq8GXjoIeD114EXXvAf63m8aUi2T3c3X9FIzyEJdOvQGEUZGyqV7fPJEZ43ABZX4r0aBfF7\nHz3qd6F85CPAVVcBjzzCPngRfDfDZtZQV489e2wmkIh0Zyfn7a9bx0JL5Pfz9/by92+/7T+fqG6g\nxtgYgrBrF4v/l7/McYjPfjb82N5em+3jViNLoHvFCk0FVZSxoqYCvoq1giVP3gs45n70I14zOMgW\n/sKFbNEvW2ZjA3v28NVBMmkDrCLS+/bZGgHAf6znseWfzbIVHobnFRaLBRkc5M8gbquwtYkEP9fd\n7Rd+qS9whV9jAIoyBhhjavJr5syZptHYtcuYiRONIRJ5Ncbz+MtKrv9r9mw+btcuYy680K5NJIy5\n7jr+SiTs+mTSmFSKH5s4kY9bu9aYpiY+trmZXzPqPRMJY5Yv59ch4turripcd9119vMEnyPi15f3\nlMebmuxxwu232/NPJPi+oijRANhjitDY8Qr4KkUgKZeuFZxIsAskit27gY9+lC3otja2miXPv72d\nLXvX8pZAqqR8dnez+yWft9XEu3fbthBBjGFXzpNPAl/8It9+4xvA8uV2TTJpC9KkOlmQKwdpIe0G\ngFta+DjXspfUV/lMGgNQlMqg4l9DuELX1GRz7d30yDDEp79sGfv0v/AFdo8AnEvvCqwbPDaGff/p\nNL+vK/aeB3zsY/y68+bxOcm0r3S60Ac/bx4/JxtW8PM0N/Oa4bKN9uxhF082ax+T1Ff5TOryUZTK\noD7/KuMOZ9m3D7joIs6dB4CurpH96
4IxbGVLk7fubq7CdQO1nsd5/A8/bF9zcNDm2ruxhuZm63fv\n6PCfpxSdBfv2yHzgwUG+f+uthTULDz/sP2+36ZxceQRnGejgeEWpPCr+Y8xwmSrBPj5CUxMPT5c+\nOGGIBS2tF4xhMU6n+b1cd4s0YLvkEq7I3bbN3yxOzi2TKewfJMjz0i5a3EerVnG/IGktEZxHEBTu\n5mb+vAB/vkWLbNB5uFkGiqJUFhX/MSSYqRLsu+/28XEZGOCrgLvvtm6bRKKw734yyWtkTq9Y1+46\nIs6937+fU0O3bwdWr+bXBwp97CNZ2W77Cc+zVcKPPsp+/0mThk/JnDsX2LKFN6tk0g6Y18HxijK+\nqPiPIW7Tsv5+4IYbbEtkEWuxhMMs/OnTgWuu4e9bWoBPf9pazeJe2beP3TIimAcOFPbsP+ssLtIS\na723l6t4R4PbfuIb3wAOHrTPPfQQ8O1v8/crV/pnDQCF+fwDA8CSJXxensd9fnQAjKKMDyr+Y4hr\nJQPW/378ODdKO/dcroz96lf9xzU1sah+5CMslKlUYdYOwPfXr/db7729trc+EdcBtLfbfj+VcKvI\ne/3d3/kfP3wYOP98mzkkraFTKTuMxs1k8jx7lZLP2xbT0i4a4LiHXP24IyMVRSkPFf8xxLWSg0PS\njxyJ7oaZy3EKpSC9+sOqbHM5f4C0tZVFUoS+pYWfl8reKEqxrkWQw84n6JoKDqORK5f3vx+YMsXf\nQiK4AWSzfGUg3UWPHbMZTboBKEqZFFMMUI2vuBV57drFxVVRxVrDfTU1cSFWMllYLCWFWsH3uv12\nPmbiRC6OSqW4gMst7nLXy7qw13NZu9ZfmEXERWGplL84zS1Sc4vJ3IKzqC+3QC2s2Ky5efhzVJRG\nBlrkVXvMnQtMm1baMZ7HQd2ODvbdu5x2WrgbRLpk9vb6M3OkG2iwW2axA1Wy2cK6gUSCLfEnngCu\nvdbWC3ge9yC67TZ7jlJMNhLSMG7DBo5ZBGsDBga026eilIu6fcYI140CcBWuuDw8j90ezc2chSNM\nngyccw5nw4hIXnopB35XruTX2r3brr/5Zr/wB103bsxBOn3mcoV+/+AAmWBMQF730CG/eAeDtJkM\nu5kkiPud7/Bm4bqkPC+6SVywAG1wkFNBg7OG3dkFiqKMDhX/EinGNx5M8bzoIiv8gBXQadP84v+L\nXxTm4W/ZwkPY83l+reXL+RiZwztS8VVwUEpUDr+7LrihyGdJJvlLGs7dc48/OAsUtopw4xGZDHDv\nvZz1FLYBGGMrieXzSt2BuyFec436/BWlXFT8SyCswyRQKJrBFM+tWwtf6+BBf5okwOt7e4EFC9jt\nIVWvInrHj3Me/fbthecjFnVQdIN5+1GiGZXf734WgC3xqVNtGqcMihFGuoro6OArmYULgWefLXw/\nY/i5qVPtsYcOcQaUVB67XUkVRRkdKv4lEPSNd3fbFEp3vq4rgG6//JFw3Rkyb1cgKpxxe+iQPR9J\nq3TXVYKgmIvwRrVZHu4qArAtq198Mfz9PM+mrgY3t5kzeWNQq19RykfFvwSCQghY8ZUgpczNFQE8\nehS4666Re/QkEtZ/ns36g5xNTTZfH7CCKFO7gPAK4koQJubBFg/F9uIJG9wiwWFx+dx9d/gVVC7H\nk8QOHOArB90AFKU8VPxLIMyHvnGjFTNpriZNzQAWO6lglcKnILNns3ADLKyHDvnFceFCW5HrCi9g\n3TBjWf0aFPORXDthZLN2Pq/72WS6WNimlU77f27uz1fFX1HKQ8W/RIJCKN0w168vzKRxp1SJdRtE\nLHbAH1iVtshBH3eYG2a8hXAk106QYAM7z+PPuGBB9PlnsxzAlo3TDQIfPcpB9GDQW9s/KErxqPiX\nSVBsXH/1+vVW8IN+f2mvPHeu9d+LOBrDohZm0ZcqvGNFKW2W3QZ2RDxj2K3S7eriCmYR8+AxiYQN\nAh89aucFP/oo8OCDwI9/bIPB2v5BUYpDxb9MghlAYqVLf/swiIC/+AsebiLHuoHhfJ7z5YNplEK9\n9bdvbWVLXwLT+/ez715iIq6YA/y502kbD3CvcC66yP/abhsMdQkpSvGo+I8C183gunbc6tjdu4cP\n8K5eDbz9tvXfuwFeIraE4xLYzGQ4E0rSVwcHufgrLAi+bh1/7mXLrNXvXiW0tfn7AQmyUaTTY/95\nFCUOqPiXSLDoyQ3iJhLhw1SEU04BXn+9sNmZZO4Q2bm2O3YAO3fGx43hdhZ1axKCrRuefpo3VNdN\n1Ntrn5eroXXruFGdmw6by3GX1H37qhMLUZR6Qnv7lEgw11+6WBJxALO3t7CzJcAi//nPFw5Yl/m0\nPT3cH2fOHBvcHK7PTr3hzuK9+277c5gwATjvPLtONtLhhrZ3dHAM4OST/YVw8jNbu7ZwFrCiKH7U\n8i+RqAEsTU3W39/UZC17CewuX84C6E7dksCwkMlwOqRM44pbDxs3VuH+HIDCuEnUOEmAA8TXXhv9\nPpoSqigjo+JfImLBLlvmb7J2+ukcxOzt5efuuMNao9u2sfjL8SJIUe0ixBUSdInEiaiU2SASGHZ7\nE4XNQSACPvQhO8lMZwEryvCo+I+CTIbbK7vif/AgW6MSeHSvCqQFcdAKjWqlPDhoA6ONZL1KTGDD\nBvv53boAIttULohkEREVBokVRSlEff6jpL3dtnhwkbbJrkAlk5zHH/RBiwvJ9W2HPdYIhMVS3NTX\ngQF2tUmrh6i5CJJB5AaJFUUpRC3/UZLJsGBJde/AgD9t8f3vB844g7/fto0btUnfn5GaoNVCEdd4\nEzZ7wLX83SupfJ67m4YhdQGNsmkqymhR8S8D8Vu3t/MmsG6dddk89xzw7//Og8vFWi22CVq9FXFV\ngqjZA+k01zw89ph/cw276rrwQr5ta2u8n5+ilIqK/xCl9IcJrnU3gRUrOEc/n2c3xdNP+/v0qEUa\nTdTsAckMkgyqpiZO8wxeETz6KD+2c6e/QE7aSAOa/68o/00xg36r8TWeA9xLHWDe1MSDxVMpY+bN\n40Hjcoy8lgwzJ+J17hqldHbt4p/hvHl2EH3UMPhEggfYy3GplA5/VxoHjOcAdyL6OBE9T0QvEtEt\nIc//OREdIaL9Q1/XVOJ9K0WpA8wlGNnfz2mH993HM3plqpV06QRYcgYGuCmZWpyjJ5PhttazZ1s3\nWtgoSPH5p9Pc/lqqhQUd/q4oTNluHyJKALgHwMcAHAbwFBFtMcYEhhTi740xS8p9v7FguP70rssA\niJ7K1d/P63p6Cvv6EKm7p1IEe/wDfH/OHPb19/bymk9/2j93WKqum5r0d6EoQGV8/rMBvGiMeRkA\niOhbAC4DEBT/miUq6yab9ffpkfRNovCmbevWhW8Ol1yiVn8lyGa5d8/AgM3nB3jDXrHC/oyvv543\nY4DXzpvH37/2mo6BVBShEuJ/CoBXnPuHAXw4ZF0bEZ0H4KcAPmOMeSW4gIg6AHQAwNSpUytwas
UT\nlmEjbRYEEfaowSyy1vNYmGT4iFT3KuXhunCkp89ll9mZCFJhfTBgdrz1Fo+AlAD8Sy9xqmgjpdIq\nSpDxyvbZCuCbxph+IroWwEYAfxRcZIzpAtAFALNmzRqmIfL40Nrq79MjDDeQnYiblslownTa+phV\naEpjpAwsY4CtW4FHHuHfkTG2wtqlr8/2YsrneX6A5/HvKS5dUxWlVCoR8H0VwKnO/SlDj/03xphe\nY8zQhTgeADCzAu875kgh1wc+UPwxnsfC39HBorVsGfC5z2mXyVKRvkfuz6693bp6BOnkKVdickUg\nLbKbm9nVE9wQ4tY1VVFKpRLi/xSA9xLRu4moGcCVALa4C4joXc7dSwE8W4H3HRcyGfblp1J8P5Gw\nxUTCeef5m7BJa4Fis4iUQsJ+dpkMD6x38bzCBnhEwL33Al/8Ih/X0QHcdJN/rVYCK41O2W4fY8wg\nES0BsB1AAsB6Y8xPiOhvwfmmWwB8moguBTAI4C0Af17u+1aaKBeDPP71r/OQkDfeAL73Pft8IgFc\ndRX7lIPZQsNlESnDE/Wzk6Ew/f0s5Jdcwj59d5yjMXagC8AB4PXr+ftEArjiCuDIEWDGDL9LTgfB\nK40EmeFmDVaRWbNmmT179ozLe4W1VhYxcKd2Sc5+MI3zi19kwQj26Zf2BL29KiijYbgNWXoq5XI2\nldONxYjLJ/g7k2C8TBIT339nJ7vogn8DilJvENFeY8yskdZpewewwEhA8Ngx4JprgAce8LseRFiC\ne6VYpSP16VchKZ2oHkcSi3ELvf7wDwutfzcW4D4ezODq62PXXpibSVHiirZ0BlvnrtV48CBw/vn8\nuNteuamJv0+lOHf8uut49GKxffqVyhFsff3bv124JuyiNuqxffv4tRqtlbbSuKjlj/De7wMD/Hhn\nJ3eVbGsrHMEYhfr6x55gYV5wCtiUKcDhw9HHn3468MEPAg8/bDOEFi3iNhzqolMaARV/sIUfrNpt\nauLHly1jl9Djj3NwUWbxDkdUxbBSWVy30IEDtrCuqYlTRJcu9Vdnu1d3N9/Mm/n27fz79TygpYUz\ngxSlEWg48Q8GEbu6gBtu8Au/5wF3382Wf1+ffW7zZi4oCnP1BGnEnvzVIpvlTdoYDv6uXs0i/tJL\nwFe+Yh9ftoxHPba1WZHv7ORmfbkcPz99Oj+uG7cSdxpK/Lu6gCVL+B9dMjzkvov4gN94o9BH3N/P\nIqEzYmsHibHk83wF19vLG8Kdd9rf3+Agt3TYvt1/bG+vdfscP87uI5klrMF6Jc40TMA3rB3zpk0s\nCkGSSc7+2Lw5/LV277YtnJXqEzb3uKfH7+ZJJMJjL8FjAQ3WK41Bw1j+3d1+oU8kuMjn0UftY1Om\n8O2JJ7J7wCUYE9B0wNohKsaSSll//t1382MrV/rHRLa2Fo6PdC1/DdYrcaUhxD+btRWeAAu/+PQl\nEEhks0PCskSi8vuV2iBsBGRQ1KX2wp0H0NQEzJ/vH++owXqlEWgIt48UBAEs8osW2cZrqVRh068o\nPI8nSUXl9yu1QzCw393Nwftcjl1/UrjX3w+sXetvvJfJALfeyt+vXKnuPSWeNITlH8y7P+EE4KKL\n2O1z9tk88HukLhduGwAV/dpGKqzF5XPFFcC3vhX9O5ZqYJnE1trKqaNucoAGfpW40RDi77oAjh7l\nfu6A398fBREPDJk9W90A9YLbriOfBx58MHotkZ0KtmEDx4WSSb6Vq8X+fo3vKPGjIcQfsD7hiy4q\n/hgiYMKE4gq7lNqhtbWwqMtF2jobYwfAnHYa8PzzfEww9TcqU0hR6pmG8Pm7zJhR3LpkErj2Wr3c\nr0cyGeCeeziYG4zneB7/bl3yeeC558I3C0kO0L8BJW40jOUvTJpk0zaJeErXb/wG+3zd9M5PfAJY\ns6Z656mUR0eH7cUkbbXdW7f1AxAdD5DkAO31r8SNWPfzD/uHddstJxL8Tz84GJ7KqX7e+JLNcuxn\nyxZr8Tc12b8Huf/DH/L32qJbqReK7ecfW7dP2AxY4aKLgDPOAE4+uXA4i5DLaXVnXBGj4K23rPAT\n8axfGfcI2Ftt0a3Ekdi6faL+YVtb/Zf7Yeh81/giRoHbsA/gOMAJJ9hGcAAbBnLl6KYKp9O2Uliv\nAJR6JZbin80Chw7ZwJ7ncZ+e3bv9U5xckkme4NXSomMX44wYBcGrvdNPB7761cIRnek0H9PZaeMF\nOu5RiQOxE/+gT//cc3m83+7d0cfMnq3FW42CWPFSByCcdBJn/Lice26h0IddUerfjVKPxM7n7/5z\nDg4C//ZvIx+zcGHhP3A2q6X9cUQK/m67DbjqKr4qJAJ+/GN/CmhTE3DmmYVCH9ZBVFHqkdhZ/kHL\n7q23Rj4mOMZRB7DHGyn4W7mShT+fZ0Nh0SK7pr2dbzdutG0i0mmd0qbEh9hZ/vLPOWdOeIHPvHn+\nx8OsN/fqoa+vcD6sEg+CVnx7O9d2rFljRf3ss23657JlPBBIhV+JA7HN889meeBKf799LJXibpyA\nFXS3la97rJsVJMfpP3v8iCreCvv7AdgdlM/rFaEydpRbUFhsnn/s3D5CJsN92teutdW88+fbH+Zw\nP9RMBliwwB47OKiBvbgSNWtZrv6CSEGgBnuVsWA8Xc6xc/u4tLTY1D1jOI+7WNrbuambBvYaE2kO\nFySZ1L8JZexw506MdUFhrMW/t9d2cASAu+4qPntHYgdf+IJe3seV4TK6Mhng3nsL40Of+QxbZkuX\n8j+mZoMplUImDorBmkyOrYERW7cPwD+4RML2asnlgBUr+KsYMY9yCSj1T/DyWoq4XD+rNIeT+FBL\nCwd9+/p4FgQRxwAWLAiPHSlKKQQnDrpu6rEg1pa/tPZNJPh+Pg/s2FHY60dpPNyMrv5+ntr1uc9x\nkPf66/0jHSUDqLeX17quxOPHC8dAKspoEGNVjApJNx4rYi3+AFtu7qV7Pq/NuRR/mqfn8SYgG0GY\nmEvLENeNKLgBYEUpB/n7Cvs7qzSxdPu4qVLd3f5+PkTanEvxF2tJvx5p9hbM5nFdRMF/ymSS12sA\nWCmXnh6bTTYeGYaxE/+gL/fss/3Pv//9/I++dClvCk1NmrLXqLgxHfHtr1/PVwAi5tksx4iCvYAA\nvmL4xCeAX/8aaGvTvyGlPILdY8famKiI+BPRxwF8DUACwAPGmC8Fnk8B6AYwE0AvgCuMMT+rxHsH\ncYd39/UBL7/sf/7884F9+2wO9/Hj/E+v/7iNjWwE7e32qvHAAWDxYt4MomohH3mErbSdO3kDOXAA\n2LSJx4VOmqRXlsrIdHXx30xbG1+NjldHgbLFn4gSAO4B8DEAhwE8RURbjDEHnWULAfw/Y8zpRHQl\ngC8DuKLc9w4jnbYWmjHA4cP+51taWPwVxcV1Fd56K99fssRmioWRz/PVo8SRVq3i1uGAZgMpxdHV\nxbPCAf6bWb6c+0kdP863tV7kNRvAi8aYl40xxwF8C
8BlgTWXAdg49P0/AriAaGxCGsHcfhfP4+fb\n27llAxHfjnVUXaltwqa+dXcPL/xCImGLvl57zf+cZgMpI7Fpk//+hg3AsWPjU+RVCbfPKQBece4f\nBvDhqDXGmEEi+k8AaQC/cBcRUQeADgCYOnXqqE4mmNtvX5uFXi7Dn3hCG3QpTLBHv/j+g64ezwNO\nPZWzfozhYO9nPmPdOwcOhM+N0HYQShjZLLumXY4csd97XgMVeRljugB0AdzYbTSvIbn911/vn8/6\nsY/5i7u0gEsRgoE2wBbbAPz3I8bDN7/Jrp077uA1q1fbS3P5exKf/9tvsyU3OKjZQIqfYPPIMFpa\naj/b51UApzr3pww9FrbmMBElAfwWOPA7JnR08O2SJfwPmkoVX9WrNB7BHv2A7eMvBkQiwVXAAHDn\nnfbx/n5/Smhvr/9vzQ0g699f4xHVobOnJ3qkrLBw4RieGCoj/k8BeC8RvRss8lcC+FRgzRYAVwPI\nAvifAH5gxriXtJTm6z+eUgzBK8HHH2cR37GDhT6fZ2Hv6fGnfBKxG6irK3y2r15hNi7DdehsbeVk\nANfyb27mv6H9+znzR4zYsaJs8R/y4S8BsB2c6rneGPMTIvpbAHuMMVsArAPwd0T0IoC3wBvEmKP/\neMpoyWRY/HfuLMy7TqVsn39jWPg9z24S6t9XgOHnPWcy7DJct467B5955vhnhFXE52+M2QZgW+Cx\nzzvf9wH400q8l6KMF1EjGyUV6Q70AAAfCUlEQVQX+/77bWwgn+cNwPPUv68wwVhSOs2xSID9+W6h\n6Ze+ZF2H4+WtqKmAr6LUGu7Vo/uPOXVqYTaQZABJbCCsfch4/nMr1SXYQmTpUuvmkStFwNaITJ7s\nrzAf61byKv6KUgRB/+3SpfwP7Hb4NIb/cfftC/f/j+eUJqU2EONh5Up/gDfYKmTLFv9j4+E6VPFX\nlCIItoC+6y7r6hHhB/ixgwft2r4+tupmz+bAcJQPWIk3YQFegahwMxgP12HsWzorSiVwW0ATce6+\nBHiD/7g/+pFtI24M1wV89rN8Sa9jIOPLSJPhenqAefMKOxCceGLh+qVLx94wUPFXlCIQ/+2iRXzf\ndfcEMQY4/XT/Y/k8W/zz5+to0DgS1iIkSCbDV4BBLryw8LH9+yt/jkFi6fbRoJoyFmQynOXjVv8G\nIWKr//nn/Y9LFpA2eIsPrs4Ml9bprk2n/e1nPA/4zd/0B4ABzvMfa2In/hpUU6rJyScDr7/uby1y\n2WVs8aXTvHl0d+smUO+EzYCO6sUfXHvFFcBDD/EVoucBTz/tf+0zz+QC1bEmduI/0g6sKOXQ3s6+\n+4EBO/7R5bXX/K6gZJLb9AL+Xi4bNnBzQf3brE+COtPbG14TElzb38/9oeRvZHAQeOopvk/Et889\nx5uFpnqWyHhPw1EaCwncueMf3WpfV/gTCeCP/5i/D/ZyUcOkvgnTmaiOAu5aYwoTBOQK4D3v4eFT\n41UlHruArwTmNKimjBWZDA986ejgv7HbbgPWrOEyfcnkmTePrf6tW9mKS6c51U9Qw6S+KUVnZO0l\nlxQKP8DCn0oBN9/Mt+OVDRY7yx/Qnj7K+OH+rb30EvCd7wCXX849/rdutbn++/axJScj+tTnX/+U\nqjPf+17hYzNmAL/zO7aR23g2o4yl+CvKeNPVxcVcAN8uX86Wv8z/Xb+eBX/NGnuMZqU1DsFusABb\n+M8+y0OAZAb0eBqusXP7KEo1CI7j27+fc/qloGdw0D+Sr5i8cKV+CRZ8tbayMSB4HruBBgZsIHgs\nRzaGoZa/olSAtjYewO3eB/xtH9Jp+7xmpcWXsDTQ3l7g4ouBhx+2mT2AvRoI/n2MByr+ilIBZPDG\nunWc6y++Wyne8TwWALfYR7PS4kkwtVPaOCeT/LuWsZ6TJxf+fYwnKv6KUiGmT2f/7d69wPbtbPGl\nUv5+7mIRJpPA3LksABr8jRfptG345/r5BwfZSJg6ldc88oitCE+lxt8AUPFXlAoRVfgjGT779tnn\nczl2AUyYwOKv1D/ZrH/IT7CBWz4PnHACd3f9m7+xdR8yH3q8DQAVf0WpEBLUy+f5Np3mzJ+tW+2g\nF3leCsLcAfBKfSKiv369v2WzW7Ur3HUXXwG4j8l86PFGxV9RKogb4F282DbwAvj7WbPY2n/ySbtu\nvAN9SuWQ4G5fX3iH16lTueVHLmfbgQTXNTVVJ+aj4q8oFaKnx/5zu60chHwe2LOHRUAsQiJ2Byn1\nibj6RNCDlv5f/ZUN/h89ypY/wIJf7ZiPir+iVAjp4dLfX1jQ46b2BSeArVunQd9ao9gCvKCrb+FC\n9uvv32+rdoULLrBXAF//uv+5aqDirygVQnq4rFgB7NhhN4AzzwRuvNE/wNu1DgcGbFBY2z9Un1Lb\nwrtWf9TvTa4Q8nleVw0ffxCt8FWUCpLJsPi7TdxefJEv/RcssFcAQb/vk08C558P3Hcff7W2atVv\ntQgrwBturbj6crnwtdksZ/jU2ghPFX9FqTCZjL+1g4hCezsHe72Q/7pnn/XHCQYGxr/cX2Hcec0i\n1FHzecPWushVxP338waxaFHtdBtWt4+ijAHt7cDGjYX93sUt9Nhjfuu/VjJAFPt7Ep8/EO0GCq4N\nirp7FQFw9k8tCD+g4q8oY0KUKIhb6Ac/8KeBErGwVDsDRGHc7prXXw8cO8bfHzvGcZlMxt+qA+Dq\nbrkvGVwtLbXbxkPFX1HGiKj2vJkMcNNNwB138P1kkuMBKvi1RzYLPPCA/7F161jUZYqbm9kVTPVs\nbuZ1kv1TS79f9fkryjiTzXI5v2R+rF5t+/yH+ZWj/M3K2CMBXZfBQd4A+vrCRzK6HD/Ouf2PP86b\nQC39DtXyV5RxprvbpnzmciwkgK0ITiY5+0dcCxdcwBam5wH33FP9/PBGorXVVuYKngc8/XR4RW8Q\nIj52vObyloKKv6JUmd27ufJXrMjBQeCWW4CPf5xTBMW1kM8DS5bYiU9KdZg8GXj9dXvfdfUE3T6f\n+hSP9lSfv6IoaG9na99N7Qy6D3bu5C8i/3OSNqriPz709BRa+K++6r/v1m64az0P+P3f5yu6WhzX\nqeKvKONMJsNtAO67zz4mQz0EEZGgmEjfd53/Oz60tvLPvL+f7wc3aSHMNSS/q/Gcy1sKZQV8iei3\niegxInph6PadEetyRLR/6GtLOe+pKHGgvR2YOJFFIpnkgO/atcCUKeHriYA5czhwCOj83/FCUnZv\nu41/R2EFevk8u+IEz7O/q1oUfaHcbJ9bADxujHkvgMeH7odxzBgzY+jr0jLfU1HqHhGVSy8FzjqL\nH5s+HXjzzfD1nmdTBbu7OdOkmPYDSvlkMmzB9/YCn/xk+JpnnrHfex7XctSy8APlu30uA9A69P1G\nAD0A/rLM11SUhuDAAWDzZv5+927gvPMK0woFYzhV8KWXbKsAgK8aaimIGEe6ujjQnstx5XXQRQcM\nX61dq5Rr
+f+uMUbi3m8A+N2IdROIaA8R/QsRzYt6MSLqGFq358iRI2WemqLUNps2+e9LgDeMfJ79\nznfc4d8g5s+vfQuzXgirp+jq4grfgQGbrulm9pxySuHvzJj6uBob0fInoh0AJoc89dfuHWOMIaKo\nPe/3jDGvEtF7APyAiA4YY14KLjLGdAHoAoBZs2bVyf6pKKOjrQ149FF73xjgne8EwuwezysMKiYS\nXGm6cqUGfsslrI0zwBZ/sII3meTfQ3Mz8PnP8xWZTPKq1jD20TCi+Btj5kQ9R0T/QUTvMsa8TkTv\nAhDqsTTGvDp0+zIR9QBoAVAg/orSSHR0sBvnjjuswIQJvwR729qAT3/aZp7kcsANN/D3UX3n454V\nVKnPF2zj3N0NvPyyv/8SwOK+ejX7/9Npvu3s9N+vm5+1MWbUXwC+AuCWoe9vAbAqZM07AaSGvj8R\nwAsAzhzptWfOnGkUpRHYtcuYCy80xvMkU9z/lUrxGmOMue668DWJhDG33174us3NxhDx7a5d/HX7\n7fb16pldu4yZOJE/+8SJ5X0m97VSKWOSyfCfM2DM7NnGrF1bufeuNAD2mCL0u9yA75cAfJuIFgL4\nOYA/AwAimgXgOmPMNQA+AGAtEeXBMYYvGWMOlvm+ihIbpNPnzp1sdSYSwDnn8FXASSfxJDDpGNnS\nUlhFCoRXj7ptJI4fB1atArZvL35CVa0TNnRltJ8nk2ELft064D/+A/j5z+1z06YBP/uZvb97N7B3\nL/8OarFtQ7GUJf7GmF4AF4Q8vgfANUPf7wIwPbhGURSL2wJaBn0PDvKQl507/f7kD32Iu0QK06YB\nDz00svi89lrlxLIWkEEqo2mdEHQXZbN+l5rLxImFG24+z5u0tOKuBx9/EK3wVZQaQYT4vPP8vmYR\nHbEyzzmHrwQk+Pvaa+Gv194ObNhgxXHhQj6uFvvMjIaRBqlE4QZ3k0nOmALsVVKQF14Ib9X89a/X\nmY8/gIq/otQQPT2FOeSSV+55LDrt7fz42rX+2bEiQK5V+8QTfnGcPj1eAeDRtE5w3UW5HP8cm5p4\nI5B+S55nvfyyEScSwLnnshsuDrMXVPwVpYZwe8l4Hg99mTQpPJMkOCYS4Lz0xYt5s0il2DJubbV5\n57XaZ6bSDJcFJO4iSc+UDXTRIrumpaXQDWQMd1q99dZx+ADjgIq/otQQxboywtZls5yXLpZqfz8H\nfd1Not6DvMUQlrPvfmb52XV3A+vX25z9lhb/Brtvn7/5HhG32M5m4/EzVPFXlBqjVOtcMoEOHSrs\nLAnEK8g7HGLtuzMQ+vs5kyqs187UqXbE4owZ/L27YbS0FL7H/ffzZhqHTVTFX1HqEHfCl8QDkkn2\nS0tm0D33sI/ftfzT6XhWBLvWPmDjJvk88NhjnDElgh382REBO3ZYF5BsGO95j7+Pj2yscdlEVfwV\npQ5wfdgAi5M7PDyf52Cl9JlJJOzEr85O7iMUZt3Wu4AJbhA3iDF+wZauqO7MBLdfTz7Pm0FYPYUE\n3es9UwpQ8VeUmieYmigZKGK1homYZAABLPj9/X7rNi7WqxAM4rp4Hm+Ghw5xQHz9+vDOm0TAaadx\nW4ewoS3Sp78e2jUXQ7ldPRVFGUOyWWvli99eOkwSRXcBleCkWLkyA1hcQnGxXgUJ4l57LWc5eR6n\nby5fzj2UiNhf7wbEAf/Pzxjg8sv5+DBSqfgIP6CWv6LULGG+6WB3z6je8fk8568HXRdE7Mu++eb4\niJgggfL2dn8W1MqVLPi5nN38ZOMMuonefhu4+mrg4EHgySft4/Pm8UYSp5+Zir+i1Cjix5aALmDd\nNuKbFjFLJOx9sfJlvYsx7NZYtszGBOKGfCZxe4lLKKx2ws3lTya5InpwkNcvX86ZQG1ttjjOff16\nR8VfUWoUt3eNWPsi8IDdBN73PuD88zk1cdMm/4yAMEbTjKyeWkOH5fl3dtppXKtX22D39OnsGhPu\nv9+61yZN4kZ4I9UN1Csq/opSo7iFXOm0zdRJJGxrAmO4+dvzz7NPeunSaPFvarKujjCff5TAjyR+\ntbYxuJk/btqmXBH19bHgi5tIzrmry7rW3J9PJbuH1hIq/opSw7jiJK6HdNoOcRFca95l9mxu6CaV\nq0DpAj+c+NWSVSybUDpt3TyS5+85qS3GsHvH7c+TzfLmKt06Ozvtc+V0D61lVPwVpU6QjWDlyuj8\n85NP9j/+6qt86/ajkUInt9hrOIEP+szTafta5VrFlbpqCG5CS5dym+vDh23vHgnySqrrqlW8OUrv\nI4mvEPFm6f68RtM9tNZR8VeUOsNt/pZIAJ/5DGepvPEGPy8zZo1h8b/2Wn68o4Nvw6z14axbKRRb\nvJhf1w0Wl9tTvxR30nAbhbsJ9fX5R2O6SOzEGGDzZmDLFv5ZdnYO/zmCQeQ4bAAq/opSZwQtUQD4\n6Edt1ornAe94B/CrX9ljNm2y4t/T4+99I69z9dX8fFi74t7e8MlVpVrFroAX605KJICLLwa2bbPx\niuBG0dpqYyFusZtABEyYAJx9tj+FUz5Pb+/wn6OW3FuVQsVfUeoQNxawcqV/EEk+7xd+gNMVARax\n3bv9bSGOHvULW3t7oZU90pVBMUIYFNDhrO1gz/3Nm+1zYe6lTAZYsMDOOHBJJLhds7RpdhE30NGj\nw3+OOAZ9VfwVpc5xffJhELGbRsS3r88+53mcy+4KW1gbaGD4K4NiCArocNa2a8kHP0tUptIbb9hG\nbO4GsGgRsGaNLfaS15FxmMaw//+00+zVUZA4Bn1V/BWlzslkeGLXNddwZWoY4qs+ftwvjIkEXxXI\n8PjmZrsuajOQSWJRuFk3vb3+26CAhlnb2Sy/p9QxSCFbUxOPXJT3l4A1wLdy9RNseXHCCXaN+/7y\nWQXXNRYkjkFfFX9FiQGZDPDAAyxMMopQRH7CBCuSEgwWZG0whuCKPVC8yyOsJYU7fL6zc/i5t+7V\niZx/sKFa0H109dV+t1fQ7XPXXdyeISjgBw6wC0wQ11gUcZuCpuKvKDFBUja7uzmPfWCALfulS63g\nzZ/vn04FWIvXFbbgZrB+vc2Bb22NzrxxUyaBwuHzvb3Dj0GUYLTbYjnYUC3oPpIsJ0Es/2CH02BR\nl9xu2sTCH2X1xxUVf0WJEbIBSMtnALjzThZCCbI2NVmLHwi3eF2RzGatoBKxxRw2FyCb5U6i0nba\nTbUsppNoNgv80z/5jyPyF1wBNh4gm9HkyYWtrV2Syehq5nSan5s+Pfq84oqKv6LEjKieQGJ5//CH\nwC23cIO3T31qZItXNhOZI7BpU2FMQObhDg6yEF96KXDGGexyGRzk8wiKuEs2y/2J3E0J4Pd0C64E\nEftcjn36iYS/VbPLggXh1cyuaymV4rhJnNw6I6HirygxI9gTaOlSO+Xr0CG23J96ioV79WrOchnO\nDx8MlM6YAfzgB7ab6IYNhYHk73+fb2XTiBJxobu7UPiB8KuFnh67N
pfjDeamm/hWNjr3+GCAOuha\nAuywexV/RVFqlmJaIojbRlw2YrVL8zJJh+zv5z5BuRwL+b33Fl4JZDK8gXznO8CHP8wbhrRLOOcc\n4Ec/KnS15HLA1q328TDXy0iceSYHsV33k2xowdm6kybxFY08v28fPxeWlppOR89BaCRU/BWljii1\n0tS1koHwlgeS/ZPL8UYQ7PP/l3/JefAA8OKL9nFjgH/+Z/6eyA6PN8bvhiHiQDPgT890N7D2dmDd\nOnuuTU2Fwi9ZRDKQ5oUX7GfavZtfS4LJslGE/fyWLAmPC4yUwho3VPwVpY4otdL06NFwwQfsLIBn\nn7WP5XLs/li1CnjtNRbUO+6Ifv1gcPaee9i9c/So3TCMYb+8266BiDeHZJI3hpYW7j4qmTuTJxd+\nbndgvQi/sHkz996XgrThOpQG3UunnAL8wz80lssHUPFXlLqilErTbJYzfQQi7m2zfz8LbyIBXHIJ\n8NOf+nP/3RYJbh78SAwO2lTOlSuta4aIXUYi3m4Fbi5n308KuSSQu3GjFW63k2gUbkvrYO8it0Np\nsHL4qqsaT/gBHeCuKHWFBHO/8IXiXD6uZZ5McsbN6tU2C+hrXwsf9RiGuHa8CNXwPA4oZ7O286jn\n8eu99JK/6Cvs/USs3e6cCxcCf/In3JNnJD+9MbxZuVc7+bx/48hkuN2De86TJg3/unFFxV9R6oxM\nhq3rkaxVV4CTSeDuu/mYYIfOKLeQCxG3ht65k/3tUdx/P7tcAN6c5syxG4DncWaRWzMw3GYiU8o2\nby7Mzgkjn+e1rpvK8wqzjNrbgYkT+b1TqXj06RkNKv6KElPkKuG227iNcUeHLcRKJPiruZk3hjAS\nCb4lYneMZM5cfnn4WnHXHDvG/v5MhitzUykrtJdf7i/GuvJK3iCKsb6DPXuikM1MWkqE9eYv9uop\nzpTl8yeiPwWwAsAHAMw2xuyJWPdxAF8DkADwgDHmS+W8r6IoxRGs1JVAqOcBM2eyW2XfvvBWyOIX\nlzTRAwf4tcKE+owzOHYgbN7MaaUdHf5WET09ftfPN7/Jt0Hr303lBHjzmDkT2LOnMI//4ouB733P\nX+RFBMyaBZx1Fp+3DGmXDSxufXpGhTFm1F9g0X8fgB4AsyLWJAC8BOA9AJoBPAPgzJFee+bMmUZR\nlPLZtcuY22835rrrjEkkJBnTGCJjJk40Zu1aviWyz4V9eR6v3bXLmFTK/1zYsSecwOuFtWuNmTw5\n+rXPO8+Y0083ZvlyY+bN8z8/ezYfn0rxezU18efZtct+xnnz+PN5njHNzbzW8/yvk0rZY+IKgD2m\nCP0uy/I3xjwLADT89dhsAC8aY14eWvstAJcBiGg+qyhKpQhOxEombbaNMf6++itWADt2RMcA8nng\n+uu5N/4TT7A1/fTTXC0c5o9/+22OEzz5JPBf/+UfyBLGOefwVUU6zdW6Lnv2AM88468Ydgu4Mhng\nu9+1+f2HDnH8IfhZ4jKIpRKMR6rnKQBece4fBvDhsIVE1AGgAwCmTp069memKDHHrQsAbKbL+vV2\nJKIUWq1YYfv6J5PA3LnAz37Goutm5CxezIK+Zg2L7XnnRffVAYAHHxz5PPN5jhN4HrtsgkNcJBNI\nGBy07RiCFc/y2MaN/toAID6DWCrBiOJPRDsATA556q+NMQ9X8mSMMV0AugBg1qxZWoCtKGUSrAsQ\na7m9vbBFRNhsYMncccnn/S2Sb7oJ+MpX+LlkEpg2rbAIazjcGICkg460ToiqeA72Nxqu3UOjMqL4\nG2PmlPkerwI41bk/ZegxRVHGmKgJVK6FLC0X3EBoNsttm48d4/VEVpTdDJpslmsHXPE+ejT6fCZN\nYqv+l7+0j8lr5/O2WZy4d1zc+5J9NFzFswZ1h2c83D5PAXgvEb0bLPpXAvjUOLyvoiiIFsEoqzms\nvXJzM3DjjVwd3NYW3S4hlwOOHIk+ly9/mXv4uJXDJ54I/MEf2PuPPMK3RMDUqcArrxS2kVi40J5D\n3Gbrjhflpnr+CYDVAE4C8H0i2m+MuYiITgandF5sjBkkoiUAtoMzf9YbY35S9pkrilIWUVbzqlWF\n/W/mzuXK4P5+bucMcBpna2u4OyaK3l4Wblf833yTg8Fh/v7Dh9mVJMNpJHdfmrDFcbbueFFuts93\nAXw35PHXAFzs3N8GYFs576UoSmUJTsSS8Yxbt/rXybQstzfPDTewH729nQe3jJTJA7CrxhXor3yF\n2z64LqMgxvAwlqlT7SD4oMire2d0aGM3RWlgxI+fy3ExlLR+cLnySv9aWb92LWfUdHayq8bNxgm+\nvjH+4zs6uHV02LB391ix8lXcK4+Kv6I0KOKvl7YMixdzS+ZUyp8i+eCDNhALhNcJPPGEP7PmjTds\nW+auLlslHAzIBjNyJAU1kWCLPyj8xQyyUYpDxV9RGhTX7QPwbVTBl1jmw9UJhIlxV1d0h02X6dP5\naiAsBdWd4BU1OF43hNJR8VeUBiWT4U6fixezMEsKp1vwJVcAnjdynUCQbJaHvYs7J9hh053O5Xl8\n1dHRET5sPWwYvfTuD3sNZWRU/BWlgRGh3LTJn8IZdMkEA61RdQKCK+wi/MEOm+50LgkiB0dIuhlJ\n8jpE9ooj+BpLlhS+hhKOir+iNDBSzHX8OFv6rnC6ufxhFv5wdQIrVvivGubM4cfc15A0UUFGSAbX\nuHn8nZ2FG1FwmLv27ikOFX9FaWCGq5AdaVh8dzdP25LAb9AN4+blB4Uf4PuXXDJ8muhIefyZDLt6\nlizhz9DIw1lKRcVfURqY4WYCj7QxbNhgUzOlTkCOCbP4wwKzy5cD27Zx1pG0bAgyUh6/pI1q0Lc0\nVPwVpYEZzrIeaWOQTp5EnJYZ1m6hrY3XHjgQnqmTyfDz5Qq3FnqVjoq/ojQ4UcJZysYQ1m7BTc0M\ny9TRBmzVRcVfUZRIRrMxyDErVw6fqaNUFxV/RVFGRXA+cHAjKCZTR6keKv6KopRFMQNVVPBrDxV/\nRVHKQgeq1CdetU9AUZT6Rtw7iYT68+sJtfwVRSkLde/UJyr+iqKUjbp36g91+yiKojQgKv6KoigN\niIq/oihKA6LiryiK0oCo+CuKojQgKv6KoigNCBlpyF1jENERAD8f5eEnAvhFBU+nGtT7Z6j38wfq\n/zPU+/kD9f8ZqnH+v2eMOWmkRTUr/uVARHuMMbOqfR7lUO+fod7PH6j/z1Dv5w/U/2eo5fNXt4+i\nKEoDouKvKIrSgMRV/LuqfQIVoN4/Q72fP1D/n6Hezx+o/89Qs+cfS5+/oiiKMjxxtfwVRVGUYYid\n+BPRx4noeSJ6kYhuqfb5lAoRrSeiN4no36p9LqOBiE4loieI6CAR/YSIbqz2OZUKEU0got1E9MzQ\nZ/g/1T6n0UBECSLaR0Tfq/a5jAYi+hkRHSCi/US0p9rnUypENImI/pGIniOiZ4mopvqexsrtQ0QJ\nAD8F8DEAhwE8BeCTxpiD
VT2xEiCi8wD8CkC3MeaD1T6fUiGidwF4lzHmaSL6TQB7Acyrs98BAXiH\nMeZXRNQE4J8B3GiM+Zcqn1pJENFNAGYBOMEY84lqn0+pENHPAMwyxtRlnj8RbQSw0xjzABE1A/gf\nxpij1T4vIW6W/2wALxpjXjbGHAfwLQCXVfmcSsIY8ySAt6p9HqPFGPO6Mebpoe9/CeBZAKdU96xK\nwzC/GrrbNPRVV1YSEU0B8McAHqj2uTQiRPRbAM4DsA4AjDHHa0n4gfiJ/ykAXnHuH0adCU+cIKJp\nAFoA/Li6Z1I6Qy6T/QDeBPCYMabePkMngOUA8tU+kTIwAB4lor1E1FHtkymRdwM4AmDDkOvtASJ6\nR7VPyiVu4q/UCET0GwA2AVhmjHm72udTKsaYnDFmBoApAGYTUd244IjoEwDeNMbsrfa5lMkfGmPO\nAjAXwOIhl2i9kARwFoA1xpgWAP8FoKZikHET/1cBnOrcnzL0mDKODPnJNwF40BjznWqfTzkMXao/\nAeDj1T6XEjgXwKVDPvNvAfgjIvpGdU+pdIwxrw7dvgngu2C3br1wGMBh54rxH8GbQc0QN/F/CsB7\niejdQwGWKwFsqfI5NRRDwdJ1AJ41xtxZ7fMZDUR0EhFNGvp+IjiB4LnqnlXxGGNuNcZMMcZMA/8P\n/MAY87+qfFolQUTvGEoYwJC75EIAdZMBZ4x5A8ArRPS+oYcuAFBTSQ+xGuBujBkkoiUAtgNIAFhv\njPlJlU+rJIjomwBaAZxIRIcB/I0xZl11z6okzgXwvwEcGPKZA8BfGWO2VfGcSuVdADYOZY95AL5t\njKnLdMk65ncBfJdtCSQBPGSM+afqnlLJLAXw4JAh+jKA+VU+Hx+xSvVUFEVRiiNubh9FURSlCFT8\nFUVRGhAVf0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUB+f8FvkT+M2urzAAA\nAABJRU5ErkJggg==\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztnX+YVOV597/3mdkdeNNa0tGWKFIS\njUlsuMIKEqc2uqlEg41K3bfVxPddC8j6A4jEq1Jtk5S3MZIQo5ugIqvAyzaaNC0JQoJFMW6kYRoE\nwdKgxh9NEH9Usr7UpGGX3Znn/ePeu89zzpyzO7MzuzNz5v5c116zM/OcmTP74/vc5/5JxhgoiqIo\njYVX7RNQFEVRxh8Vf0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUBUfFXFEVp\nQFT8FUVRGpBktU8gihNPPNFMmzat2qehKIpSV+zdu/cXxpiTRlpXs+I/bdo07Nmzp9qnoSiKUlcQ\n0c+LWaduH0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUBUfFvQLJZYOVKvlUU\npTGp2Tx/ZWzo6gKWLAFyOSCVAh5/HMhkijs2mwV6eoDW1uKPURSlNlHxbyCyWWDxYmBwkO/397OY\nFyPk2SxwwQXA8eNAc3Npm4aiKLWHun0aiJ4eIJ+394nYig8S5hbq6WHhz+X4tqdnbM9VUZSxRS3/\nmCIumnQa6O1lkW9tBZJJFm8AMAY4cMBvwUe5hVpb2eIXyz9s01AUpX5Q8Y8h4qLp72dL3/OskC9Y\nAKxdy8KfzwM33MDH9PbyRhHlFspk+Hj1+StKPFDxrzOKCbqKi0ZcPPk83+/u5vtELP4AW/iyAXge\n3xcSCb+FL5uAoij1j4p/HVFM0DWbBQ4dYveOWPeex0K+YQNb9SL8ggi+MbxOjrn7bhV7RYkrKv51\nRFjQ1RVnd3NIJICODqClhV06hw4B99/vt+xdxDXU2WljBCr8ihJfVPxriJFcOlFBVznu0CG7OQDA\n1KnA9On8XEuLPTaR4CsAcQsRAbNmASefDDz4IHDkCLB7NzB3rm4EihJXyAR9ADXCrFmzTCMNcyk2\njz6YxZNOA8uW8XHi6snl+DU6O+1ziQRw8cX8GpMnAyecANx5J28ATU18OzAQfm6pFPDEE7oBKEo9\nQER7jTGzRlqnln+NMJJLR5DHZKOQIK1Y8YsWscWfTgObNgF9fXZD2LyZ1xDxRnHTTcCkSWzly3Nh\n9PcDq1YBs2fbqwCt9lWU+kbFv0YoJo8+zL1jDG8ARHxcezuvveACK/xBjGEr/847gXvuAb7//ZHP\nb+tW/mpuBpYutVcNbi2AbgiKUj+o+NcIYXn0rpgC/mBucug3J+4d1ze/ciWvG8mjl88D69ZFu3sA\n3lSIbByhvx+44w57pSG1AO75FeO20g1CUapLRcSfiNYD+ASAN40xHwx5ngB8DcDFAH4N4M+NMU9X\n4r3jhJtHH4wBXH21P5jrundc4ZdUz0TCpnqKgBvj3xCSSWDv3ujz8TwOBO/b528L4b4GEb9fd7ff\nbdXdXSjy2h9IUWqHSln+/xfA3QC6I56fC+C9Q18fBrBm6FYJIZsFVqywFbrSjsF1C7nuHXnMDfAm\nk8Cll7JLZ3CQNwOx4JNJYP584I03wn39slmkUsBZZwFu3F0CxLkcr/E8TiH1PP8GITUFrshHxTX0\nakBRxp+KiL8x5kkimjbMkssAdBtOLfoXIppERO8yxrxeifePE2GtGUTs29v9IrlypV3X388BXvfq\n4Ne/5ufkCuCSS/ixtjauAbj++vBzkDjC0qXA22+zyLvk83w8YGsH3PoBCUAbw+e1YgV/tbbaIjKp\nHtarAUWpDuPV1fMUAK849w8PPeaDiDqIaA8R7Tly5Mg4nVpt4bZm8Dxgzhy26MWvfuutVhyPHvW3\ncJgxgwU0keDbtjZ7P5kEtm1jcV22jEW3vZ2t+zDyeQ7qilXvbgC5HPD001w7kBzBfMjngUcfZYE/\ncMC+Ti7H97VbqKJUh5oK+BpjugB0AZznX+XTqQrBrJ+2Nr8rZ/58Fu0DBzjwKngep20+/rjt4TN9\nug0iuxW+fX02dfPGG60A/+u/2s3E82yDN2OAyy7jbB+x8HfvBp55hgvBhksTFfr6+MpkYMCmni5e\nzNlGwSwndQMpytgzXuL/KoBTnftThh5TAgSzfnp6rGsnl+OOnOvX+3P7AX8Tto0bWUw3buTXuvVW\nFtQNG2x66ObNhaItcYLJk4Ff/pKrfQF+nzPO4CDzfffZ9ceP89qJE/kc3YByWKbRjBnAjh32fj7P\nwWr38wLqBlKU8WC83D5bALQTcw6A/1R/fzSZjHXvpNOFmTYDA4U9ev7sz1hA3awb8bdns/xa8+cX\n+u9dcjm+Glizhls8uPT0cIDYxRv66+nsBG67jTeGiRN5I0ok/GtPPx342tf8nyWVstb9rbfyYxLo\nVjeQoowtlUr1/CaAVgAnEtFhAH8DoAkAjDH3AdgGTvN8EZzqOb8S79sI9Pb6WzAT2Wwb2QCIgL//\ne9uV0+3ouWMH8MMfsvC3tAATJgDHjoW/lzG82QBspT/6qH1u717gqaf86+VKZMIEDg5v3gycfTZw\n5pnsmtq8GXjoIeD114EXXvAf63m8aUi2T3c3X9FIzyEJdOvQGEUZGyqV7fPJEZ43ABZX4r0aBfF7\nHz3qd6F85CPAVVcBjzzCPngRfDfDZtZQV489e2wmkIh0Zyfn7a9bx0JL5Pfz9/by92+/7T+fqG6g\nxtgYgrBrF4v/l7/McYjPfjb82N5em+3jViNLoHvFCk0FVZSxoqYCvoq1giVP3gs45n70I14zOMgW\n/sKFbNEvW2ZjA3v28NVBMmkDrCLS+/bZGgHAf6znseWfzbIVHobnFRaLBRkc5M8gbquwtYkEP9fd\n7Rd+qS9whV9jAIoyBhhjavJr5syZptHYtcuYiRONIRJ5Ncbz+MtKrv9r9mw+btcuYy680K5NJIy5\n7jr+SiTs+mTSmFSKH5s4kY9bu9aYpiY+trmZXzPqPRMJY5Yv59ch4turripcd9119vMEnyPi15f3\nlMebmuxxwu232/NPJPi+oijRANhjitDY8Qr4KkUgKZeuFZxIsAskit27gY9+lC3otja2miXPv72d\nLXvX8pZAqqR8dnez+yWft9XEu3fbthBBjGFXzpNPAl/8It9+4xvA8uV2TTJpC9KkOlmQKwdpIe0G\ngFta+DjXspfUV/lMGgNQlMqg4l9DuELX1GRz7d30yDDEp79sGfv0v/AFdo8AnEvvCqwbPDaGff/p\nNL+vK/aeB3zsY/y68+bxOcm0r3S60Ac/bx4/JxtW8PM0N/Oa4bKN9uxhF082ax+T1Ff5TOryUZTK\noD7/KuMOZ9m3D7joIs6dB4CurpH96
4IxbGVLk7fubq7CdQO1nsd5/A8/bF9zcNDm2ruxhuZm63fv\n6PCfpxSdBfv2yHzgwUG+f+uthTULDz/sP2+36ZxceQRnGejgeEWpPCr+Y8xwmSrBPj5CUxMPT5c+\nOGGIBS2tF4xhMU6n+b1cd4s0YLvkEq7I3bbN3yxOzi2TKewfJMjz0i5a3EerVnG/IGktEZxHEBTu\n5mb+vAB/vkWLbNB5uFkGiqJUFhX/MSSYqRLsu+/28XEZGOCrgLvvtm6bRKKw734yyWtkTq9Y1+46\nIs6937+fU0O3bwdWr+bXBwp97CNZ2W77Cc+zVcKPPsp+/0mThk/JnDsX2LKFN6tk0g6Y18HxijK+\nqPiPIW7Tsv5+4IYbbEtkEWuxhMMs/OnTgWuu4e9bWoBPf9pazeJe2beP3TIimAcOFPbsP+ssLtIS\na723l6t4R4PbfuIb3wAOHrTPPfQQ8O1v8/crV/pnDQCF+fwDA8CSJXxensd9fnQAjKKMDyr+Y4hr\nJQPW/378ODdKO/dcroz96lf9xzU1sah+5CMslKlUYdYOwPfXr/db7729trc+EdcBtLfbfj+VcKvI\ne/3d3/kfP3wYOP98mzkkraFTKTuMxs1k8jx7lZLP2xbT0i4a4LiHXP24IyMVRSkPFf8xxLWSg0PS\njxyJ7oaZy3EKpSC9+sOqbHM5f4C0tZVFUoS+pYWfl8reKEqxrkWQw84n6JoKDqORK5f3vx+YMsXf\nQiK4AWSzfGUg3UWPHbMZTboBKEqZFFMMUI2vuBV57drFxVVRxVrDfTU1cSFWMllYLCWFWsH3uv12\nPmbiRC6OSqW4gMst7nLXy7qw13NZu9ZfmEXERWGplL84zS1Sc4vJ3IKzqC+3QC2s2Ky5efhzVJRG\nBlrkVXvMnQtMm1baMZ7HQd2ODvbdu5x2WrgbRLpk9vb6M3OkG2iwW2axA1Wy2cK6gUSCLfEnngCu\nvdbWC3ge9yC67TZ7jlJMNhLSMG7DBo5ZBGsDBga026eilIu6fcYI140CcBWuuDw8j90ezc2chSNM\nngyccw5nw4hIXnopB35XruTX2r3brr/5Zr/wB103bsxBOn3mcoV+/+AAmWBMQF730CG/eAeDtJkM\nu5kkiPud7/Bm4bqkPC+6SVywAG1wkFNBg7OG3dkFiqKMDhX/EinGNx5M8bzoIiv8gBXQadP84v+L\nXxTm4W/ZwkPY83l+reXL+RiZwztS8VVwUEpUDr+7LrihyGdJJvlLGs7dc48/OAsUtopw4xGZDHDv\nvZz1FLYBGGMrieXzSt2BuyFec436/BWlXFT8SyCswyRQKJrBFM+tWwtf6+BBf5okwOt7e4EFC9jt\nIVWvInrHj3Me/fbthecjFnVQdIN5+1GiGZXf734WgC3xqVNtGqcMihFGuoro6OArmYULgWefLXw/\nY/i5qVPtsYcOcQaUVB67XUkVRRkdKv4lEPSNd3fbFEp3vq4rgG6//JFw3Rkyb1cgKpxxe+iQPR9J\nq3TXVYKgmIvwRrVZHu4qArAtq198Mfz9PM+mrgY3t5kzeWNQq19RykfFvwSCQghY8ZUgpczNFQE8\nehS4666Re/QkEtZ/ns36g5xNTTZfH7CCKFO7gPAK4koQJubBFg/F9uIJG9wiwWFx+dx9d/gVVC7H\nk8QOHOArB90AFKU8VPxLIMyHvnGjFTNpriZNzQAWO6lglcKnILNns3ADLKyHDvnFceFCW5HrCi9g\n3TBjWf0aFPORXDthZLN2Pq/72WS6WNimlU77f27uz1fFX1HKQ8W/RIJCKN0w168vzKRxp1SJdRtE\nLHbAH1iVtshBH3eYG2a8hXAk106QYAM7z+PPuGBB9PlnsxzAlo3TDQIfPcpB9GDQW9s/KErxqPiX\nSVBsXH/1+vVW8IN+f2mvPHeu9d+LOBrDohZm0ZcqvGNFKW2W3QZ2RDxj2K3S7eriCmYR8+AxiYQN\nAh89aucFP/oo8OCDwI9/bIPB2v5BUYpDxb9MghlAYqVLf/swiIC/+AsebiLHuoHhfJ7z5YNplEK9\n9bdvbWVLXwLT+/ez715iIq6YA/y502kbD3CvcC66yP/abhsMdQkpSvGo+I8C183gunbc6tjdu4cP\n8K5eDbz9tvXfuwFeIraE4xLYzGQ4E0rSVwcHufgrLAi+bh1/7mXLrNXvXiW0tfn7AQmyUaTTY/95\nFCUOqPiXSLDoyQ3iJhLhw1SEU04BXn+9sNmZZO4Q2bm2O3YAO3fGx43hdhZ1axKCrRuefpo3VNdN\n1Ntrn5eroXXruFGdmw6by3GX1H37qhMLUZR6Qnv7lEgw11+6WBJxALO3t7CzJcAi//nPFw5Yl/m0\nPT3cH2fOHBvcHK7PTr3hzuK9+277c5gwATjvPLtONtLhhrZ3dHAM4OST/YVw8jNbu7ZwFrCiKH7U\n8i+RqAEsTU3W39/UZC17CewuX84C6E7dksCwkMlwOqRM44pbDxs3VuH+HIDCuEnUOEmAA8TXXhv9\nPpoSqigjo+JfImLBLlvmb7J2+ukcxOzt5efuuMNao9u2sfjL8SJIUe0ixBUSdInEiaiU2SASGHZ7\nE4XNQSACPvQhO8lMZwEryvCo+I+CTIbbK7vif/AgW6MSeHSvCqQFcdAKjWqlPDhoA6ONZL1KTGDD\nBvv53boAIttULohkEREVBokVRSlEff6jpL3dtnhwkbbJrkAlk5zHH/RBiwvJ9W2HPdYIhMVS3NTX\ngQF2tUmrh6i5CJJB5AaJFUUpRC3/UZLJsGBJde/AgD9t8f3vB844g7/fto0btUnfn5GaoNVCEdd4\nEzZ7wLX83SupfJ67m4YhdQGNsmkqymhR8S8D8Vu3t/MmsG6dddk89xzw7//Og8vFWi22CVq9FXFV\ngqjZA+k01zw89ph/cw276rrwQr5ta2u8n5+ilIqK/xCl9IcJrnU3gRUrOEc/n2c3xdNP+/v0qEUa\nTdTsAckMkgyqpiZO8wxeETz6KD+2c6e/QE7aSAOa/68o/00xg36r8TWeA9xLHWDe1MSDxVMpY+bN\n40Hjcoy8lgwzJ+J17hqldHbt4p/hvHl2EH3UMPhEggfYy3GplA5/VxoHjOcAdyL6OBE9T0QvEtEt\nIc//OREdIaL9Q1/XVOJ9K0WpA8wlGNnfz2mH993HM3plqpV06QRYcgYGuCmZWpyjJ5PhttazZ1s3\nWtgoSPH5p9Pc/lqqhQUd/q4oTNluHyJKALgHwMcAHAbwFBFtMcYEhhTi740xS8p9v7FguP70rssA\niJ7K1d/P63p6Cvv6EKm7p1IEe/wDfH/OHPb19/bymk9/2j93WKqum5r0d6EoQGV8/rMBvGiMeRkA\niOhbAC4DEBT/miUq6yab9ffpkfRNovCmbevWhW8Ol1yiVn8lyGa5d8/AgM3nB3jDXrHC/oyvv543\nY4DXzpvH37/2mo6BVBShEuJ/CoBXnPuHAXw4ZF0bEZ0H4KcAPmOMeSW4gIg6AHQAwNSpUytwas
UT\nlmEjbRYEEfaowSyy1vNYmGT4iFT3KuXhunCkp89ll9mZCFJhfTBgdrz1Fo+AlAD8Sy9xqmgjpdIq\nSpDxyvbZCuCbxph+IroWwEYAfxRcZIzpAtAFALNmzRqmIfL40Nrq79MjDDeQnYiblslownTa+phV\naEpjpAwsY4CtW4FHHuHfkTG2wtqlr8/2YsrneX6A5/HvKS5dUxWlVCoR8H0VwKnO/SlDj/03xphe\nY8zQhTgeADCzAu875kgh1wc+UPwxnsfC39HBorVsGfC5z2mXyVKRvkfuz6693bp6BOnkKVdickUg\nLbKbm9nVE9wQ4tY1VVFKpRLi/xSA9xLRu4moGcCVALa4C4joXc7dSwE8W4H3HRcyGfblp1J8P5Gw\nxUTCeef5m7BJa4Fis4iUQsJ+dpkMD6x38bzCBnhEwL33Al/8Ih/X0QHcdJN/rVYCK41O2W4fY8wg\nES0BsB1AAsB6Y8xPiOhvwfmmWwB8moguBTAI4C0Af17u+1aaKBeDPP71r/OQkDfeAL73Pft8IgFc\ndRX7lIPZQsNlESnDE/Wzk6Ew/f0s5Jdcwj59d5yjMXagC8AB4PXr+ftEArjiCuDIEWDGDL9LTgfB\nK40EmeFmDVaRWbNmmT179ozLe4W1VhYxcKd2Sc5+MI3zi19kwQj26Zf2BL29KiijYbgNWXoq5XI2\nldONxYjLJ/g7k2C8TBIT339nJ7vogn8DilJvENFeY8yskdZpewewwEhA8Ngx4JprgAce8LseRFiC\ne6VYpSP16VchKZ2oHkcSi3ELvf7wDwutfzcW4D4ezODq62PXXpibSVHiirZ0BlvnrtV48CBw/vn8\nuNteuamJv0+lOHf8uut49GKxffqVyhFsff3bv124JuyiNuqxffv4tRqtlbbSuKjlj/De7wMD/Hhn\nJ3eVbGsrHMEYhfr6x55gYV5wCtiUKcDhw9HHn3468MEPAg8/bDOEFi3iNhzqolMaARV/sIUfrNpt\nauLHly1jl9Djj3NwUWbxDkdUxbBSWVy30IEDtrCuqYlTRJcu9Vdnu1d3N9/Mm/n27fz79TygpYUz\ngxSlEWg48Q8GEbu6gBtu8Au/5wF3382Wf1+ffW7zZi4oCnP1BGnEnvzVIpvlTdoYDv6uXs0i/tJL\nwFe+Yh9ftoxHPba1WZHv7ORmfbkcPz99Oj+uG7cSdxpK/Lu6gCVL+B9dMjzkvov4gN94o9BH3N/P\nIqEzYmsHibHk83wF19vLG8Kdd9rf3+Agt3TYvt1/bG+vdfscP87uI5klrMF6Jc40TMA3rB3zpk0s\nCkGSSc7+2Lw5/LV277YtnJXqEzb3uKfH7+ZJJMJjL8FjAQ3WK41Bw1j+3d1+oU8kuMjn0UftY1Om\n8O2JJ7J7wCUYE9B0wNohKsaSSll//t1382MrV/rHRLa2Fo6PdC1/DdYrcaUhxD+btRWeAAu/+PQl\nEEhks0PCskSi8vuV2iBsBGRQ1KX2wp0H0NQEzJ/vH++owXqlEWgIt48UBAEs8osW2cZrqVRh068o\nPI8nSUXl9yu1QzCw393Nwftcjl1/UrjX3w+sXetvvJfJALfeyt+vXKnuPSWeNITlH8y7P+EE4KKL\n2O1z9tk88HukLhduGwAV/dpGKqzF5XPFFcC3vhX9O5ZqYJnE1trKqaNucoAGfpW40RDi77oAjh7l\nfu6A398fBREPDJk9W90A9YLbriOfBx58MHotkZ0KtmEDx4WSSb6Vq8X+fo3vKPGjIcQfsD7hiy4q\n/hgiYMKE4gq7lNqhtbWwqMtF2jobYwfAnHYa8PzzfEww9TcqU0hR6pmG8Pm7zJhR3LpkErj2Wr3c\nr0cyGeCeeziYG4zneB7/bl3yeeC558I3C0kO0L8BJW40jOUvTJpk0zaJeErXb/wG+3zd9M5PfAJY\ns6Z656mUR0eH7cUkbbXdW7f1AxAdD5DkAO31r8SNWPfzD/uHddstJxL8Tz84GJ7KqX7e+JLNcuxn\nyxZr8Tc12b8Huf/DH/L32qJbqReK7ecfW7dP2AxY4aKLgDPOAE4+uXA4i5DLaXVnXBGj4K23rPAT\n8axfGfcI2Ftt0a3Ekdi6faL+YVtb/Zf7Yeh81/giRoHbsA/gOMAJJ9hGcAAbBnLl6KYKp9O2Uliv\nAJR6JZbin80Chw7ZwJ7ncZ+e3bv9U5xckkme4NXSomMX44wYBcGrvdNPB7761cIRnek0H9PZaeMF\nOu5RiQOxE/+gT//cc3m83+7d0cfMnq3FW42CWPFSByCcdBJn/Lice26h0IddUerfjVKPxM7n7/5z\nDg4C//ZvIx+zcGHhP3A2q6X9cUQK/m67DbjqKr4qJAJ+/GN/CmhTE3DmmYVCH9ZBVFHqkdhZ/kHL\n7q23Rj4mOMZRB7DHGyn4W7mShT+fZ0Nh0SK7pr2dbzdutG0i0mmd0qbEh9hZ/vLPOWdOeIHPvHn+\nx8OsN/fqoa+vcD6sEg+CVnx7O9d2rFljRf3ss23657JlPBBIhV+JA7HN889meeBKf799LJXibpyA\nFXS3la97rJsVJMfpP3v8iCreCvv7AdgdlM/rFaEydpRbUFhsnn/s3D5CJsN92teutdW88+fbH+Zw\nP9RMBliwwB47OKiBvbgSNWtZrv6CSEGgBnuVsWA8Xc6xc/u4tLTY1D1jOI+7WNrbuambBvYaE2kO\nFySZ1L8JZexw506MdUFhrMW/t9d2cASAu+4qPntHYgdf+IJe3seV4TK6Mhng3nsL40Of+QxbZkuX\n8j+mZoMplUImDorBmkyOrYERW7cPwD+4RML2asnlgBUr+KsYMY9yCSj1T/DyWoq4XD+rNIeT+FBL\nCwd9+/p4FgQRxwAWLAiPHSlKKQQnDrpu6rEg1pa/tPZNJPh+Pg/s2FHY60dpPNyMrv5+ntr1uc9x\nkPf66/0jHSUDqLeX17quxOPHC8dAKspoEGNVjApJNx4rYi3+AFtu7qV7Pq/NuRR/mqfn8SYgG0GY\nmEvLENeNKLgBYEUpB/n7Cvs7qzSxdPu4qVLd3f5+PkTanEvxF2tJvx5p9hbM5nFdRMF/ymSS12sA\nWCmXnh6bTTYeGYaxE/+gL/fss/3Pv//9/I++dClvCk1NmrLXqLgxHfHtr1/PVwAi5tksx4iCvYAA\nvmL4xCeAX/8aaGvTvyGlPILdY8famKiI+BPRxwF8DUACwAPGmC8Fnk8B6AYwE0AvgCuMMT+rxHsH\ncYd39/UBL7/sf/7884F9+2wO9/Hj/E+v/7iNjWwE7e32qvHAAWDxYt4MomohH3mErbSdO3kDOXAA\n2LSJx4VOmqRXlsrIdHXx30xbG1+NjldHgbLFn4gSAO4B8DEAhwE8RURbjDEHnWULAfw/Y8zpRHQl\ngC8DuKLc9w4jnbYWmjHA4cP+51taWPwVxcV1Fd56K99fssRmioWRz/PVo8SRVq3i1uGAZgMpxdHV\nxbPCAf6bWb6c+0kdP863tV7kNRvAi8aYl40xxwF8C
8BlgTWXAdg49P0/AriAaGxCGsHcfhfP4+fb\n27llAxHfjnVUXaltwqa+dXcPL/xCImGLvl57zf+cZgMpI7Fpk//+hg3AsWPjU+RVCbfPKQBece4f\nBvDhqDXGmEEi+k8AaQC/cBcRUQeADgCYOnXqqE4mmNtvX5uFXi7Dn3hCG3QpTLBHv/j+g64ezwNO\nPZWzfozhYO9nPmPdOwcOhM+N0HYQShjZLLumXY4csd97XgMVeRljugB0AdzYbTSvIbn911/vn8/6\nsY/5i7u0gEsRgoE2wBbbAPz3I8bDN7/Jrp077uA1q1fbS3P5exKf/9tvsyU3OKjZQIqfYPPIMFpa\naj/b51UApzr3pww9FrbmMBElAfwWOPA7JnR08O2SJfwPmkoVX9WrNB7BHv2A7eMvBkQiwVXAAHDn\nnfbx/n5/Smhvr/9vzQ0g699f4xHVobOnJ3qkrLBw4RieGCoj/k8BeC8RvRss8lcC+FRgzRYAVwPI\nAvifAH5gxriXtJTm6z+eUgzBK8HHH2cR37GDhT6fZ2Hv6fGnfBKxG6irK3y2r15hNi7DdehsbeVk\nANfyb27mv6H9+znzR4zYsaJs8R/y4S8BsB2c6rneGPMTIvpbAHuMMVsArAPwd0T0IoC3wBvEmKP/\neMpoyWRY/HfuLMy7TqVsn39jWPg9z24S6t9XgOHnPWcy7DJct467B5955vhnhFXE52+M2QZgW+Cx\nzzvf9wH400q8l6KMF1EjGyUV6Q70AAAfCUlEQVQX+/77bWwgn+cNwPPUv68wwVhSOs2xSID9+W6h\n6Ze+ZF2H4+WtqKmAr6LUGu7Vo/uPOXVqYTaQZABJbCCsfch4/nMr1SXYQmTpUuvmkStFwNaITJ7s\nrzAf61byKv6KUgRB/+3SpfwP7Hb4NIb/cfftC/f/j+eUJqU2EONh5Up/gDfYKmTLFv9j4+E6VPFX\nlCIItoC+6y7r6hHhB/ixgwft2r4+tupmz+bAcJQPWIk3YQFegahwMxgP12HsWzorSiVwW0ATce6+\nBHiD/7g/+pFtI24M1wV89rN8Sa9jIOPLSJPhenqAefMKOxCceGLh+qVLx94wUPFXlCIQ/+2iRXzf\ndfcEMQY4/XT/Y/k8W/zz5+to0DgS1iIkSCbDV4BBLryw8LH9+yt/jkFi6fbRoJoyFmQynOXjVv8G\nIWKr//nn/Y9LFpA2eIsPrs4Ml9bprk2n/e1nPA/4zd/0B4ABzvMfa2In/hpUU6rJyScDr7/uby1y\n2WVs8aXTvHl0d+smUO+EzYCO6sUfXHvFFcBDD/EVoucBTz/tf+0zz+QC1bEmduI/0g6sKOXQ3s6+\n+4EBO/7R5bXX/K6gZJLb9AL+Xi4bNnBzQf3brE+COtPbG14TElzb38/9oeRvZHAQeOopvk/Et889\nx5uFpnqWyHhPw1EaCwncueMf3WpfV/gTCeCP/5i/D/ZyUcOkvgnTmaiOAu5aYwoTBOQK4D3v4eFT\n41UlHruArwTmNKimjBWZDA986ejgv7HbbgPWrOEyfcnkmTePrf6tW9mKS6c51U9Qw6S+KUVnZO0l\nlxQKP8DCn0oBN9/Mt+OVDRY7yx/Qnj7K+OH+rb30EvCd7wCXX849/rdutbn++/axJScj+tTnX/+U\nqjPf+17hYzNmAL/zO7aR23g2o4yl+CvKeNPVxcVcAN8uX86Wv8z/Xb+eBX/NGnuMZqU1DsFusABb\n+M8+y0OAZAb0eBqusXP7KEo1CI7j27+fc/qloGdw0D+Sr5i8cKV+CRZ8tbayMSB4HruBBgZsIHgs\nRzaGoZa/olSAtjYewO3eB/xtH9Jp+7xmpcWXsDTQ3l7g4ouBhx+2mT2AvRoI/n2MByr+ilIBZPDG\nunWc6y++Wyne8TwWALfYR7PS4kkwtVPaOCeT/LuWsZ6TJxf+fYwnKv6KUiGmT2f/7d69wPbtbPGl\nUv5+7mIRJpPA3LksABr8jRfptG345/r5BwfZSJg6ldc88oitCE+lxt8AUPFXlAoRVfgjGT779tnn\nczl2AUyYwOKv1D/ZrH/IT7CBWz4PnHACd3f9m7+xdR8yH3q8DQAVf0WpEBLUy+f5Np3mzJ+tW+2g\nF3leCsLcAfBKfSKiv369v2WzW7Ur3HUXXwG4j8l86PFGxV9RKogb4F282DbwAvj7WbPY2n/ySbtu\nvAN9SuWQ4G5fX3iH16lTueVHLmfbgQTXNTVVJ+aj4q8oFaKnx/5zu60chHwe2LOHRUAsQiJ2Byn1\nibj6RNCDlv5f/ZUN/h89ypY/wIJf7ZiPir+iVAjp4dLfX1jQ46b2BSeArVunQd9ao9gCvKCrb+FC\n9uvv32+rdoULLrBXAF//uv+5aqDirygVQnq4rFgB7NhhN4AzzwRuvNE/wNu1DgcGbFBY2z9Un1Lb\nwrtWf9TvTa4Q8nleVw0ffxCt8FWUCpLJsPi7TdxefJEv/RcssFcAQb/vk08C558P3Hcff7W2atVv\ntQgrwBturbj6crnwtdksZ/jU2ghPFX9FqTCZjL+1g4hCezsHe72Q/7pnn/XHCQYGxr/cX2Hcec0i\n1FHzecPWushVxP338waxaFHtdBtWt4+ijAHt7cDGjYX93sUt9Nhjfuu/VjJAFPt7Ep8/EO0GCq4N\nirp7FQFw9k8tCD+g4q8oY0KUKIhb6Ac/8KeBErGwVDsDRGHc7prXXw8cO8bfHzvGcZlMxt+qA+Dq\nbrkvGVwtLbXbxkPFX1HGiKj2vJkMcNNNwB138P1kkuMBKvi1RzYLPPCA/7F161jUZYqbm9kVTPVs\nbuZ1kv1TS79f9fkryjiTzXI5v2R+rF5t+/yH+ZWj/M3K2CMBXZfBQd4A+vrCRzK6HD/Ouf2PP86b\nQC39DtXyV5RxprvbpnzmciwkgK0ITiY5+0dcCxdcwBam5wH33FP9/PBGorXVVuYKngc8/XR4RW8Q\nIj52vObyloKKv6JUmd27ufJXrMjBQeCWW4CPf5xTBMW1kM8DS5bYiU9KdZg8GXj9dXvfdfUE3T6f\n+hSP9lSfv6IoaG9na99N7Qy6D3bu5C8i/3OSNqriPz709BRa+K++6r/v1m64az0P+P3f5yu6WhzX\nqeKvKONMJsNtAO67zz4mQz0EEZGgmEjfd53/Oz60tvLPvL+f7wc3aSHMNSS/q/Gcy1sKZQV8iei3\niegxInph6PadEetyRLR/6GtLOe+pKHGgvR2YOJFFIpnkgO/atcCUKeHriYA5czhwCOj83/FCUnZv\nu41/R2EFevk8u+IEz7O/q1oUfaHcbJ9bADxujHkvgMeH7odxzBgzY+jr0jLfU1HqHhGVSy8FzjqL\nH5s+HXjzzfD1nmdTBbu7OdOkmPYDSvlkMmzB9/YCn/xk+JpnnrHfex7XctSy8APlu30uA9A69P1G\nAD0A/rLM11SUhuDAAWDzZv5+927gvPMK0woFYzhV8KWXbKsAgK8aaimIGEe6ujjQnstx5XXQRQcM\nX61dq5Rr
+f+uMUbi3m8A+N2IdROIaA8R/QsRzYt6MSLqGFq358iRI2WemqLUNps2+e9LgDeMfJ79\nznfc4d8g5s+vfQuzXgirp+jq4grfgQGbrulm9pxySuHvzJj6uBob0fInoh0AJoc89dfuHWOMIaKo\nPe/3jDGvEtF7APyAiA4YY14KLjLGdAHoAoBZs2bVyf6pKKOjrQ149FF73xjgne8EwuwezysMKiYS\nXGm6cqUGfsslrI0zwBZ/sII3meTfQ3Mz8PnP8xWZTPKq1jD20TCi+Btj5kQ9R0T/QUTvMsa8TkTv\nAhDqsTTGvDp0+zIR9QBoAVAg/orSSHR0sBvnjjuswIQJvwR729qAT3/aZp7kcsANN/D3UX3n454V\nVKnPF2zj3N0NvPyyv/8SwOK+ejX7/9Npvu3s9N+vm5+1MWbUXwC+AuCWoe9vAbAqZM07AaSGvj8R\nwAsAzhzptWfOnGkUpRHYtcuYCy80xvMkU9z/lUrxGmOMue668DWJhDG33174us3NxhDx7a5d/HX7\n7fb16pldu4yZOJE/+8SJ5X0m97VSKWOSyfCfM2DM7NnGrF1bufeuNAD2mCL0u9yA75cAfJuIFgL4\nOYA/AwAimgXgOmPMNQA+AGAtEeXBMYYvGWMOlvm+ihIbpNPnzp1sdSYSwDnn8FXASSfxJDDpGNnS\nUlhFCoRXj7ptJI4fB1atArZvL35CVa0TNnRltJ8nk2ELft064D/+A/j5z+1z06YBP/uZvb97N7B3\nL/8OarFtQ7GUJf7GmF4AF4Q8vgfANUPf7wIwPbhGURSL2wJaBn0PDvKQl507/f7kD32Iu0QK06YB\nDz00svi89lrlxLIWkEEqo2mdEHQXZbN+l5rLxImFG24+z5u0tOKuBx9/EK3wVZQaQYT4vPP8vmYR\nHbEyzzmHrwQk+Pvaa+Gv194ObNhgxXHhQj6uFvvMjIaRBqlE4QZ3k0nOmALsVVKQF14Ib9X89a/X\nmY8/gIq/otQQPT2FOeSSV+55LDrt7fz42rX+2bEiQK5V+8QTfnGcPj1eAeDRtE5w3UW5HP8cm5p4\nI5B+S55nvfyyEScSwLnnshsuDrMXVPwVpYZwe8l4Hg99mTQpPJMkOCYS4Lz0xYt5s0il2DJubbV5\n57XaZ6bSDJcFJO4iSc+UDXTRIrumpaXQDWQMd1q99dZx+ADjgIq/otQQxboywtZls5yXLpZqfz8H\nfd1Not6DvMUQlrPvfmb52XV3A+vX25z9lhb/Brtvn7/5HhG32M5m4/EzVPFXlBqjVOtcMoEOHSrs\nLAnEK8g7HGLtuzMQ+vs5kyqs187UqXbE4owZ/L27YbS0FL7H/ffzZhqHTVTFX1HqEHfCl8QDkkn2\nS0tm0D33sI/ftfzT6XhWBLvWPmDjJvk88NhjnDElgh382REBO3ZYF5BsGO95j7+Pj2yscdlEVfwV\npQ5wfdgAi5M7PDyf52Cl9JlJJOzEr85O7iMUZt3Wu4AJbhA3iDF+wZauqO7MBLdfTz7Pm0FYPYUE\n3es9UwpQ8VeUmieYmigZKGK1homYZAABLPj9/X7rNi7WqxAM4rp4Hm+Ghw5xQHz9+vDOm0TAaadx\nW4ewoS3Sp78e2jUXQ7ldPRVFGUOyWWvli99eOkwSRXcBleCkWLkyA1hcQnGxXgUJ4l57LWc5eR6n\nby5fzj2UiNhf7wbEAf/Pzxjg8sv5+DBSqfgIP6CWv6LULGG+6WB3z6je8fk8568HXRdE7Mu++eb4\niJgggfL2dn8W1MqVLPi5nN38ZOMMuonefhu4+mrg4EHgySft4/Pm8UYSp5+Zir+i1Cjix5aALmDd\nNuKbFjFLJOx9sfJlvYsx7NZYtszGBOKGfCZxe4lLKKx2ws3lTya5InpwkNcvX86ZQG1ttjjOff16\nR8VfUWoUt3eNWPsi8IDdBN73PuD88zk1cdMm/4yAMEbTjKyeWkOH5fl3dtppXKtX22D39OnsGhPu\nv9+61yZN4kZ4I9UN1Csq/opSo7iFXOm0zdRJJGxrAmO4+dvzz7NPeunSaPFvarKujjCff5TAjyR+\ntbYxuJk/btqmXBH19bHgi5tIzrmry7rW3J9PJbuH1hIq/opSw7jiJK6HdNoOcRFca95l9mxu6CaV\nq0DpAj+c+NWSVSybUDpt3TyS5+85qS3GsHvH7c+TzfLmKt06Ozvtc+V0D61lVPwVpU6QjWDlyuj8\n85NP9j/+6qt86/ajkUInt9hrOIEP+szTafta5VrFlbpqCG5CS5dym+vDh23vHgnySqrrqlW8OUrv\nI4mvEPFm6f68RtM9tNZR8VeUOsNt/pZIAJ/5DGepvPEGPy8zZo1h8b/2Wn68o4Nvw6z14axbKRRb\nvJhf1w0Wl9tTvxR30nAbhbsJ9fX5R2O6SOzEGGDzZmDLFv5ZdnYO/zmCQeQ4bAAq/opSZwQtUQD4\n6Edt1ornAe94B/CrX9ljNm2y4t/T4+99I69z9dX8fFi74t7e8MlVpVrFroAX605KJICLLwa2bbPx\niuBG0dpqYyFusZtABEyYAJx9tj+FUz5Pb+/wn6OW3FuVQsVfUeoQNxawcqV/EEk+7xd+gNMVARax\n3bv9bSGOHvULW3t7oZU90pVBMUIYFNDhrO1gz/3Nm+1zYe6lTAZYsMDOOHBJJLhds7RpdhE30NGj\nw3+OOAZ9VfwVpc5xffJhELGbRsS3r88+53mcy+4KW1gbaGD4K4NiCArocNa2a8kHP0tUptIbb9hG\nbO4GsGgRsGaNLfaS15FxmMaw//+00+zVUZA4Bn1V/BWlzslkeGLXNddwZWoY4qs+ftwvjIkEXxXI\n8PjmZrsuajOQSWJRuFk3vb3+26CAhlnb2Sy/p9QxSCFbUxOPXJT3l4A1wLdy9RNseXHCCXaN+/7y\nWQXXNRYkjkFfFX9FiQGZDPDAAyxMMopQRH7CBCuSEgwWZG0whuCKPVC8yyOsJYU7fL6zc/i5t+7V\niZx/sKFa0H109dV+t1fQ7XPXXdyeISjgBw6wC0wQ11gUcZuCpuKvKDFBUja7uzmPfWCALfulS63g\nzZ/vn04FWIvXFbbgZrB+vc2Bb22NzrxxUyaBwuHzvb3Dj0GUYLTbYjnYUC3oPpIsJ0Es/2CH02BR\nl9xu2sTCH2X1xxUVf0WJEbIBSMtnALjzThZCCbI2NVmLHwi3eF2RzGatoBKxxRw2FyCb5U6i0nba\nTbUsppNoNgv80z/5jyPyF1wBNh4gm9HkyYWtrV2Syehq5nSan5s+Pfq84oqKv6LEjKieQGJ5//CH\nwC23cIO3T31qZItXNhOZI7BpU2FMQObhDg6yEF96KXDGGexyGRzk8wiKuEs2y/2J3E0J4Pd0C64E\nEftcjn36iYS/VbPLggXh1cyuaymV4rhJnNw6I6HirygxI9gTaOlSO+Xr0CG23J96ioV79WrOchnO\nDx8MlM6YAfzgB7ab6IYNhYHk73+fb2XTiBJxobu7UPiB8KuFnh67N
pfjDeamm/hWNjr3+GCAOuha\nAuywexV/RVFqlmJaIojbRlw2YrVL8zJJh+zv5z5BuRwL+b33Fl4JZDK8gXznO8CHP8wbhrRLOOcc\n4Ec/KnS15HLA1q328TDXy0iceSYHsV33k2xowdm6kybxFY08v28fPxeWlppOR89BaCRU/BWljii1\n0tS1koHwlgeS/ZPL8UYQ7PP/l3/JefAA8OKL9nFjgH/+Z/6eyA6PN8bvhiHiQDPgT890N7D2dmDd\nOnuuTU2Fwi9ZRDKQ5oUX7GfavZtfS4LJslGE/fyWLAmPC4yUwho3VPwVpY4otdL06NFwwQfsLIBn\nn7WP5XLs/li1CnjtNRbUO+6Ifv1gcPaee9i9c/So3TCMYb+8266BiDeHZJI3hpYW7j4qmTuTJxd+\nbndgvQi/sHkz996XgrThOpQG3UunnAL8wz80lssHUPFXlLqilErTbJYzfQQi7m2zfz8LbyIBXHIJ\n8NOf+nP/3RYJbh78SAwO2lTOlSuta4aIXUYi3m4Fbi5n308KuSSQu3GjFW63k2gUbkvrYO8it0Np\nsHL4qqsaT/gBHeCuKHWFBHO/8IXiXD6uZZ5McsbN6tU2C+hrXwsf9RiGuHa8CNXwPA4oZ7O286jn\n8eu99JK/6Cvs/USs3e6cCxcCf/In3JNnJD+9MbxZuVc7+bx/48hkuN2De86TJg3/unFFxV9R6oxM\nhq3rkaxVV4CTSeDuu/mYYIfOKLeQCxG3ht65k/3tUdx/P7tcAN6c5syxG4DncWaRWzMw3GYiU8o2\nby7Mzgkjn+e1rpvK8wqzjNrbgYkT+b1TqXj06RkNKv6KElPkKuG227iNcUeHLcRKJPiruZk3hjAS\nCb4lYneMZM5cfnn4WnHXHDvG/v5MhitzUykrtJdf7i/GuvJK3iCKsb6DPXuikM1MWkqE9eYv9uop\nzpTl8yeiPwWwAsAHAMw2xuyJWPdxAF8DkADwgDHmS+W8r6IoxRGs1JVAqOcBM2eyW2XfvvBWyOIX\nlzTRAwf4tcKE+owzOHYgbN7MaaUdHf5WET09ftfPN7/Jt0Hr303lBHjzmDkT2LOnMI//4ouB733P\nX+RFBMyaBZx1Fp+3DGmXDSxufXpGhTFm1F9g0X8fgB4AsyLWJAC8BOA9AJoBPAPgzJFee+bMmUZR\nlPLZtcuY22835rrrjEkkJBnTGCJjJk40Zu1aviWyz4V9eR6v3bXLmFTK/1zYsSecwOuFtWuNmTw5\n+rXPO8+Y0083ZvlyY+bN8z8/ezYfn0rxezU18efZtct+xnnz+PN5njHNzbzW8/yvk0rZY+IKgD2m\nCP0uy/I3xjwLADT89dhsAC8aY14eWvstAJcBiGg+qyhKpQhOxEombbaNMf6++itWADt2RMcA8nng\n+uu5N/4TT7A1/fTTXC0c5o9/+22OEzz5JPBf/+UfyBLGOefwVUU6zdW6Lnv2AM88468Ydgu4Mhng\nu9+1+f2HDnH8IfhZ4jKIpRKMR6rnKQBece4fBvDhsIVE1AGgAwCmTp069memKDHHrQsAbKbL+vV2\nJKIUWq1YYfv6J5PA3LnAz37Goutm5CxezIK+Zg2L7XnnRffVAYAHHxz5PPN5jhN4HrtsgkNcJBNI\nGBy07RiCFc/y2MaN/toAID6DWCrBiOJPRDsATA556q+NMQ9X8mSMMV0AugBg1qxZWoCtKGUSrAsQ\na7m9vbBFRNhsYMncccnn/S2Sb7oJ+MpX+LlkEpg2rbAIazjcGICkg460ToiqeA72Nxqu3UOjMqL4\nG2PmlPkerwI41bk/ZegxRVHGmKgJVK6FLC0X3EBoNsttm48d4/VEVpTdDJpslmsHXPE+ejT6fCZN\nYqv+l7+0j8lr5/O2WZy4d1zc+5J9NFzFswZ1h2c83D5PAXgvEb0bLPpXAvjUOLyvoiiIFsEoqzms\nvXJzM3DjjVwd3NYW3S4hlwOOHIk+ly9/mXv4uJXDJ54I/MEf2PuPPMK3RMDUqcArrxS2kVi40J5D\n3Gbrjhflpnr+CYDVAE4C8H0i2m+MuYiITgandF5sjBkkoiUAtoMzf9YbY35S9pkrilIWUVbzqlWF\n/W/mzuXK4P5+bucMcBpna2u4OyaK3l4Wblf833yTg8Fh/v7Dh9mVJMNpJHdfmrDFcbbueFFuts93\nAXw35PHXAFzs3N8GYFs576UoSmUJTsSS8Yxbt/rXybQstzfPDTewH729nQe3jJTJA7CrxhXor3yF\n2z64LqMgxvAwlqlT7SD4oMire2d0aGM3RWlgxI+fy3ExlLR+cLnySv9aWb92LWfUdHayq8bNxgm+\nvjH+4zs6uHV02LB391ix8lXcK4+Kv6I0KOKvl7YMixdzS+ZUyp8i+eCDNhALhNcJPPGEP7PmjTds\nW+auLlslHAzIBjNyJAU1kWCLPyj8xQyyUYpDxV9RGhTX7QPwbVTBl1jmw9UJhIlxV1d0h02X6dP5\naiAsBdWd4BU1OF43hNJR8VeUBiWT4U6fixezMEsKp1vwJVcAnjdynUCQbJaHvYs7J9hh053O5Xl8\n1dHRET5sPWwYvfTuD3sNZWRU/BWlgRGh3LTJn8IZdMkEA61RdQKCK+wi/MEOm+50LgkiB0dIuhlJ\n8jpE9ooj+BpLlhS+hhKOir+iNDBSzHX8OFv6rnC6ufxhFv5wdQIrVvivGubM4cfc15A0UUFGSAbX\nuHn8nZ2FG1FwmLv27ikOFX9FaWCGq5AdaVh8dzdP25LAb9AN4+blB4Uf4PuXXDJ8muhIefyZDLt6\nlizhz9DIw1lKRcVfURqY4WYCj7QxbNhgUzOlTkCOCbP4wwKzy5cD27Zx1pG0bAgyUh6/pI1q0Lc0\nVPwVpYEZzrIeaWOQTp5EnJYZ1m6hrY3XHjgQnqmTyfDz5Qq3FnqVjoq/ojQ4UcJZysYQ1m7BTc0M\ny9TRBmzVRcVfUZRIRrMxyDErVw6fqaNUFxV/RVFGRXA+cHAjKCZTR6keKv6KopRFMQNVVPBrDxV/\nRVHKQgeq1CdetU9AUZT6Rtw7iYT68+sJtfwVRSkLde/UJyr+iqKUjbp36g91+yiKojQgKv6KoigN\niIq/oihKA6LiryiK0oCo+CuKojQgKv6KoigNCBlpyF1jENERAD8f5eEnAvhFBU+nGtT7Z6j38wfq\n/zPU+/kD9f8ZqnH+v2eMOWmkRTUr/uVARHuMMbOqfR7lUO+fod7PH6j/z1Dv5w/U/2eo5fNXt4+i\nKEoDouKvKIrSgMRV/LuqfQIVoN4/Q72fP1D/n6Hezx+o/89Qs+cfS5+/oiiKMjxxtfwVRVGUYYid\n+BPRx4noeSJ6kYhuqfb5lAoRrSeiN4no36p9LqOBiE4loieI6CAR/YSIbqz2OZUKEU0got1E9MzQ\nZ/g/1T6n0UBECSLaR0Tfq/a5jAYi+hkRHSCi/US0p9rnUypENImI/pGIniOiZ4mopvqexsrtQ0QJ\nAD8F8DEAhwE8BeCTxpiD
VT2xEiCi8wD8CkC3MeaD1T6fUiGidwF4lzHmaSL6TQB7Acyrs98BAXiH\nMeZXRNQE4J8B3GiM+Zcqn1pJENFNAGYBOMEY84lqn0+pENHPAMwyxtRlnj8RbQSw0xjzABE1A/gf\nxpij1T4vIW6W/2wALxpjXjbGHAfwLQCXVfmcSsIY8ySAt6p9HqPFGPO6Mebpoe9/CeBZAKdU96xK\nwzC/GrrbNPRVV1YSEU0B8McAHqj2uTQiRPRbAM4DsA4AjDHHa0n4gfiJ/ykAXnHuH0adCU+cIKJp\nAFoA/Li6Z1I6Qy6T/QDeBPCYMabePkMngOUA8tU+kTIwAB4lor1E1FHtkymRdwM4AmDDkOvtASJ6\nR7VPyiVu4q/UCET0GwA2AVhmjHm72udTKsaYnDFmBoApAGYTUd244IjoEwDeNMbsrfa5lMkfGmPO\nAjAXwOIhl2i9kARwFoA1xpgWAP8FoKZikHET/1cBnOrcnzL0mDKODPnJNwF40BjznWqfTzkMXao/\nAeDj1T6XEjgXwKVDPvNvAfgjIvpGdU+pdIwxrw7dvgngu2C3br1wGMBh54rxH8GbQc0QN/F/CsB7\niejdQwGWKwFsqfI5NRRDwdJ1AJ41xtxZ7fMZDUR0EhFNGvp+IjiB4LnqnlXxGGNuNcZMMcZMA/8P\n/MAY87+qfFolQUTvGEoYwJC75EIAdZMBZ4x5A8ArRPS+oYcuAFBTSQ+xGuBujBkkoiUAtgNIAFhv\njPlJlU+rJIjomwBaAZxIRIcB/I0xZl11z6okzgXwvwEcGPKZA8BfGWO2VfGcSuVdADYOZY95AL5t\njKnLdMk65ncBfJdtCSQBPGSM+afqnlLJLAXw4JAh+jKA+VU+Hx+xSvVUFEVRiiNubh9FURSlCFT8\nFUVRGhAVf0VRlAZExV9RFKUBUfFXFEVpQFT8FUVRGhAVf0VRlAZExV9RFKUB+f8FvkT+M2urzAAA\nAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -243,7 +239,7 @@
"metadata": {
"id": "nNYko5L1keqZ",
"colab_type": "code",
- "outputId": "b9f9c57b-b6aa-4817-8ab4-4a2201732b9a",
+ "outputId": "2ebd8e8c-e5ef-4812-af10-1d60e70c222e",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 269
@@ -258,16 +254,16 @@
"# Use np.split to chop our data into three parts.\n",
"# The second argument to np.split is an array of indices where the data will be\n",
"# split. We provide two indices, so the data will be divided into three chunks.\n",
- "x_train, x_test, x_validate = np.split(x_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
- "y_train, y_test, y_validate = np.split(y_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
+ "x_train, x_validate, x_test = np.split(x_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
+ "y_train, y_validate, y_test = np.split(y_values, [TRAIN_SPLIT, TEST_SPLIT])\n",
"\n",
"# Double check that our splits add up correctly\n",
"assert (x_train.size + x_validate.size + x_test.size) == SAMPLES\n",
"\n",
"# Plot the data in each partition in different colors:\n",
"plt.plot(x_train, y_train, 'b.', label=\"Train\")\n",
- "plt.plot(x_test, y_test, 'r.', label=\"Test\")\n",
"plt.plot(x_validate, y_validate, 'y.', label=\"Validate\")\n",
+ "plt.plot(x_test, y_test, 'r.', label=\"Test\")\n",
"plt.legend()\n",
"plt.show()\n"
],
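
Why the reordering above is the fix: np.split(a, [i, j]) returns the chunks in index order, a[:i], a[i:j], a[j:], so with split points [TRAIN_SPLIT, TEST_SPLIT] the middle chunk is the slice between the two indices, which is the validation data, and the final chunk is the test data. The old names assigned the test label to the middle chunk. A minimal standalone sketch of this behavior follows; the 60/20/20 split sizes and SAMPLES value are assumed for illustration, only np.split's chunk ordering is taken from the diff:

    import numpy as np

    SAMPLES = 1000                    # assumed sample count for illustration
    TRAIN_SPLIT = int(0.6 * SAMPLES)  # assumed: first 60% for training
    TEST_SPLIT = int(0.8 * SAMPLES)   # assumed: next 20% validate, last 20% test

    values = np.arange(SAMPLES)
    # np.split returns chunks in index order: [:600], [600:800], [800:]
    train, validate, test = np.split(values, [TRAIN_SPLIT, TEST_SPLIT])

    assert train.size == 600 and validate.size == 200 and test.size == 200
    assert train.size + validate.size + test.size == SAMPLES
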
@@ -276,7 +272,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsvXt8FNX9//+cmd1JEDUpUctHEbR4\ngWBCEvAyRXQwCl6r/eEV26WgpFoQsaiVfj62fIoV64VGBRWoIPl+VD7thxatN5CVEcShKBJuiwiI\nUFRaTU2ol+zszpzfH2c3uwlBbgmX5Dwfjzxwd2d2zq6zr/M+7/O+aEIIFAqFQtG+0A/2ABQKhUJx\n4FHir1AoFO0QJf4KhULRDlHir1AoFO0QJf4KhULRDlHir1AoFO0QJf4KhULRDlHir1AoFO0QJf4K\nhULRDgkd7AHsimOOOUacdNJJB3sYCoVCcVixfPnyz4UQx+7uuENW/E866STefffdgz0MhUKhOKzQ\nNG3Lnhyn3D4KhULRDlHir1AoFO0QJf4KhULRDjlkff4KhaJ9kUgk2LZtG/X19Qd7KIcFubm5dOnS\nhXA4vE/nK/FXKBSHBNu2beOoo47ipJNOQtO0gz2cQxohBDU1NWzbto2TTz55n95DuX0UCsUhQX19\nPQUFBUr49wBN0ygoKNivVZIS/3ZIXZ3Lli0TqatzD/ZQFIpGKOHfc/b3u1Jun3bGkiUu9fXlGIaH\nrpv07h0lL8/ao3Pr6lxqax3y8+09PkehUByaKMu/HeG6MH26A3iATxB41NY6e3RuXZ3LypXlbN58\nLytXlqtVg6LNUVNTQ0lJCSUlJXTu3JkTTjih4bHneXv0HsOGDWP9+vWtPNKWQVn+7QjHgeXLbW64\nwQACwCA/397pONeVx9o2WCkDv7bWIQgaTxrK+le0JQoKCqiurgZg/PjxHHnkkdx5552NjhFCIIRA\n15u3m2fOnNnq42wplOXfVnFdmDiR1dNcJk6UD20bwmHpK5TuQo01axqfNm0anH8+/Nd/QXm5PA8g\nP99G103AQNfNZicNheJAk7rNG+7T1mDjxo0UFhZy44030qtXLz799FMqKiro27cvvXr14je/+U3D\nseeeey7V1dUkk0ny8/O555576N27N5Zl8c9//rP1BrkPKMu/LeK6UF6OiHt0D0xe1qNMyLGIRuHR\nRx0SiSS6LvB9j7lzqwDo0sVh2zabkSMtkkn5NvG4XAFYFuTlWfTuHVU+f8UhQ+o2x/PANCEazaxU\nW5r333+fqqoq+vbtC8ADDzxAp06dSCaTDBgwgKuvvprCwsJG59TV1XH++efzwAMP8POf/5wZM2Zw\nzz33tM4A9wFl+R9m7JGl4zjgeWiBTxiP/oGD58GGKpfi6q0Q6AgBui4YNGg68fgANm++l/r6cnr0\nyLyxYcjVQpq8PItu3cYp4VccEqRuc3xf/us4rXet7t27Nwg/wPPPP09ZWRllZWWsW7eOWCy20zkd\nOnTgkksuAaBPnz589NFHrTfAfUBZ/ocRe2LpuC5s2GpzY8hEFx6JwGSxbnOu4XLjzHKMpMcx/y2o\n+T5oGhiGj4YPgGF49OnjEItZ6DpMntx6lpRCsb/YtvwdpH8P2YZKS9OxY8eG/96wYQOPPvooy5Yt\nIz8/nx/96EfNxtubptnw34ZhkEwvqQ8RlPgfRjRn6WSLc2ZysJhhRJlV4bCj1OayGoshWydiTJcn\n5/wL0hHCGqT2fnV03WTECJvTT2+82atQHIpYljSAmgYntDY7duzgqKOO4uijj+bTTz9l3rx5XHzx\nxQfm4i2IEv9DiN3F0e/K0klH52zdmpkc3sLiua4WdhHgwI7SzMmdFxpsvySJ0AK0JJw6WSNxUV/q\n/1nG5pq5DOwzhiMTx/PJJ5eQSNQoH7/ikMWyDryRUlZWRmFhIT169KBbt27069fvwA6ghdCEEAd7\nDM3St29f0Z6auaTj6IPg25Ov0kJ/eYFLUY3D6gKbs8dYeB6EQiCEFH/ThMpKGDNGTgjnGi7PX1rF\nf7AdOnemzjqa2jcmkb88IG9DGD8Q1J2aYPUkgcisVhHoaFoOpaV7ngymUOwL69ato2fPngd7GIcV\nzX1nmqYtF0L03cUpDSjL/xBhT+PoLQssMs7/HrpJmR9lSSCPHTECunaFggKYMwfq6+Fs4fKKX07u\n3DgQIDSdI57O4fM7ppA3pAaWLUOb+wL/LhGIMA0+ISFA0wKSSY/58x02brQaltfN5QIoFIrDByX+\nhwjpOPq05f9tyVdDtjp0S/l3QsLjAt1hqWZhmhCJyGPLy6XwCwE2DiYeOgEC0EQACY9Zk2q4ZopN\nr5f/Gw1BXjVoCRpZ/smkTjJpMmGCTSwmVxTP3eby3iSHNwK7IYRUTQgKxeGFEv9DhObi6LPFFDKR\nPvMMm2jIxMBDM02uqbTpUJMR3YkT5XFpj56DTRIDIxXVIwCfEG8ENhcsrWLrNR5mHXh58OXjJdSc\navIJx/PWhkvIz69h5UqbNWukmpfFXS5+uJzLA497MBkYj+I4VqPxfVvMtZogFIpDgxYRf03TZgCX\nA/8UQpzRzOsa8ChwKfA18BMhxHstce22RF6e1eDqaRrWOXRo483cZ0dEiXSVPv+XajLumLo6l3PP\ndSgutlm50qJHD5fupQ5/XHkpP177ApoQ+Gg8wzC+7g3cOJ2PNCFdPQHkJNby4J1vsnatDPfs2xey\nS5Wcj4MpPHR8BB625rB1q0VVVeNIpA1VLlYTlT+QSTkKheLbaSnL/xlgMlC1i9cvAU5N/Z0NPJn6\nV9EMrgvjx8sM2yCQYgmNI31OjVi4WI3E9PXXXXxfbhpPmmSyaVMlJ588Bk3z0ESIHXeGyVvlQ8jk\niGERKm+oIvB9KfwCMMAgwe1Dx/BYVSWbNlmUlUH2vvvCwCYIm2i+h9BkDsGS6aDrcHbgch4OdRRw\n48wxkGys8rsKVVWrAYXiwNMi4i+EWKRp2knfcsiVQJWQoUVLNU3L1zTtP4QQn7bE9dsSaes4Lfy6\nToMvPxJpLJITJ0L37i7FxQ6rVtls3OjQrZvcNAaPvn3n8MUXqcca1I67grw/fI0xeDCRCov166v4\nNPv/QACaDqeWLePR0gFs+2QhrmuRXTbcxWJAEOV/KhzexGbJdAvfhzN9l9cpx8Qj8HX0wAcRUNc9\nTu0H48kvHI9tWxQXu/Tq5bB2rY1tW2o1oFAcJA6Uz/8E4O9Zj7elnmsk/pqmVQAVAF27dj1AQzu0\nSFvHaeG/8EIYPDgj+uPGZY4999xpnHnmKDTNJ5HIoWPHSoTIbBofe+xg6uoWy8eEyJ/4CqzyYfFi\nKCqic2GE7dtnIgIPkYSjNgi+PB0wQA883njD4X/+R7p/giCzh/BFD3ihJ/TpA6GZ0pJPbyqH8PEJ\nCNDZfrnGxtsDAuN1jJWLOeWUSiZNG
oMI4uiBQWEwmScWVXxr4ppCcaCoqamhvLwcgO3bt2MYBsce\neywAy5Yta5Sx+23MmDGDSy+9lM6dO7faWFuCQ2rDVwgxDZgGMs7/IA/noNA0kWvw4EysfigEw4bJ\nFUC3btPw/VsJhQIADCNO16415OdHWbXKobraJhy26N27SG4iv7iVvFXTwffx6z2WP+gQPWscJ5yw\nkNpah6//VsCIVbex9iGPQECAybvv2vi+FP0rr4S//hVOP93l4YfLCYc9EgmTO++M8tvfWo02lXUE\nX/SC9aNBM0DTBL4f57PP5gBxND1ABAG100dy+feLmGBajRLXlBtIcTDYk5LOe8KMGTMoKytT4p/i\nY+DErMddUs8pmtA0Zd1xMq6d6mqbqVMt/vY3l0mTRiHrMkg0Tdbmj8UsBg2ystwoFpZlQV8XPzSL\nwPdICJPb59osnQtgpf7glVAR09+qIncQPPdWhLVr5fNBAKedJnMIcnOrMM16dF0ghMegQQ6TJlks\ni1s8I4ZTIaaiI6grDkAXaJqcPILA4NgdJdQlFhBooCchf3lAt9MdolGr2agm5QZS7JYDZCnMmjWL\nKVOm4Hke3//+95k8eTJBEDBs2DCqq6sRQlBRUcF3v/tdqqurue666+jQocNerRgONAdK/F8ERmma\nNhu50Vun/P27JjtlPQhcSkszlvbYsVF69XIQItnIF69pd/DEE1ajEg9lcZf4eAfG22BZPDssyvqp\nDm8Im6Xs/EN5y7f4c0eLcf3BuQ8KC11KSuSk89ln8JOfVBGPP42miQZBr662qayEmhroVxBBHzML\nPI+8NQbJhIYuEgihs/6VO7CffJyOpwTU9ob8lZD3YQ7Ydubzui7OeIeyuM2SwFJuIMW3c4A2jNas\nWcNf/vIX3n77bUKhEBUVFcyePZvu3bvz+eefs3r1agBqa2vJz8/n8ccfZ/LkyZSUlLT4WFqSlgr1\nfB6wgWM0TdsG/BoIAwghngJeQYZ5bkSGeg5rieu2B7p0cdj8YRy0AEScsjK5WappISCROkpjxox8\n/ud/ZBnmUAjOES7zg3I6LPDw3zR5dliU+lKL3+dafPNN89cSQmYGA9x4o8txx8lJx/dDhEKCIEgQ\nCklrPgg0XnppOI8+apGbC7fdBr/fANf8fijf96BT3wjbN8DatQ6vvWZzo1/F1sH1dKqGbs8hNzSe\nrATLkjWNVlWRf/sMzl/pMz8wGahHec+0WrVSo+IwZ3eVDluIBQsW8M477zSUdP7mm2848cQTGTRo\nEOvXr2f06NFcdtllDBw4sMWv3Zq0VLTPDbt5XQAjW+Ja7YbUcjb/hFr04wKCEIREwFXFtdxyC+Tl\nXcbnn/8VEPh+DsuXS/98z54uI0Y4FFdvpcMsWdM/iHusn+rw+1yLykpYsQKefhqSSVJCLi+p69KC\nBzj5ZIdEwkPXfTQt7V6S2zBBoOF5ucyfH0EImUn80ksujzwiJ4t3MelbGCFiWUycaMnVy6Sn2RIS\nbE1CyR2Q9z5QU5OpaeTXo98v6D0Wjn7f474LHXLGWw0rArUJoNiJA1TTWQjB8OHDmTBhwk6vrVq1\nildffZUpU6YwZ84cpk2b1ipjaA0OqQ1fBVLoqqpg5kxIJsnTdU4ZBBtuB6HDMX0eIZGo5LPPfDQt\nxH/8xzA+/zzCpk0WZ5zh8tBD5eTmeujFIXZUGxxZDQlh8oawqa+Xwv/AAy5Dhkh3Tk6OxZgxMrRU\n16Xl77pw++02999vEgp5BEEI8GXtf83g5Zdv5rXXIsRiGSEuKXEIhz0MwyeZ9Fi1yqF/f4vLC1xO\nHDiGUDgBGogwbB8ER2zI4f0Cm9pVDr7voWmCIAS1ZRp5m03s8bbcilCxoIpdcYBqOl944YVcffXV\n3H777RxzzDHU1NTw1Vdf0aFDB3Jzc7nmmms49dRTufnmmwE46qij+Pe//90qY2lJlPgfSqSFLl2U\nB0AIEvk6QgvAgEDISBldFySTgu3bP6SkRP4GPvjAITc3VRwOqH10BLXPdeV30wuwfQeEvMSKFeWA\nR+/esnooWIwaJVfPt90GJSWwcqXF2LFRSkoc6uoKGD16NJrmAwa9e0d47DEr1QwGrrsOVqywSSRM\nhPBIJk3WrbPpH3IpGlOOecs3jWJ639HO4ia/EmMUXH76Vno/FEIPQUgPkX/WMPhZJPNDPkBLe8Vh\nygGo6VxUVMSvf/1rLrzwQoIgIBwO89RTT2EYBjfddBNCCDRN43e/+x0Aw4YN4+abb1Ybvoq9IC10\nKeEXmkbSyGHzsbfhJ34PwscPwgghMIwkhhHg+wtYsWIxq1dHKSuz8f2s4nDFEbaug0minBAeHib3\nnTE09bpPENSzfXsVNTUWQZDJJl62TA5n3TqLWMxiyJCJGIbs+wtJzj/fYdEiq5HB9YtfWEyZUkn/\n/nNw3cHcfbcFjiwy1HmetPaFCUnf5IF5lRwVwPygHHONR81Yg8qSEfwjJ0LOCKvxb/lAtmtSKFKM\nHz++0eMhQ4YwZMiQnY5bsWLFTs9de+21XHvtta01tBZDif+hRJbQ+brB08FwZiUjLH3Uose8qxoi\nbwCGDh1Pnz4LMAxZcnnZModf/GIcr78epVs3pyHs89WRE/lVIJOvBB7fWQmJhIFp+oDg009nct55\nEUzTarTgyE4w27rVRggTkJPKtm02ixY1Xmn/8pcuK1aMQQiPs85aTGFhESA/T956j5JxBuvHDWfE\nfTKE9B4mNiSFdYoBsa7M0i1mz27i2TlY7ZoUijaOEv+DTWozc3WBjROHs2cN5fTt8EIswq3TLAIB\nCIjFrEY+9lmzxlNcvLjBzfLeezaeB4sWWdi2xZsPuBz93kT+4RfgYSLw8HUTcUqEefPg8sunpmL1\nk3TpImPts7YaME1ZX0hqrUVdnaw4um2bzUUXWTu54GtrHcBLuYZS/QiscQ3CnWfbRB2LVFQcDjYe\nJprm4Wsmi4TdsPLYybNzMNo1KRRtHCX+rcy3BqqkfPwi7nFcD4PTH9H4dzjJ8gKT7mURjKczkTjZ\nnIPLgHUOs+6qxCyTJZfXr5f1/AsKYJzt8oon6+wMxOQOrZLj9BqOusIm/xKL2U/BwIGzCIU8QiHZ\nO6BbNzm+pvWD0qQrjj73XGMX/Msvu3zwgcP3vlfQfD+CLOG2kRNGadzFxmGsUck9I2rYUWqzYoyF\noTw7CsUBQ4l/K9I0UOX11126dMnq0Zvy8WuBz9clAaEw6IYgmfQIAofJky1GjpQTgGFAIiGFP0o5\nOXgE603evy3Kl7dk/O+OA99PZOrsaJrHJWfWcF31OPy/gjkPKistNmyQm7nFxY378+7OyM52wRcV\nuZx7rgzv/Oork44dKzn55G/v+fubS1xuf7GckPAgZPLljyqhi8Prr6dXLcrIVygOBEr8W5HsQJXu\n3V3q6202b06gaWFKSqQrBNMkiHscUZ3OiE02RMtEIi49e2ZCMkePhgFxKeyG8CHpcfQKh6KKzC
bp\n6tXworAbXD1ayGRHmY2/PGOt19TAuHGZsg57Q7YLPggy4Z2IOF++NIduV43H/cRq1GcY224oP31H\nvYMhPAx86k6Ns7J+FMFmH03TGTp0CscfX7HTNVWYv0LR8ijxb0WyreSLL67CMGRh/iDweOKJKpYt\ne5Kht0X55yNVHBPbzo6xsL6kM6/FIpxzDrzzTjmhkAzJNIwoQlgNvnKBrNEzdIbNxKzIyJoaWKZb\nlAdRBmgOPW6yOTViYc5quYCZ9LUmTbLp08cEESecDCh7eQHJ3y/m7iBKEMDtohyhe2g5JhuGRvE8\nizeEzX+mfP21fTQCQ2YpCxHw/vuj2Ly5iH79Mgo/bRoNq5+cHBXmr1C0FPrBHkBbJm0lT5gAV1zR\n+LV//hPmzoXZL6+m/LrpXFA4l6Gxufz6uRl0XAU7djhoWqah+8aNDr4PdYXwmyFDeaRwBOVEecuX\nVnYa25Yi+Y5hUZk7jtJSsJyJ/K3SZcQI2RGsOVxX9gdw3d1/riVLXKZOncj778PYsVE2zbyQM8bq\nfGdtgEh4nOs7nCfkCkUL5HLjfBxMU05MA/Uoz/WcwCdXXNfofYXwmTrVIZ0k6bowapTcgO7Rw+WH\nP5zI9OnuHo1RodhbBgwYwLx58xo9V1lZya233rrLc4488kgAPvnkE66++upmj7Ftm3ezOyI1Q2Vl\nJV9//fVejnj/UJZ/K5P2odfVRaiunonvy+ic+fMjFBa6jHh4FH8P+3ycgN5j4chYgv6Bw4vVmaQp\nTTM55RSbM85weeCBTJG3Z++KYG5qbMlnu2UuL5BJVngehSGTdUJOFrNmNbag9yaJdskSl6+/LufH\nP/a4/nqTu+6KEts4nrxNi/E1uRpxkAPyMDF02We4W8SmshR+9jNY4lt8ATyWdy+GQGb+ChDCYPly\nm83PuQxa4bABG9+3KCzMlI5IJExGjowyZYqlVgCKFuWGG25g9uzZDBo0qOG52bNn8+CDD+723OOP\nP57/+7//2+drV1ZW8qMf/Ygjjjhin99jb1GW/wFE04axZs1PueOOhcRiFiUlDlo4KTN3Q1BbAgnC\nOMgY/bFjo1RVTSA3N0q/fhbDh2d87KGQx+DBTrNCbVmy6UtRjdMoNKdfwmmUKJumuSTa5nBdmD7d\nQdczY+jTx+G6SgtjYZRtP53ApWYUXYMLdId3bqxEu29Cw2xSU5OJXrqipApDy7SQDAKNysrJHB2D\neX45J069lxtnltM/5FJa6hAOxzEMn3A4Tq9ezi7HqGhf1NW5bNkykbq6/V8OXn311bz88st4qb6p\nH330EZ988gmlpaWUl5dTVlZGUVERL7zwwk7nfvTRR5xxhmxf/s0333D99dfTs2dPfvjDH/JNViXF\nW2+9lb59+9KrVy9+/etfA/DYY4/xySefMGDAAAYMGADA/PnzsSyLsrIyrrnmGr788sv9/nxNUZZ/\na5G1S1lXKEsq+L7HaaeZ6HqEwkIoooBQQhAIWd/+n9vP43+veoBlL1oQyNj+006TNXu2bJlIaWkB\nX32VKaFw0UV2I+HfaWM0e9MhZLJE2Bj+zn7/3SXRpt9361ZYvtzm+uvlGIQwGTHCbsgF6GZZPF7q\n0mNUOSHfQ/uzCSMzs5Nty+Qx34ce1dvRE7IjgSbgn5X9eeWViobkLz21oT1rhMOCvgXoeoAQoOsB\nX35ZoMJBFZmigKnw4t69o7uMMtsTOnXqxFlnncWrr77KlVdeyezZs7n22mvp0KEDf/nLXzj66KP5\n/PPPOeecc/jBD36All1TPYsnn3ySI444gnXr1rFq1SrKysoaXvvtb39Lp06d8H2f8vJyVq1axejR\no5k0aRILFy7kmGOO4fPPP+e+++5jwYIFdOzYkd/97ndMmjSJX/3qV/v82ZpDif9eskeRJ038KNtf\nG0QQ1GMYsgHKZcVV9Kp2+PHXy/h4CnzeH76zSOMPiy6myxSL0CuZpu0bN7osX16OrssbvGPHSj78\nsIZTTrHp189qGE9BQabjV8Z1k/EBGbbNRKxmx/5tSbTZHyUUAiEs7rorSp8+DiNG2I02ZyG12gg8\nCHauxWNZ8MQT0vXzTawzxWOhrgTyqiEWK0QAiw0bdFO+R8pddOHxDps360CAEDo//3mNcvkoqK11\nCILMvlhtrbNf4g8Z109a/J9++mmEEPzyl79k0aJF6LrOxx9/zD/+8Y9ddupatGgRo0ePBqC4uJji\n4uKG1/74xz8ybdo0kskkn376KbFYrNHrAEuXLiUWi9GvXz8APM+Tv+UWRon/XtCcb7yw0JVtErNj\n27P8KHXd42xP/rWhAQo+3FU9nbxYQC2CTY9AEIa6YsGGLQV0qIHhw2HqVOkHLy52ECJzg598cg39\n+4/baTxpi3qnLNmswP30w+bYVXx/tksIZDevrl0tCgosFi2S192bWjwVFVBUBH+4KcKw2EyOinkk\nMKkiAsDbwuK5m6JEujoN5+a/uBW9KExAEsMwKS5u/J6K9kl+vt18YuF+cOWVV3LHHXfw3nvv8fXX\nX9OnTx+eeeYZPvvsM5YvX044HOakk06ivr5+r9978+bNPPzww7zzzjt85zvf4Sc/+Umz7yOE4KKL\nLuL555/f78/zbSjx3wua+sbffdclkShvaJDee/Uw8vpGGglgbR8NoQdoSL/2Ca8FdIoJNKTVG4QB\nA3yhkdOnpkErp8t2u1SnNn6ln13e4NlumPR4pEtE1udvySzZploekRq96w3i3dTiSVes/n8bLWIs\nxMbBIdNZTNfh1EhqJkrNbnmeR1GRwZvDRnBsn8h+W3eKtkFenkXv3tGdja/94Mgjj2TAgAEMHz6c\nG26QbUrq6uo47rjjCIfDLFy4kC1btnzre5x33nk899xzXHDBBaxZs4ZVq1YBsGPHDjp27EheXh7/\n+Mc/ePXVV7FTP9R0GehjjjmGc845h5EjR7Jx40ZOOeUUvvrqKz7++GNOO+20/f582Sjx3wuaCmFJ\niaxFDz5B0qd22VTyfpEKpUkJYP4Jtej+7/E1EJ5B5/mJ9B4nedVIv7cAQcZ/7ro0tGiMxSzGjYvy\n2GMyGzcWsxqEN921C+R40u0UWzIZqjktnzhxN1WWd7GMaFqxeikWf9MsdB10IT/P5MlZp2bNtkdW\nwztjuvL7XEvF+isaSJcdaUluuOEGfvjDHzJ79mwAbrzxRq644gqKioro27cvPXr0+Nbzb731VoYN\nG0bPnj3p2bMnffr0AaB3796UlpbSo0cPTjzxxAa3DkBFRQUXX3wxxx9/PAsXLuSZZ57hhhtuIB6P\nA3Dfffe1uPgjhDgk//r06SMORd5+W4j775f/1ta+Ld58s4NY+IYm3nwVUVuIEIYhD0gf3KGDqD1D\nF5sjITHljBtFAA1/s7hRPFB4i/jLo7eI2tq3G9581i1vC8OQwY+aJsQtt2Suf//9ouE1w5Cvpcdz\nIL+DDh3k9Tt02LNr19a+LZ555n5xxhlvi3Rgp6bJ8
6dObf4zrJr6tvDCHURSM8RXdBDn8Hajr1fR\ntojFYgd7CIcdzX1nwLtiDzT2oIv8rv4OVfFvSm3t2+KjRbeI2lJzJzX86Jb7ha9JpfZ1Q7zGQJFA\nFwJEAl3cw/3CNFOHZylqMqeDON98u1lx3RfhbQ2yJ8Hd0TBJLjTEq692EGec8bYwTTlx7er89Ofs\np78t/tO4X5wXznwfd98txMCBctJIv/9HH90vJ1DFYYsS/71nf8RfuX32k7w8i1jI4s2zI5x/tkO3\niA2WjMIZN8PmNREiTEAiCPF/DKY/ixvKK3/nBzaPXyK9G8dvdegalxmxuvCYVeHwXNedC50dKuXt\n96bKcnZUhml6jBjhcOaZmSStuiXTqN04h/xTBpPXT9b2SXt8lgQWSw2LETfBxV2hthbSOTfz58OK\nFS7XXSc7k7VEuJ9C0V5Q4r+fZCJuLEzTIhqRUTWOI8sSpJueg2ANRdxOJVdrczjlzsH0vyrjv39V\ns3ktMAnjkQhMdpTajNu5xhlw+JW3l1EYJsmkzE945hmb3Fz5HQ06YRpfHvdTgi6g/3s+vZdAXr8K\nLi9w+UZzeEO3ec+0iKTqF2UlXwKyDIbvy6Szlgr3Uxw8RKolomL3iHTnpX1Eif++kBXsX1WV6YCV\nnR371VcuP7lhPF+vSNIpJgjwGUoVEWaRi4f++GKW7CjC8yx8H97SLC4kyvk4LNJsjpxjMb7o8BL5\nXZGXZ7F6dZRlyxzee8/m/fdTTRpzAAAgAElEQVRlqWoh4Lgb5tD9J6T6E8NHy+fQWy+iaEw5ZwQe\n9xom71dGKUp9EYMHS4s/TToaStNk0tm2bbI3geLwIzc3l5qaGgoKCtQEsBuEENTU1JCbm7vP76HE\nf2/JCq73QyaxQFbbBBmtUlAAI0e63H9/OeEBcdb8KKDXWJ0OMZMTToAOnzYtdmY1RO68p1n8LSH7\n6eoLYPHitlPFsm9fi1/8IvNZ0zkJC6oHc2pifkOW80MzBvPbdQ7dUn0OwponE8dSoaAVqdXQ00/D\nihUyNHTBAlmtbt68CB98YDF8OA0rBcXhQ5cuXdi2bRufffbZwR7KYUFubi5dunTZ5/OV+O8t2cH+\ngUc/4bAIC02TyVk1NdCrV7oGT0BC6Py55EKe3Tiex38F2phMbeVuEZtoVucskK0TFyxoJlnrMCd7\nryI7G/mFTRX0fwHCO+awoHowc9ZXMNByiewmUQxg1app/PCHI9G0AM/L4dVXI3ieTJBrWrxOcegT\nDoc5+eSTD/Yw2g1K/PeWVLC/iHvEA5OFqQqW4XAmAepPf7Lx/RCaFiAIs+Xk8Ux0LIosoKjxbq3l\nulg4gHw8fjzEHZd+CYclho1ttx31yt6rKCrKnvQqKC+vaND6UyMWRHa9qz1tGjz6qMujj45C15No\nGoRCcUpKHGIxq5ELTom/QtE8Svz3lpQJ+8IYh98ty2SmnnIKBIFs0/joowXE43IzxvcFr7wCl12W\ndf631FK2gKhWjoaH0EwMouxLx61Dnaab1tGozPxNI6dEi4LVkGoG1lBKY+lSm5ISB12Twi8E6JoG\n2BiGfKx6ASsU344S/33BsphXZrF0WfaTLl9+Kds0go5hCDRNYBh+qgRxM/Xnd1FL2Uh6kKpq2Z7M\n11kpj9jMmVLAk0np/tJ16N3bZdIkGdL5ox+ZzJ10m6yIiswOPqkSPnwZAk3uKVRWtpuvTaHYJ1Q9\n/30kEpHWZZqBA6swTQ8Z2umjaRrJpEEyabJmjc3Wrc10yUrXizCMjKna3HPtgKbzYCKRqf0fBFBY\n6BD4ccBH1+P8uMSheCycPANKbocTXw6wcVK5w3LvRaFQ7Bpl+e8jliUFq6oKZszI1OJJU1NzBUFw\nFitXyno8q1c3swm5q4ytQyGL6wDTqPVAaGfLP7GigNCPglRUUMB3N+eSF4P8mJxuk+g42Oh6u5oz\nFYp9Ron/fpD2W0ci8O67ETRtJuDheSbjx9/Npk0WQ4dKa3aviqAdbllcLUDTeRAykUFz5kD312s4\nY6zOv0sCjl6ps7ljIcexlDAeAQYjeYKjB1pciMwFaGdfn0Kx12j7myXWWvTt21fsrulxS1JX10xd\n/r08tq7OZe5ch4cftlmzRlar7NsXqqul+O+uP66ieVwXxtkur3jlhPHQwiaTLovy4ovQP9i5JHRO\nzs49itObySr+X9HW0TRtuRCi726PU+K/d+3glixxqa8vxzA8wGTBgig1NZnyA03LFqfr6w8bpoRn\nf3Bd2FDl0mO7w92v2Lzlyy8y3WQmG8OACRNkH2PXhQEDIFUZF9NsV3voinbInop/i2z4app2saZp\n6zVN26hp2j3NvP4TTdM+0zStOvV3c0tct6Vorh1cc6QbmIM81vfjHH30eBYtchkwQL5uWalIE1zu\nYSJnC5dEArp2VYKzP1gWRJ60iJ41jrd8q8GV1pS0z7+gQPYdqKrKtMQEuZGsmr8rFC3g89c0zQCm\nABcB24B3NE17UQgRa3Lo/wohRu3v9VqDb2sH57qyY1dJiUN1td3QwBzi6HpAnz4LKC5ezJ13RtlQ\nBZbj0GVZAa+LMZh4eJgM0qJtKlnrYFJQIAU+COTKCuTjCy+Uvv6aGnnM6NGZzeNQSIo+yGQ8tRms\nULTMhu9ZwEYhxIcAmqbNBq4Emor/Icuu2sG5bqZOj+d59OhhAlHuuivK6MgYupctwzAChPC4oncV\nQ56eBYHHxWgIAgwCBB6PXOFwljL79xvXhdtuk0KupeL5QVr648dnVla33ppx8yQScNVV8r8/+QRu\nukmtwBQKaBnxPwH4e9bjbcDZzRw3WNO084APgDuEEH9veoCmaRVABUDXrl1bYGh7TnPt4Bwnu06P\njxAexcUOW2bbDFq0nM0l0voMkiHyq4GEdAfpuk5gGPiBhm6anHW3fUA/S1sl24UjhLT+r7wS7rzT\n5fjjHZYssVm0yCLWxOz417/gnXegLO6y7T2HuZts1uXv3CtBoWhPHKhQz78Czwsh4pqm/RSYBVzQ\n9CAhxDRgGsgN3wM0tl1i27JOTyJhIoSsRV9dbXNXjwf5aKSP0EETUDP5bObGItzGLDTNw8gx0VMN\ndVcX2LzkWNgoodlbsipnN/vdCQEffuiSSJTz4Yce8bjJc89FWbeu8cH19VL45wflmIGH96DJw3qU\nCTmqH7Ci/dIS4v8xcGLW4y6p5xoQQmTnW/4BeLAFrtvqWBZMmWIxaVIU05Q+/1jM4oghnxCEAQNE\nEoy8epZiUU6UC3WHayttiiqs5kr3KKHZQ5r77iIRmD698UZvUZGT2qvxCYXkymztWgvDkCuDcFi6\nera952AGHiF8BB79A4elnqUifxTtlpYQ/3eAUzVNOxkp+tcDQ7IP0DTtP4QQn6Ye/gBY1wLXPSBY\nFvz85xYDBljE49LP/OV3b6JzYllDDfrPj74JTYOlwuIdLI6ogSKaL92jhGbPaO67GzcORoyAp57K\nHLd6tY3v
y836IBmiqHorWzSXyBMWNTWZVcPcTTbJh0004ZEQJot1W2UCK9o1+y3+QoikpmmjgHmA\nAcwQQqzVNO03yEbCLwKjNU37AZAE/gX8ZH+v2+LsysfguliOwzuP2TyxwmL7dnjm8SLmzAvxVUmS\nvDUhvjOyiNzcncvPZ5csUEKzd+zqu4tEZJmMeFxu+n7vexZ//nOUvB1V3FE9gwti07n5jBl81XM4\n+cURYjGLW2+FGTMsziTKBYbDd6+z6fiZxVM3yr2Cujq5yb87N5NC0ZZQSV7QvI8hO2Mr1bWrXERZ\nlLD4hZjIBO4lhI+vGRi/nYBrj2skHGkhKSigkQWq2HO+ZT5uqKnk+zKUc2xiImN7/BefDQzYfgmI\nsAZaLj//eZTqaqtRWKhhwOmnuzz0UDm5uTK81zCiXHSRpVx0isOePU3yUrV9ABwHEZdtA5PfeDx/\ns8Mpf7CwmnbtwuFNYeFg42Ei8NBTZuluyvQrIdkHdlXiKF1ULzvR64SKAtZcGRCYgAZogiDwGNSz\nikErZAmIIlYzWMxhTmIwXxXXEA6nk/U8qqudhn7KykWnaA8o8QdWF9h0D0zCeCQweSJms/x8WD7Z\npqih1KTJEmFj+LAiZDH5kijXdXboFrF3Ugnl6299mrqFjr2ghmRYR9cDaeULjaQXYkz1DArw8dEw\nSYKAgcznP1fe3SiKa+ZMu1HegHLRKdo6SvyBl2osXiSKTVaRsIR8nsooNXMcCgbbTCyystwQFrvq\nsKV8/a1P0yqg775rc9RROYSEh+8brF07nOQMuCA2nRA+OnJBALIE9M3xama8FWXLFof33rNZv95i\nxAhZhkO56BTtASX+SL/83zQLBNg4ACwPWxQUwNljLOJxCy0KV1wBd9+9e2HYVZl+RcuS7RY6cjXM\nu3so/yqG12IRbrnF4rkNLqOYhcBD0zX0IAnISeB7dw3msiKL8vJMFFdpaaY5vELR1ml3G75NyzGv\nnubyx585bPcLeBRZjyepm2x4MspLNRYv/afLeSKzIsjJgYULlaAfUqQ2WUTcI2mYvD85SlGFxS9+\nAW895HK+cHjbtHl6zGq6V8+RRYBSKj9tGowcKXMC0qWgQU3cisMXteHbDHVLprGyfhSB4aPrOfQ2\nKukxagy/9j0CNHQCQgRowqNoRRXHbq/iDjGDED4eJuVEIQ7/GONApa2U4VAhtcmiBT5hzaOoxsF1\nLSZNgqSweBsLPQl/zLcYN6+xaV9TkykV4XkyiijdS1ht1ivaMu2nh6/rUjt9JAEJICAI4tRunMNX\np8f5eIjPl4U+AQYJDAgZ1C19mvgRT1FfKLNCw3hEqCJKOZcvuxd/QHkzTXkVB4Vm+h47TqYHMMiX\nmtt7aXoq7LxZr1C0RdqN5b+lyiHv3QD9emRmrmYQ/l4Jqx6aTxAGPSH4ovJaTln7GUedV89HP16U\neh6Kx0LOOhMEmMjJwFdhPIcOzWyy2Eg3TjwuY/snT5Y9FpjoNOoTadk20WhmIx8aW/5qs17RVmkX\n4u+6MG6GzSteDr3Gxvl3X53vVEymtksNwWYdCAg0nV4F/8tJ2wRboKF2TyBgfslZVMYqEcDQ1Aai\nrpTh0KJJUsBO8wFZCXu6gQg0DJFEC4ewhg3DymqzpjbrFe2BdiH+jgNv+bLw2gXrHE4/zybSz4I6\nF13PSdWF0elU7RMiIL9aZ2vSQIiAZNLkv6sriSF78v6yb5Q7ypqP71ccOjTNDt5yq8OJ9R668MEP\nZB4YAhH30aZOleZ+ysGfnkfq6ly2bNmzvs4KxeFGuxD/tF/3Hc9ipWlx29EwaBDceCMcddRQ3noL\ntr1WyszYGGoK43xRovOXx39OTV5+QyXPdGPw6yotuinRP6RJZ1inXT7XXQdbZtvMEzKRz8dABnx6\n6Ai545ve7U3NGEsCGno1766vs0JxONIuxD/bBVBbCw8+CIWFLscdV0447HHRRSZjX4swrLCSikdG\nQdjn0sTjjB0ra8NfdRWcdZZyAxwuOI4U/iCQf88+C6RKbqcT+TTgp70e5LzSv9KpWpD3gQEzZ0Iy\niR8yefXaoQwY2rivsxJ/RVuiXYg/ZFzCgwbBObgMKxlPOBxvaMNYUuJwIlvRwkl0QyCER2mpw+bN\n1h4ldikOHWw70+c3m6VYLMVC06BXL5fvPjSPzWHBlsDgiGmXcuaf/4oW+Ajf46jlkBgiyz9oWuO+\nzgpFW6D9hHqmuLXEJUo5V1cvIJwI8JM6yaRJsrqAO6pnEEoISIKhhTj7bFvFeR+GyCY8spGL3uQO\n13VZBbR3b9meUzcCklrA3JzOfBOYJDDwMJkbizB2bJSqqgnk5iqXj6Lt0W4s/zRX5TsEmscRsYDi\nu3Q23d6XDzqWMbxkBcc855M7Fr4o0YifPIwB96kf/OFKRQUUFTUuq53971NPpdtzxhFCY1VtKRcS\n4fzs+k4xOO88i3792H1PSYXiMKNNl3doWsoBaFRvua7YYMXDGgFJfC9E8VhBp5hPApNLzSgTHUv9\nztsorgux2DS+971RBIFPIpHDuHFR1qyx6Jt0sXF4O2zzwJtWozBRlfarONRp9+Ud6upcVq4sT/V3\nzURruFh8MaiS4tDT/P3aL/HF+xhGQBCC35eMQIt1xcHmHV/1d22rpI34Tp1qECLAMAJ03eOxxxxq\nXoKLHy4nHHigmxhEVY1uRZukzYp/ba1s7J0drRGLWYyzXf73lNtY/4gnM3h18H3p939pZYS1qXj+\nHJXD1SZJL/zq66FnT5tHHjEJhaSB0HFzAd88NJ6QiGMQIBJextWTVaN7dYHNSxOVB0hxeNMmxd91\nZX33oiIT8CAIsfm/t7LsC5frE1V8U+I1ZPCKJLy34kKefXY8/ftbjB6t2i62ZdJGvBAQi1mMHRul\npMThNK+AcX8Zg54S/iQ6gWbyzXkF1B7vkP96JXmLalhdYHP2GNXuUXH40+bEP+PStygujvLbn1ZR\n9tgMCmLTOYeZ6CT5ulrW7AkEBMkw69aN58knlX+/PZA24tN5AGkGHrMCI/AahH8BF/LhiMGc4Y8h\n2JxyHf4syktPWMoDpGgTtDnxz3bPJhLwzdoP8U5O8HGJ4Ohqn7wY5Meg91ioLYGF1Zdxzu07C78K\n7mibZCf8gUvfvuWEQh4JLcSOpQZHVkMCk4nh8dw3xMH3G7sObdtSXdoUbYI2J/5py657d5eHHion\nx6xnkyYgSFXovFMjb60gLwZHx6CeznxR0/g9VAP2tk064W/LFofNm1PiDux4bAQ7nuvKm9g8ELEo\nLISVK02CwMP3TbZts+nXTxV+U7QN2pz4py27Dz5wyM31ACGbthoQ6Dp1v/4BR1//V0QQ4GEy24ww\n0W78Htmrh/p6WfJF/cjbHvn5NrpuNkSE5RdHyOtvEUm9XlcH//rXUBYvhvnzI2zaZFFZqfaEFG2D\nNhvnX1fnUl09ACHiDc9pWg4lJQvJi8n6/m9ic2qkeZePbcsJAFCtG
9swzeaCpJ5fsaIc3/dIJEzG\njo0Si1mEw3KvQK0IFa3F/rqc232cf16eRefOw/j006lI01+jc+dh8gduQTcrY+E1xbJg+HCYOlVG\nhSSTamOvrZKXZzVbuiEdKmwYfkPtp1jMIpnMFAFV94SipTmQLuc2Xdvn888j1Nfn4id1/PoQX/2t\ndI/PjUQgN7dRZ0BFOyI/3yYITJJJg2TSpLraBmRdIHVPKFqLqirpaj4QbUTbrOUPsGiRxarnKplQ\nPJJO1T5HbRgDpxbt0VTaTGdARRvj25bXeXkWHTpEmTrVYfly2dPBNGHMGKiuhpKSzA9T3RuKlsB1\nYcYMubIEaWi0poHRpsXftuGbX9Vw0hpBiIBA93DGO+SM37OY/iadARVtiKbL679VuhTVOI1mgn79\nLHTdoqoKzjsPSkul+NfXw/z5oGmycujw4XKlqO4Vxf7gONLiB3lvDRvWuvdUmxZ/y4Ijp9gEPzNJ\n+h5eYPJfC2zeW6w269o72RFdZXGXHqPKIfDwQybPDos2BAJkGwATJ8rksLRllvb9N+kCqVDsE7YN\n5xou/QKHJWGbSKR1b6Y27fMH+LLI4iI9yr1MoJwoSwKr1X1pikOfdD6IYcAFukPIlzNBEPdYP9Wh\nvFyuDtLU1bmce+5EzjjD3em9sjeAFYp9xcIlqpUzgXuJauWymmwr0ibF33Wllea6cgNlUcLiAcY1\ndHEyTVnTPX2Mov2R3tOZMAGumWKj5Zj4mkECkzeE3UjM0xViff9eJk0qp7Awc9OoDWBFi+E4GEkP\nXfgYyda3Jtqc26epL/fMMxu/3qOH9Ns+9ZRLr14Of/qTzZQpqq5PeyTj0rGgKMq2KoehM2Q57wYx\nd11qPxhP0C0OBOjEebQkwv/G7mKGXsFvL3c562uHgsE2ReomUuwPTarHtrY10SLir2naxcCjgAH8\nQQjxQJPXc4AqoA9QA1wnhPioJa7dlOzm3fX18OGHjV8//3yIx13uv182b08kTN59N4qlfrjtG8ui\nm2UxMZIVAbR6GowcSX4PH/0hgR/SMJIBpdUbKeennMYmfv7q49JKW2xCUZRPuq3ms8/msGPHYN56\nq0JFiil2y+ppLjVzUgZENErdu1XUlkB+IeS14nX3W/w1TTOAKcBFwDbgHU3TXhRCxLIOuwn4Qghx\niqZp1wO/A67b32s3R0FBplqjELBtW+PXS0uhZ08Hz2ucwAPqF9quScV9WraNNc6Sj0eNgmSSvDVw\nxliNzSXfoXv1v8iPybTBq4I/oyc8CGRQ9iexB/kgPhcAIebz9tswb3wRs4Y7dIvYahZQ7MTqaS7d\nf1pOTzy8+Sbvzarky96zCHwPfeWshiZUrUFL+PzPAjYKIT4UQnjAbODKJsdcCcxK/ff/AeWapmkt\ncO2dqKmRYVIAhYUuQ4ZMbPDR6rp8vbjYxjBMhDAIhUyKi+3WGIricCHtK7z3XtI7vVuqHIKkjLsT\nwJExg/nP3UxeSvgB/sz/R9IwG5z+n53ySaO3vaL/07zilXPi1Mz7KhTZ1MxxMPEI4RPGY8eKpwmS\n9WRXkm0tWsLtcwLw96zH24Czd3WMECKpaVodUAB8nn2QpmkVQAVA165d92kwti1/i6ed5vLIIxnX\nzl13Rdm0ycK2ZQJPaWm02ZouinZIkzaNW1K+/1dEDiZxAgxGMpkZegXixO6cuXUOfxKDmRmq4PQ7\nruKqfAdsm2O7reaLD5Y1vG1y8fGYLEcXqvi/YmdcF2LfK+DEIdCpGo6I6fSav4J1gwRBCNBD5Ofb\nrXb9Q2rDVwgxDZgGsrDbvryHZcGUKfDWWw7hsHTtaJrHnXc6nHZaZmN3VzVdFO2QJhttb2Lzlm9R\nTpQBOLyp2SzVLHJyYMDzFcydW8HTD4PwYcjjFtGovK+OT7kO0z7/I7sUoeXMg6Qq/q9ojOvCyJEu\nD9w/mr+HfT5OQNGdPt9ZK3uN/KtEY0vOMPIuaD2Nagnx/xg4Metxl9RzzR2zTdO0EHIfo0kV/Zaj\nogJ69bKpr5dtHEMhk6uusslrzd0TxeFLk1oep2JhzoJlcYulgYUGhAyorJSHT5qU2VeKxzMGvdw2\nqMC2K+jfH/r3ByKqRkh7ZlclRBwHevVyCIU9WW5ewI7ego5rQxwR0wjHTL6cuqvSky1DS4j/O8Cp\nmqadjBT564EhTY55ERgKuMDVwBuilWtJ9+tnUVenXDuKPSQrlddCzgXjx8OCBVLog0DuFzlO4/aP\nmgZbt8K0aTKEeKdqjKpGSLvl2yp02jb86U82yYSJKeLoSchfF+bvdz/O36trZORPReveN/st/ikf\n/ihgHjLUc4YQYq2mab8B3hVCvAg8Dfw/TdM2Av9CThCtjnLtKPYVy5Liv3jxzmHXOTnS4gcZUTZt\nmgwmSE8Syr2vgJ22khrdE5YFt9xiMfuPC7nm7Cq+70HelAh5lkX3AzS+NtvMRaFoCZpbtqczx6dP\nzxTi6tXLpazMYcUKm02bLFXnR7GT5V9ZCStWyNdKS+G222Sf8XA4a2Jogebh7b6Zi0LREmR7bbJ/\nl127Zgq8FRa6PPxwOTk5HkOHmuTmRgGLiRN3/g23wG9bcZiQvZVUUCDFPt0dML1SBPncyy+7HJ+s\nIv/2GeSt8g9Iqzgl/grFHtDUinvuNpf/1B2iwuZ7ZQ5mOI6mBRhGnCBwuOgiaydf74Hs0qQ4NEgb\nDxMnSis/Tfa+UWGhy4DzB7DZi6PfL6N98ta3vu9Qib9CsQc0LQF92e/LuTLw+C/d5DfVtxFKBAQC\n9GTA6hcKGo6tr4cHH4SzzpIbw7vyASvaNrYt3Ttpyz+bq8qqMPR4Q9TPFyUaeZsPk9o+CkVbJzsV\nIEIVoWQ9mhAYmkf/NdWcMVbn3yUBR1brzF9fg65LkRcC5s6FF1+UFUBDqV+cCvtve3ybS8+y5GsP\nPggvvJBxGQKcsg30BCnjAdZXn0n1bZVc1cqWgRJ/hWIPSPtvN1S5DJkue+0JICFC/B+D6R9bzFEx\njwQmjm5z5ZUupulQXS1bQAaBnAxGjJD7Bcrn37bYE5eeZckV4AsvNH7+6xMinD52Bl+XJDiiOsxV\nsUqO7mJxVSuPuU2Kv9pUU7QGlgXHVzng+2iAj8ZMhvEHKlhDETYOizSb436wmp/9bBRB4JNI5DB2\nbJT335dlolW7x7ZDts58W1hn9rEFBbL8TDIpn9d1WHOUxfPvO/SPOTjYLMVi6uDWH3+bE3+1qaZo\nTd7E5mpMBNLKr0JmYS7FYikWAy9yue22kWhaEsMAXY8zfrzDxo0WBQUyRLSqSk0ChzvNhXHuqhR/\n02N/8xuXzz+vQgjYvLmUnj1reKvQ5oE14wAoLISiotb/DG1O/Hc3AysU+8OpEYtLZ0Tpl3BYpNss\n9RvfXMce6yBEJpRD1w0GDrTp0kUKQnrDb+ZMWLhQ3ZuHK011pqamUYWQnUo5pI/t3t3lrLMGoOsy\nS1BWINZ56KEc7rorypo1
Fu+/LyeL1jZc21wbx+zerGpTTdHSWBZMdCyO/O04fvyERYcOcumu6/KH\nvGKFTSKRgxA6YNCp02WAFIDsUD/V8/fwpjmdsSwYN25nwc4+9srSKnQtjqZlSs9DQG6ux+DBTkP8\n/4G4P9pkhq/y+SsOFNm+3HRtn+Jil8rKKoSYiRBJdN3EMKJccIHVYPnn5CjL/3Bnb3TGdWHxgy4/\n2WCzbpKHCGf6Qmiajq7nYBjRZvND9pZ2neGramkpDhTZ99qmTfDnP8NFF1l06+aweXMS8PF9D01z\ncByLqip5rPL5H/7src58+ZJDp6RPyR3wyUCNdzgTv6PNBadWk3/KYPL6Wbt0HbUGbVL8FYoDzbRp\nMoYb5L+9etl07WqSTHokkyZjx9pMmQJPPpk5p67OVVVn2wmOA28ENr8kxJGxgJNjJo8bN/F4aIzs\nAW0uhmgRlmUdMKNAib9C0QLMmdP48bPPWlx+eZRP/1bF0SvgiPcbBx/U1bmsXFlOEHjoutmqvVoV\nB56mE7ttw7wQ4Elnj6ELfnXFCvQXZQ9oEffQDnB0ihJ/haIFGDwY5s9v/NgCuj87CxOP0cxiU4Es\n+AZQW+sQBB7ZvVqV+LcNmk7shhFl0SKLBy91MF/w0YXA0HwE8E1gEsYjEZhsKrA5ABGeDSjxVyha\ngIoK+e/TT8Pxx8s47SLHQegeWuBj6B5FNY5sZ+Q45J9XgK6bDQLRmr1aFQeW7Ind9z3+8AeHZ5+1\nmBeyiZpmQ1vP1ztHmKZH6B84LNZtLquxlPgrFIcjRUWwejUsXw7z5sHfKm2KcmTmj2aaMiQole2T\nFwrR+85LqB3UmfziiLL62xDbttn4vomue8TjJsuX2wQBLE5aPFsRJdLVYXWBzV9etViqgavL/tAP\n2Qd2nEr8FYoWomniz0s1FkXRKFuqHN7E5vwVDt3SB/g+efe/QN6kXIhG0t4gxeGM67KlyuGe6Tb/\nOj1Kaals7hOLyf+5QQBYsPhkGD0aqqvlaUaqP/SBjv5S4q9QtBC2Lat2BoH8t6AAfvigxV//aiEE\n9A9BNGRiBPXU9RTUlgjyV8XJU2nohzf/f3tnHx9Vde7779p7ZgfbSoKhFpSCgmgBQ8JLbfdBcWtU\nfK32cNvbak8QPNAqaKNolbanNz21pfU1rdIWVLjMtZyeY6lagQo4soXiVkFICAQU0YKgVJs2AV8y\ne2bvdf9YM5lJSIAYNG/r+/nwSWayZ2bt5MNvrfWs5/k9mdZuCxcyyA9YiUVpbZzf1c7JKeRS3d5O\nPrmUVMpn7lyL2bPjTS41ILsAACAASURBVKZ/dXWf/LC1+Gs0x5BMzWQYwsyZWQMvUNv+2ePjfHfs\nXbx55ROEUTCSIcXHF5LfOcPVdJSMcU9jI0iJCUTxcXB5AZvBg+Gtt9Rmb/x4F9NUZwGRiE9JiUtt\nrU002jlOBFr8NZpjhOtmPfxPP92juDhr6QxqQnhgo039iLO4Nu9PIEJCIagPN2vx765kYn3pWT8U\ngqS0cHEA+P731VmQ68Kkkwt5LzAITUkkYnHqqQ7f+U7nFfxp8ddojhEZD5dhwzzuvruUaNQnmVTb\n++3b1f/us0KPkRv3IK8xESLESEkKZj8Cv9Ylv12Jo7ZucByCiAWhj4hEMK6byqq+ZfStUrbMmSww\nGw9Ky2kYFlA/zqBgeiXOnZ3799bir9EcIzINX1591aVPH7W9N2jkjqtjfPhZmyU3eqzwS7G2+bx3\ni6RhNBRUQX5tEmIxPLT9Q1egPbbwHjZzZJwJuKwXDnPLbK6yObQRS3qHkL81JH+7gDPqYMLHfCNH\nQIu/RnMMsW0YOdKhenOEMBVgpiRfWbqQ/HllTJrm0me+jyEDCrZCwVb1moaR8OagTdxwg0dVlVKZ\nhQu1HXln0R5beNeFvwQ2z0kbM2j9Ws+DnXscrolYmLRi+N9J9DhLZ42ms8nPtymumcqpiwXFsyF/\ni1KFIWUORh9L+T+jRP+Vcqi6H961NzJ3bikjR3qAsn/Wls+dQ2t2zZ4Hc+eqr0e6NpfMLmLaQzal\nMs7u6T/pMh2m9Mpfo/kYyB9fRv7ti5u3dsrEhSoqaHhrNdV3S0ILECBE2CUyQDTZP1Mm5g9th4Fa\nXttS03N3EX/BZslgmzmdr/uAFn+N5uOhLVWwbaiooH7+s4TRVM7eW3SJDBCNIteu+frrYehQj8uL\nY5xQDTtjZdi23ayXA0AYeuze7bJ3r8Ojj6oXjxnTdnvHzkaLv0bzcdGW4bttU7dzHoSzwAgQRoQB\nA6YxYEAZjqMVvyvheeB5Hvfdcx5WNIGRhKI5C6lZ4FJabpNIwBe+4DFpUoz331/E66+nSCQs1q5V\nBVyWpZr8VFWlzf660J9Xi79G8wnjeTBnehGXDr+OA2Phkm+XccYZdtvphbo1XafhulBU5BKJ+mBC\nKOHAmUl2PeLS2GgzYoTHvfeWYlmNCCERgmbhO9+H++9XNR7r1qmc/67yJ9Tir9F8wuyMZVM+66SJ\n9zKsB+Y4cHbK5faIwy/Wppt6ZE4MEwl1UDxvXjZ5XPOx4zjw+987pJIWlkxgpOD4LVHuq3WQEkpK\nXKJRH8OQSAlhKEilLKqqHED16Q2C5n15tfhrNL2Uc3Gx8Hl/ZMAr9wYUWPNp/GARj50uKawN8FMW\n/3lHHPdim6v3uAxJJJR6hCHMmtW1lo+9gG3bbG6evYYrSmL0q4YH6stYH6rff1WVcvCU0icITFat\nmsbTT5c1VXVffbVq7alj/hqNhiFlDqlHLP5R0kgYlWBIDOnzQQl8rlYi8THWuSz5B7w/Zg+3jBD0\n2wYC1DKyKy0feziuq5wbamvtJkG//PIFzL2pgrVrJ7N8+Qxmz45TXNzcwRPURm3UKOXx1BWjdlr8\nNZpPGttmyXVx9q+LUZJchCFTCBnhU1WSJAFJLHaNKGyyiNh0tcGY2XDCDonIywPH0ccAnxCOA3l5\nKuoGcOmlC7jllm8D8MUvrmIou5j351+wfbtNEGRfZxjqdZm/T1f8G3VI/IUQJwD/DZwC/BX4upTy\nn61cFwA16Yd7pJRf6cjnajTdneFlNt9ZbDPstjLGjXOZPt3hne/C0p+4/L+9DkPTsWTTDEhJuHfM\ndC4aNBinwsHDPmr7AU3HyM3YLSyEgwdVs2YhAAnXTryHDcuvorHEbvLnNwy44AKoqOjaf5eOrvzv\nAOJSyp8LIe5IP769les+lFKWdPCzNJoeQ0ZUli+HE09Uz71XZHPzOzY+UNhQQxgKpDRIpSyW15Rx\nykwbx4bY9U0Owl3uELEnYttpYzbX5aVTS3ifVZC27u6/VnIeLj+vtvkyHg4u6w2Higq7y/9NOir+\nV0LauxQWAy6ti79Go2lBGHqcfbYK7Rw8aPHkk3GCwObrIxcwY9YshBEQygjz5lWydatNeTn06ePx\n/vsuI0ao+HIk0rUOEXsiNQs8vjCrlEjgMy5qcd9F11B69n/Rf62k//I+rMHhS9IjT
ikWPqG0sIjT\n1duzddTb53NSyrfT3+8HPtfGdX2EEBuFEC8IIQ4xvMsghJiRvm7ju+++28GhaTRdm9dey4Z2IhGf\n995z+Rfh8bOSGzCjSQxTYhgphg/fTBgqq+iBA0uZMuU/uPfeUkaN8pg6Va/6jxUNDR67d8+loSFr\n4FOzwGP/9RWIZAIRKqe3+mWjmHn7X/jtip8y5eQ4LwobJ53BFSEgKv1uYcx0xJW/EOIZYEArP/pB\n7gMppRRCyDbeZoiUcp8QYijwrBCiRkq5q+VFUsoFwAKA8ePHt/VeGk2P4LTTHA4eVGmCqZTF5s0O\nP+4Xo7AqYG8A0gAhJJMmLWT1anU2EIn4CBEgpc/YsS5jxtjMnasPfjtKQ4NHdXUpYehjGBbFxXHy\na+ELs0oZESYwCUlhEAiL9RGHDYFNtWVT+SNYXg5rGx18qZq2G3ldLKezDY4o/lLKC9r6mRDib0KI\ngVLKt4UQA4F32niPfemvrwshXGAMcIj4azS9iQkTbGKxOKtXqzTBbdts3iVG/rsw4M/w9hUgDIhG\nA2691eW00xySSYtUyicMI/Tvv4cHH/SabARaO/jt6VlBx+r+6utdwlD1YAhDny1bXII7YWLKx0gL\n/zNcwM+MCr71gM2kOnUAXFenmq/X1dnsKoxTVHcMBvMJ0dGwz5+AKenvpwBPtrxACNFPCJGX/r4/\nqoVBbQc/V6PpEZSV2dxwwxxOPtnGMGAxZSTI48RVYPgQpAwMw+KqqxwmTLCpqYmzYsV0pJRcdtlD\n3HVXKWec4TUd/ObieTDH8XjvB3OZ43h4Xuuhje5Kpvj5P/5DfW1pt9weCgocDMMCTMBi/o2F7Fi1\nB19GSGGSIsobDOULqRpOfGQulxd6lJerzy4vV3pfNMOGOXO6hfBDxw98fw78jxDiOmA38HUAIcR4\n4DtSyn8HRgDzhRAharL5uZRSi79GkyZt9Mm6dbDBt7nYXMPt/V1eeqSQARfU4fsOr75qU1cHhYU2\n777rEokEmKYK/5SUuLzxhn1IpKHJRgIf37f40/JKksny5qGN/O4hVK3RnqYrRyI/38Y041RVuexb\nWciC6nIsfFKY1JxyBSP+uoLpLMAkJHjJIHw5j7EyzvrQ7rYZVx0SfyllHVDayvMbgX9Pf/88UNSR\nz9Foejq5+eT19TZX3m+TSoH8g8oplzJbOPTVr6rwT+asoKHBaTXkc27OIaTEZ8SJS/lnTmijvt7t\n1uKfaaTyUawTWoaL1C7CJpGwuYO5TfYb/ygJ+az/FtHdAaYMkUCEkCD0Od90eUHYXc624WjRFb4a\nTRchI94TJ0IqlX1eplMfMuZgffva3H57nDPPdKmqcnjtNZsf/ODQ9xtS5hAssgh8H8OyOGXcZBqC\ndU0r/4IC52O/p4+TIzVSaYvcHr2RCEydqp73ffXVxaFupMkr9waEUYkQm3hvCJz4Z0G/WkkKA2FZ\nfO1XDsfVdZsQ/yFo8ddouhCuq0Q+F8NQzxmGWuGWlQHYzJ9vI6VqIZgbdsiuam3sNVl1zLdtihuK\nqK93KShwuvWqP8NHsU7IDRcFAcyfD9GomgiSSXgBm1+NncaF1nwwJFKm2H8Z/O3iKB8uvZkRFDCk\nzKHItrt1SEOLv0bThcj1kjEMuOUWKCjIZpbkrjIXLz405LFggTISC0P1PvG4zcgbVDZLQYOKbfcE\n0T8SDQ1em5NcJlyUqZKWUk0C06dnr7n0W2UEyUWEYUI56gmQkYARdxQwZMicT/RePi60+Gs0XYij\nDWW0dp3nKcfnTMgokYCNGz2SydIec8h7NLSas59zz5nfXSwGCxcq4bcsuGGM1yxVs+GBqeyp/y11\n6ZcKTP70J4fx47tnmKclWvw1mi7G0YYybDxsXGpqHOa6Nnv2cIizZEmJSxD0nEPew5KOd9Wfvacp\nZz8IfJ54wuX00w/12hk8ONti8foSj6LyFm55Y8fwz4OAACHhyQdv5lfL2q6p6G5o8ddouiPpU0uZ\n8BkWWiw34myI2JhmNjNo3jwYPdqhutpqWgVHo4Xs3j23x8T8m8g5xe07ShD+AmTEIJmyuOceh127\nsoKd2xwtDOHMMz3qTqygfliCgq0hMuHzXIWL+UMI+xhASBgKxLADxySttKugxV+j6QbkpiYCJCpc\nzk34iDAgis85ocvzSVtZDaMOgYuKsvnrb7zhMnRoIa+91nPy/JuRc4pbsAXGzIa6kgg/qKpka63d\n7FA8FsvG+0eO9NJ9ExJsSYaMvs3A2mrxw2ccDu6He+6JKEsNQzJp0iJWrSpj165Dayq6I1r8NZou\nTsvURCnhiymHVaFFnvBJSgsXp+nwErINvwAuvNBm2DC49toKxo1LoFayPSwE1OIUt18tfKZWMpQ6\nDENNhnv2qAPxhQuzv6eLLophWY0YhiQQBlWTL+CHtRWqTeMWWLFiGldcMR8hJJaV4tZbWw8hdUe0\n+Gs0XRjPU9W/uW18AdZLmwtFHEe4PCsdXmhhHyyEErtYTLmBZla3UoYIYfSIPP9m5JziikWLkMkU\nmBZfutlhxgFYtAgeekiFwzLnIqNGeVx22UKEUM3Xk6ko+4dWsCnPhg/VNatWlTFp0mKiUZ9oVNls\n5Od33m0eS7T4azRdlJaxaSGUeGUE7AVsPGnTmv1tGKr8dSHgG9/IWEeHSGnwz39ewIknVvScVX+G\nzEl5WRnCdYk6DlfZNtvnqgyoIMiehwgB48e7GEaAEBAEgpUrp/LBBzZTpkBtLaxdq3r3zp4dp6LC\n5aKLetY5iRZ/jaaLkgljZwq8IJuXLoR6PiNmppl9nLtDkBKqqprbQfzoRxXs2mX3iIyVVrHTeVCu\n6jSViQgNG+YxbpzL+ec77NtnM3Fi1iU1lbKIx8vYsUNNFJYF3/ueygSaPNnmootUrQTQYyYALf4a\nTRcl17sms9rPCDxkJ4EzzoBzz4UxY2DpUti3z6O4WFk/1NbaTavXkhKX6mplHd2yKviIdCNv6Nwz\nkkxa5urVHo2NpZimOuy+8kp12N3QEGfLFpft2x1s22br1qxRXEEBrFx55LqB7ooWf42mi9KyeXh5\nuRIl08xaE0gJ27fDK6+oit7f/tZj4InnYUZ9UkmLW25dw7ZtagLYuVNlA5lm60ZobVbFtqamORNA\nV5sXcu0bEgl1ZvLDH7qYZqbeoZH9+2NN1c7nnGNzzjnqMDgTWsv9/bT0+u8pB+Va/DWaLkxuwVdR\nUXYiuOGG5tc1mb7Vx4ienAATLJngzhkx3uljN1lDQOtCfdjV7WG8k48wL3yiZCahwkI1lsxZyerV\n8PbbDpWVJoYRAJL9+xcxYEBZ0z16nppcw1BNjpWV2fvIeP33FEO8DFr8NZpuQmYimDs3G/rJkFmt\njngH/nY6hBKMFLy/HJiseozkvg+eB3PdplngsKvbdPxJJnxShsWOQqfJ0KyjnvrHatfQchL67W89\n/v73GG++qTJ2ampsli9XaZsgCcMU1dUx
hgxROx3XtZvOV4RQPkoZ8vNtiovjPcoQD7T4azTdjlzz\nN9OEm2+GQYM8iotjwH5Ouz+C/+mA4zZHub22jBdWqdfNmJF+g1aW6wUjD7O6tW1qKuM8NtPl2cBh\nU7lNvEiJdUc99Q+7a2gxMxxuoshMQmec4TFpUoxBgx5h8OAkY8bAxRcv5JZbXFatKuOSSxYDPkFg\nIsQi3ngjhWFYTJwYx7LsNu8jP1+FzpYs6TrhrY6ixV+j6Wa0NHUbOdKjquo8wjDB28C+mVHefPhK\ntpcM4ABArToIbhJ/183GRBIJcF1qmUN1tToUHj360NXtsjqbn0mbIAQzZ4XfXk/9XAE/7K4hd2Yw\nTfZfOo05K8r4S9C6t47jwOjRHj/7Wakq2hJqayQERCJJxoxx+eMf57B5cyWwlA8//BQTJjxFZqcz\nZIhLPG63eR9dKbx1rNDir9F0Q3LPAnbvdpHSz/7QSHHSdcsYZEic5GJmz44zebK6uKHBo/60lyj4\nQkh+LRCG7KovTAubjWWpFFBoLuiHW+EfrRFdSwGtrDzMrqGF6f7nnpjPChZTSpwNvn1IeMm24Ze/\ndEmlfISQICFTAGGKCF/6ksP113skk+UEgU8qZRIEkXSOv8VzzzmUlbV9H8eyZWRXQYu/RtPNKShw\nEMJS3vNAGBoYRpgu6vIZM8alqMjOHur2b8S4F4pnQ/4Ogzer6poJWyzWvFdAZjKYMkV9PZxIHo6W\nAlpXd5hdg+PQMNqkfkRAQRXk10qi+JwvXKqtQ711PA9WrnRwHIuI0YhISU54Aax6GHDqdTg32uze\nPZc33vAxzQDDgD17prNq1eCmlNjGxpzdUQs6Et7qqmjx12i6Ofn5NiUla3jssRjbtsHOnWOYNau8\nqairutrBdeGkk9KHukISRuAfJYJP78yjcLKDtS4rbMBhJwPVSewwpGM7NYUOy+psLi9UPvmXFzr8\npEVcvbVdg+fBxo1QdI/qomL4kuLbDD6z0+KMqQ7x9OfPnZsVYcdRO5fHH49z1dgYN21ayAm1AUks\nVn2vjKtonrVjmhbPPVfGkiXZD28WGmvBR20Z2ZXR4q/R9ADy821GjbKZOVO1Ijx+N5w/einx6sns\nel2tlAsKHAwihKkAIwV9qwyuT1byRexmwgbNxR7aEfJoYTX9V1HJMFmONHyK8ixerIyzrM5uU0Az\noaHJk11GjkxhmpKwj0H9rReQf3oFZemD39zw0ZQp2f67maK2ZynDwcXF4eX7bZ67Cmy7edbOl79s\ns2hR9rMnTz787/ijtIzsymjx12h6CLathHlnzOOaReUYtT7XmutYfnMRrmsDNsU1U/nnC/PpVyX5\nVC30p65pxZsrbC0ng4ULsznwjnOYFM10bCdjNf1VuRQL9Rjfp6jOpWhO2wqaOYv2NxXCNQZSSMxI\nHgVXVUD6ELpl+Gj/fvXakSM9SkpcDhwopCh/M303A7VZh1Pbbt7GMrPKX7pUCX9bq/6eihZ/jaYH\nYdtguy6kfAgDIvhsus/lZ1JlybxYWcaIxxYjkz5JlBX01FZWvLmrXM+jqU+AEFBTk602zs188TzY\nucfhmoiFIX2SocUfmcxE1mEYPsaRguWeR8nTLtPCQn5ZW07j7ICGcQYnfLuyWfaR42S9jEwTBgxQ\nDVkyzqWGESJCML4FU2cv5H+/5uI4zSecTDXzqFEOdXU2Rd25E/tHRIu/RtPTyDmdTBkWzwYOQboC\neFmdTdFzcbxYjE194fpRR47hu64yO5NSfV26tPnKe2fM46SYy5yFDqkUNIopXPEVePH0Mv7v/Tbb\nUkWUGi5fq3Qoaitu4nmkzi3lwqRPKQKDkE/VhuRvF5gj6mBC88uFyDZe79sXxo3LOpciAVMVun1Y\nkmTxRJchOZ/bdPAdJAgSJlWPPsiPfzyDNWt6VljnSGjx12h6GjmnkzsKHTbcaCOSWY//9SEE31zM\nqNDHMBazfn2ctWvbjsO3zHS55hqPgQNdXn7ZofBVuGZRKcL3eVpGAEmEALnc4lXKSKXgeWnzorQ5\nrg7aWmDvjrmcnPSJEJDCIMQkiWh1t+C66lwDlPjffz88/LADWEACRAgpVeHcb3uU/HnNX6+qmdV1\nZiTkpyUz2VNbRCzWM5q0HC1a/DWabsZRWSKk4zbvedlVciqlzMs++MBlyhQfw1AFTqsejtF3U4zl\n2yEMy5gwwT7krW68Ef74R5g2zeOUU0q5dkqCa68xGfLHyzAf8kEGRFE+0iaSMPA58JSLlOq9IpHD\nR3yew+F/YSFR4ajvUsnEkXX828PZm8yEaiZOdDAMu8m2Oghg3z6bK69Uh7nRaCHJXZsp2A758w7N\nS9271yH0TQwjVBNEVYiDSz29SPnR4q/RdCvaW2nqujB8uMfo0VmL540bHb75TUv1ppURrvvHw7x5\nT4owCokPF9HQsKZZjD0W89i718WyHN55J0aQakQYEkSIeP8pkjKCIUCaqseklAGBabEm5QBq8pk6\nFWyyfkINI2nmlTO8zOaSR+L8SzKdoRO1mfYwZPQ4azyXAASx2BXceef3qK1Vk8BLL4Hj2NgZw7nd\nsOQ95eff0jHivPNsvjr8QX5aMpN+VSHH1eaxPuLwiyOlsPYwtPhrNN2I9laaTjp5AWfdPQuiAclk\nHrfeGmfbNuXvP3asy9cb90D+fMIoYIIR+uzfH6O6Osa+fTBo0BgGDixn2jQ/XREbQrqCVgRw/MuS\nh8Op7GEwzwuHB+dBUZ3L8nqH5+9SA5MSJvXNzloNo02q7xOEpACLmpo4Th7ErnP57/0OA7C5bkDz\n+2gK1aR3FwMGPMF9963glltcamttnnhCee9nCtLamiAzIaM9tUWsrv13AJ49uYxfPNa7Qj6gxV+j\n6Va0q9LU8yh8diYH/i2lhF0kmDHD5bbbbHbsUP7+k2/26PvnhRhJn1BCgGDv3ocxjBQDBkAiYWKa\nUmXQCCW8QgAhfO5pQYoIu6+Gp6octm+3WVYHRXNU60TDUBk5Z57pUXBcBQ3DEuRvDdnvhIRSgoBU\nyuftF2MM+91ijjN8ZkctSmWcpwKbxYuzwh2NFja7NSEgGk1y0UUxSkrUruaVV+ympvUtrIuahN1x\n4GzTY2VQioWPj8XAa8p6nfCDFn+NplvRrkpT16Xg5RDjGyrzxRQmU6Y49OkDM2eq3cM3fmkzPuny\njQfvYkT5UwgjQIgwJ7UzIAyjhKFAygimqcI6ST/CMzsv4fR7/8yF0Ydwkou5/fY4e/aoIqyM82im\neTx5Caq/HHLaPMH+SUr4VcvJCH03k60FSDTyv4mRAM5rdFl1XyHJGzcj5SLI6VasuphJLrnkYUxT\nkkxaxG6rpPSlOt463SEM1S8mDJW/f+7v79HpLnm/9TEJMAyfqwpc6GXxftDir9F0O4660tRxyP9J\nHsW3JagfZ1Aw/UHy81Vjl0yvX9+H9dJmSP5ZjBRPql7BaVM0CaRSFg888AD9+tVx1lnK/Oz++10e\
ne8yhpMRlRPQpTDNACJ/iYpeHHsqu2ONxePVVlz59fCAk7GPw7rShSOt1VPhGsHr1VLwdZZSzUIkx\nkut4hKks5IMRKbZeF5JKCQyjeQODTA/jSCRQP5MJ/nP0TE5ZIhltWNjE8bAxjObe/ABDyhxYrLZP\nR6w96MFo8ddoeirpbUK+65Kf3iY0NHicfbaybd6yxSYSUTuAqioHkVTLcRHA8dvh4HHH8+Cye1ix\nYgbRqOoelp8PH35oU1urPiLTGB4sNm92+GLgcd6HLuvucvje4zYjRzpUV2f7BBzofxuN75cTiSjf\noXHjxnDmQpeVCy7l8uefxERiksIEDpZIwigYhkyv9AUgmxrZBIHakUQiAaQMTqgKiBAiQ59zcXnR\nsMnLa0Xbe6JRz0egQ+IvhPgaUAGMAM6SUm5s47qLgV8CJvCwlPLnHflcjUZzlORsE3JbNd5zj8Wj\nj8b58pdtNm+G+fNtVs2+lWsvugu/H/zjSxBE3mPmzHKkhBNOqCMMHcCmoEC9dW5jeN93KNgOK0nH\n0p+wqFkQp2hGcz+dX//aZsmSIkaPVjYMN91UTjTqI/8zwt9vinJCbUBABMOQfKYqhZEMSUoDYUTo\n27eEgwc3IkRIEAiefvo61qwpY84clyd/WohdW04ynSq6VjiMHw9jx6qK5J0xj3Nx1ao/8zvppaKf\noaMr/63AvwLz27pACGEC84ALgb3ABiHEn6SUtR38bI1GcxRk8uMbG/c0a9WYTLqUl9tUVkKfPrDM\nuIrSSfcTsZJIAYaQRGSC8vJZCBGQSJi89daDOM6Mpk5iGSO1fxEeP5IVWCSIEAIJ3r+tghoqKJqh\n/HQWLIANv/K4fL+Lu9Vh6NUuhqHGI0zYdNN03r5nMCf8q8Orr0L9Ey67ZhcSKakjL8/huusASpHS\nRwiLU04pY948ld45aBA8eFcRB55ycaXDxoiNqFbuoGeFHvH0pBQssjDX9IBOLMeADom/lHI7ZLZj\nbXIW8JqU8vX0tb8HrgS0+Gs0HzO5q30hTISIEIYqlr9pk9PMV//VV10ifdIZPUAQCKQ0MM1UOvQS\nsmPHTPLyilizxiYWg02bILLBY7UsxSKBSUiAgUnI+APP4H97Hc+treS49+vY8EQhj1LelGUztaqS\nZNICmcCUBp87bgxV02aQKoQf3g9JbKUStSpzaMkSmHx6JU7RUp7bNpmZv8mmZ9o22I/beJ7Np10o\n2gMPPaTOBRxcLFT1cNBTOrEcAz6JmP/JwJs5j/cCX2rtQiHEDGAGwODBgz/+kWk0PZzcxuxSwsCB\n03nnncHMnq1SI3N99XPj80JEiEansmrVGM47bxZSJtNdr0Ieesjl29+2+c1vVNHUnye6WCmfCCEp\nDF5nKP1H7uJgScjxVY2cufkGDpSE/HykgVUrVVwen6G1dSyaXclPS2bSvzrA2lbOTUYRLwibIGh+\nH2EIYxMeD9WUY9X4XMM6/hArwm6lt2/GZG7x4nSqZ+jgp6uHe/MBb0uOKP5CiGeAAa386AdSyieP\n5WCklAuABQDjx4+XR7hco9EcgdwGJoZhMWBAGWecYTNv3qHnnfn5zePztbU2990H1dVw000zESIk\nlcrj5ZedZj18T7ylkIMr4ECxoO+2COuH/CvDvnMXYRREIEEGyAgYyYBRs0361prsvUxw6sQn+Py6\nkxj6XxJDhiTxOSd0eV60vir/6pkx/ja6kROqJJ+qVYe6nme3WtCVe6ZbWGjzh83x5jF/zZHFX0p5\nQQc/Yx/w+ZzHg9LPaTSaj5mWgp6xbchdIWc6YuX63Tc0eGzYMJehQx2WLZvBX/9a1FRMtWtXThtF\nz6P/6hupvjtQ9XoEXwAACitJREFUVcKE9K13SUUFhikJhQohYahag2XOFTAaBs94guG8BF+EvUaE\nzy8zSYYW6wyHc0yPs1Muz0qHF9L596NGeRTfvYjdUcmbSSiaYzKkzGGJ23bFc/MzXZvemMt/OD6J\nsM8GYLgQ4lSU6H8DuPoT+FyNRkPzBia5tOUT1NDg8fLLpYwapbKCZs+Os3OnzaWX2rzzjjJ5axJV\n16V+ZLLJHkLKFP36vdRUBdx0HKisgLjq6kuoSi0lDLOGc3/76gDyi8bzSvEAZlXX8LV7yjFIEAiD\nuwbP4z/enEFxsYsZVZXKoSE48Ktp9LNtHHpeb91PCqMjLxZCfFUIsRc1pS4XQqxMP3+SEGIFgJQy\nBcwCVgLbgf+RUm7r2LA1Gk1Hac0nCGDVKhcpVaPzSMSnpMTlkkvggQfg/Wc89s6cS80CT13sOHym\nOoqRRIk96nC2KQVE1XJBCANWQv7aOoYPn9wk/AI44fm9VJ/9BB98dgEDnVkcHNaIkCERmeKOvbM4\nJ+KxZYujDocxMcw+FIxWLmyZ8M5PfnJkkztNczqa7fM48Hgrz78FXJrzeAWwoiOfpdFoji0Zn5sJ\noct608Fx1OHpnXc6/OIXVlMD+JoahwkT1IHrqrAUK/QJb7CIbY4zvMxm3Wku4ewYZ1y0ln6X1apq\nnszKX4JMe+uf+KwFv3Y46SSl0O++eDefXbiLZF/ZFDJKoRrL59dKNZHIgMXTXJYMnsPxx8cZNKh5\n+Ap0yv5HRVf4ajS9FBuPuCgFfILA4pUa1Vx969Zs8VZ1tUNJiVJWR2RTJpOBzyvzXb6z2Kay0uam\nP9uMrfT43c6J7ClPIQWkklFenXcZqQJo3DyAH+0sYy42NnDSSTM4aUARxEtpGJbASIYEwiCZyuOh\nqhv5MfcTEQFGXh5DyhxU218dtz+WaPHXaHorrouR9BEyIAx8HpvpMmieskTYsUMVbwFs26ZCOWeb\nDiEWQeiTlBbPymydwJo14Lo27xWupWR7jFcGwIv7y6gdpIq7wlC9R7MU+xz7ieLjC9kS1jH7Voct\nr9h41lUsnnZods5RNbLRHBVa/DWa3orjkDItCJUlwrOhw2Xpgq+KCnjmGZq6ZYUhrBc2v5+uUian\nLHTYEDSvE7BzVudnoao7M8KfeY/C5s7MeNi42DgGnDOBnBRUu6nvbkbwCwvbbhyvJ4T2o8Vfo+mt\n2DY7Hozz2EyXZ0OHTXk2dztKQCsqYN26rC++YSjBHV6mRHlu2ZEF1/NUs/fM4W5Lh81MttHYhMeH\nhstn5jnYM5o3VcnNSDIMdTidcSPNHFCXlqpxGoaaPGbM+Fh+Wz0OLf4aTS+maIZS2vOXuhROhiI7\nWweQLZJSop0r9G3VCWTIiHYikRX+lg6brquE/5nwPKKhj7zBgqI1zd4oNyMp8z5CZNM6XTc7QYUh\nzJoFRUV6B3A0aPHXaHoznkdReXppvc6Comy+pLJ88A4pEMt5aevtEj2PRIXL2ITD+lB56l9wgdpN\n5Iqy40ChiJFHAgHIIAGxWLOLWnYuq6w8dCLKdAwDNUlo656jQ4u/RtObOUxT4FxTOMOwKC6ON5sA\nNm70mDzZZdOmbAtFGzUjnJvwWRVaXGTE2ZRnHyL8oB6f
cgXwhHrcmj3kkaz3bVuFembNUrfQqn+/\nplW0+Gs0vZnDNAXONYULQ5/6erdJ/BsaPIqKShk50ueaayy+//04jmM3TSYiDDjO8LnzApe8CrvN\ng9mB3yuDFQtVV/VoFMrKDhnikfL4Z8xQoR596Ns+tPhrNL2ZwyytW5rCFRQ4TT+rr3cBVQVsGD6/\n/KWLbduA0zSZCMvCmVwI7lxqahxKy+1DQ0R2esLooHLrQq/2o8Vfo+nttKGcbZnCQfOJwTQtRo92\nsu+Ve1Kczs38gmExNoizPrQPMWDTyt05aPHXaDRt0pYp3OEmhiYxnzu36TwhIn3ON1xeELY2YOsi\naPHXaDQfiWYTQ2sB/ZzzBGFZfK3S4bg6HZfvKmjx12g0HaOtnM8W5wlFtk1RZ49V04QWf41G0zEO\nky6q4/ldlw75+Ws0Gk1TeMc0dUeVboRe+Ws0mo5xpEosTZdEi79Go+k4OrzT7dBhH41Go+mFaPHX\naDSaXogWf41Go+mFaPHXaDSaXogWf41Go+mFaPHXaDSaXoiQUnb2GFpFCPEusPsjvrw/8PdjOJzO\noLvfQ3cfP3T/e+ju44fufw+dMf4hUsrPHumiLiv+HUEIsVFKOb6zx9ERuvs9dPfxQ/e/h+4+fuj+\n99CVx6/DPhqNRtML0eKv0Wg0vZCeKv4LOnsAx4Dufg/dffzQ/e+hu48fuv89dNnx98iYv0aj0WgO\nT09d+Ws0Go3mMPQ48RdCXCyEeEUI8ZoQ4o7OHk97EUIsFEK8I4TY2tlj+SgIIT4vhFgjhKgVQmwT\nQny3s8fUXoQQfYQQLwkhqtP38OPOHtNHQQhhCiE2CyGWdfZYPgpCiL8KIWqEEFVCiI2dPZ72IoQo\nEEL8QQixQwixXQjRpWxPe1TYRwhhAq8CFwJ7gQ3AN6WUtZ06sHYghJgIvAfEpJRndvZ42osQYiAw\nUEq5SQhxPPAycFU3+xsI4NNSyveEEFHgL8B3pZQvdPLQ2oUQ4hZgPNBXSnl5Z4+nvQgh/gqMl1J2\nyzx/IcRiYJ2U8mEhhAV8SkpZ39njytDTVv5nAa9JKV+XUvrA74ErO3lM7UJKuRb4R2eP46MipXxb\nSrkp/f1BYDtwcueOqn1IxXvph9H0v261ShJCDAIuAx7u7LH0RoQQ+cBE4BEAKaXflYQfep74nwy8\nmfN4L91MeHoSQohTgDHAi507kvaTDplUAe8Aq6WU3e0eKoHvAWFnD6QDSGCVEOJlIcSMzh5MOzkV\neBdYlA69PSyE+HRnDyqXnib+mi6CEOIzwFKgXEp5oLPH016klIGUsgQYBJwlhOg2ITghxOXAO1LK\nlzt7LB3kbCnlWOASYGY6JNpdiABjgd9IKccA7wNd6gyyp4n/PuDzOY8HpZ/TfIKk4+RLgd9JKf/Y\n2ePpCOmt+hrg4s4eSzuYAHwlHTP/PXC+EOLRzh1S+5FS7kt/fQd4HBXW7S7sBfbm7Bj/gJoMugw9\nTfw3AMOFEKemD1i+Afypk8fUq0gflj4CbJdS3tfZ4/koCCE+K4QoSH9/HCqBYEfnjurokVLOkVIO\nklKegvo/8KyU8ludPKx2IYT4dDphgHS45CKg22TASSn3A28KIc5IP1UKdKmkhx7VwF1KmRJCzAJW\nAiawUEq5rZOH1S6EEP8FOEB/IcRe4P9IKR/p3FG1iwnAvwE16Zg5wPellCs6cUztZSCwOJ09ZgD/\nI6XslumS3ZjPAY+rtQQRYImU8unOHVK7uRH4XXoh+jowtZPH04weleqp0Wg0mqOjp4V9NBqNRnMU\naPHXaDSaXogWf41Go+mFaPHXaDSaXogWf41Go+mFaPHXaDSaXogWf41Go+mFaPHXaDSaXsj/B5mj\nDFi6aXY5AAAAAElFTkSuQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAD8CAYAAACfF6SlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXt8FNX9//88s7uToChotEVEwOIF\nA7lBikxRGAyIl0/Vfmm13qKgpFpAsV4q/WlLayuKSkMFL1iwpNZbpUVtpSArIypjuYZEAlQUpVGp\nGk3wlp3dmfP742SzmxCUS8IlOc/HI49ld2d2zi4z7znnfXm9hZQSjUaj0XQsjP09AI1Go9Hse7Tx\n12g0mg6INv4ajUbTAdHGX6PRaDog2vhrNBpNB0Qbf41Go+mAaOOv0Wg0HRBt/DUajaYDoo2/RqPR\ndEDC+3sAO+Ooo46SvXv33t/D0Gg0moOK1atXfyylPPqbtjtgjX/v3r1ZtWrV/h6GRqPRHFQIId7d\nle2020ej0Wg6INr4azQaTQdEG3+NRqPpgBywPn+NRtOxiMfjVFdXU19fv7+HclCQmZlJjx49iEQi\ne7S/Nv4ajeaAoLq6msMOO4zevXsjhNjfwzmgkVJSU1NDdXU1xx9//B59hnb7aDSaA4L6+nqysrK0\n4d8FhBBkZWXt1SpJG/+OiOvC1KnqUaM5gNCGf9fZ299Ku306GJWzXfpOKCLse4gME6JRsKxd29l1\nwXHAtnd9H41Gc0CiZ/4dCNeFv453EHEPEfjImKeM+a7uXFQEt9+uHvWqQdPOqKmpIT8/n/z8fLp1\n68axxx7b+NzzvF36jDFjxrBp06Y2HmnroGf+HQjHgZcCm18QQhAgRYiwbe+wXYsTfMcBzwPfV4+O\no2f/mnZFVlYW5eXlAEyZMoXOnTtz0003NdlGSomUEsNoed786KOPtvk4Wws982+n1NW5vPvuVF57\nzW1079s2hMMAylcopaCysul+s2fDsGFw223NJvi2DaYJoZB6bOGmodHsa/ZF+Grz5s1kZ2dz6aWX\n0q9fPz744ANKSkooLCykX79+/OY3v2nc9rTTTqO8vJxEIkHXrl259dZbycvLw7IsPvzww7Yb5B6g\njX87pK7OZd26IrZsuZ3PPivi8cddiorUe/PGOkREghASI/B4/adlVM5WV1DlbJfx4yEehyCAWCzN\nK2RZKj5wxx27FyfQaNqIfemJ3LhxIzfccANVVVUce+yx3HXXXaxatYp169bx4osvUlVVtcM+dXV1\nDBs2jHXr1mFZFnPnzm27Ae4B2vgfZOzKTKe21iEIPMAnHPbIzXXwPFi1ysUcuRVpGEjAQHKl/win\n/HQ43H47fScUMchPfXAo1GyCb1kwebI2/JoDgpY8kW1Fnz59KCwsbHz+xBNPMGDAAAYMGMCGDRta\nNP6dOnXi7LPPBmDgwIG88847bTfAPUD7/A8ikjMdz1Oel5Ym4K4Lq1bZ5OSYgEciYVJRYZOb65KT\nU8QmPMKW5KhXlfMnjK+uHiAsPYpCDq8HFoYBM2dqO685cEl6IpPXQ1t6Ig899NDGf7/55pvMmDGD\nFStW0LVrVy677LIW8+1N02z8dygUIpFItN0A9wBt/A8ivinmmro5WOTmRpkxw+Gww2wuucTitNOm\n4vtqNeAd0fRzpQEIA2Ga/KjUplONzubUHPgkPZH7Ovt4+/btHHbYYRx++OF88MEHLFq0iLPOOmvf\nHLwV0cb/QOIb8uh3NtNJ7rZ1a+rmUFFh8eqrVuM2hmEjpUkQeHx4VohjFiWQ8QAZhs0TBUfKQt47\ncQD/fWcBV1RNIuvN7lB5NtToO4HmwMWy9v2pOWDAALKzs+nbty+9evViyJAh+3YArYSQUu7vMbRI\nYWGh7FDNXHbFp0PK0A8d6tKjh0N1tc3IkRaepzJ5pFTG3zShtBQmTVIfmZvrUlpaxhFHbMM0u3Hk\npsP5/B/T+TQv4LP+EaSUHFoZZ8CNEiOeOp7EQGZkEFqqg7yatmXDhg2ccsop+3sYBxUt/WZCiNVS\nysKd7NKInvkfKOxiHr1lQXZ2MpvHw/dN+vSJ8sYbattx46BnT8jKgvnzob4eTjnF5c47i/D9GDU1\nAWBQfWgG5uWz6Nmzhsj2FXz00bNkVUhEIpkISkNQOCAe81g9zSE6yGpcBOhiX43m4EYb/wOFXYhe\nJQ3uaaelsnlCIY+BAx02bLAwTSguVtsWFSnDLyXk5ztEIh5CBA2fFCClx5w5NYwbZxOP/xohJJ/m\nQa8wyLSZfwKDOCY3Pm/jPq+G9tBDLi+95LB6tc0dd1iNixR9Q9BoDh608T9QaCF6lW5MIeUVys21\nmT5dZfMYhsm4cTYnn5wyulOnqu2SHr3ycptEIoRh+I2HC4Iwq1fbXHhcGSdv80gcDuE6eOGsfDpt\nMPn44+5Ea8/maFHDMmHzmq+seZ8+LsccU8Tll3v8+McmN98cxXGsJuP7Gq+VvkFoNAcIrWL8hRBz\ngf8DPpRS9m/hfQHMAM4BvgSulFKuaY1jtyvSolfNQwBXXNE0mFtZGeW885TPf9mylDsG1+WSrQ6L\nQjavSItBgcvwDQ5vlp1D/3HPAhLfFyxaNIas/8Coux/BiDfcJQT0EOsZHryMi0r3LCyEtWuBhkVD\nfr5DOOwhhI+UHgUFDlu3WpSVNfVarVrl0r27Q9euNl26tPyddK2YRrP/aK2Z/5+AmUDZTt4/Gzix\n4e9U4MGGR00LuC5MmaIqbINAGUto6hUqLLR4/32LkSNTr/271CVnUhG9PI9o2GTReaWc+c9JGAkP\nOT9MxZAIddk+oZBJ797F3HROGcYCH4Hy7wsJEeLc23kSt3xZypoMiwEDID3uvnatyhoSwgNMKitt\nKivBMFRsITfX4YsvssjJmcSWLWplkpcXpUsXa6dhDb0a0Gj2Pa1i/KWUy4QQvb9mk/OBMqlSi14X\nQnQVQhwjpfygNY7fnkjOjpOG3zBo9OUXFzc1klOnwoCYy+mBwysxm5r5TqN1DeFxzpfzIfBA+pCA\nPv/9PluGfMnRR4/Gti2oKiM91yt5Axj8xQqWhofzz4lLWbTdIl02fP16ixtuiDJzptNg+C18H04+\n2eWee4qIRDykNAAfCOhcGSP4xxT4wRRs2+K0kMuQwOG1kI1tW3o1oNHsJ/aVvMOxwH/Tnlc3vNYE\nIUSJEGKVEGLVRx99tI+GdmCRnB0nDf+IESplM1m6nq6ucGHtbF4KhnEHt7E4KOK4/Kym4mujRzc+\nl2aYt457gU8/jbJ58yTq6lwoLkZkZCCFwA8JtvcFBBgSQr7HmukOjzyixpF+Azi0AmpuAjsjKRSX\nCiqHQj6GkSAIBN2eF+RfH9Dl3hehqAircjZRUcQd3EZUDCPn89n7tERfo/k6hg8fzqJFi5q8Vlpa\nyrXXXrvTfTp37gzA+++/zw9/+MMWt
7Ftm29KWy8tLeXLL7/czRHvHQdUwFdKORuYDSrPfz8PZ7/Q\nPOln9OhUrn44DGPGqBWAVTmbPvdeiyRAACEjRp+uNRCN8m6Zw8vYnJhjYUVzwHHYdvJW6o58BPDx\nfY/Fix02b57MKdcvBcfhP92zGD58IgNu8RBx8EMmS+I2PipwfP758Pzz8F3fZQlFmCs8xDqT35wd\n5ecLrCZBZSEkh70BJ5SCCEAgCepjGPPnYyRiCBkQxAM++ut4hl6Rg2laTZKctBtIsz+4+OKLefLJ\nJxk1alTja08++STTpk37xn27d+/OM888s8fHLi0t5bLLLuOQQw7Z48/YbZL61Hv7B/QG3tjJew8D\nF6c93wQc83WfN3DgQNlRWb5cyjvvTD0OMZbLW7lTDma5FELKYeZy6YciUiq7rP4iESmXL5fLl0vZ\nqZOUoZB6XL5cfWZt7XK5dGkn+eKLIblwYSeZnb28ye4gZV7ecrn2gWvkV7+8Rs68tOn7t9wi5TXX\nSPkA18gEQkqQCRGS71xzp+zUSUrDkPKGG66RL70k5NKlyDfHCplo2DkAGRcR+UXpLdLPMKRvIBMZ\nyNUzDfnOO3c2+b47G7+m/VNVVbXb+9TWLpfvvHOnrK3d+xOlpqZGHn300TIWi0kppdyyZYs87rjj\n5Pbt2+UZZ5whCwoKZP/+/eWCBQsa9zn00EMbt+3Xr5+UUsovv/xSXnTRRbJv377yggsukIMGDZIr\nV66UUkp5zTXXyIEDB8rs7Gz5y1/+Ukop5YwZM2QkEpH9+/eXtm1LKaVctGiRHDx4sCwoKJA//OEP\n5WeffdbimFv6zYBVchds9r6a+T8HTBBCPIkK9NZJ7e/fKekl650rXa4PijDx8DApklGGxB2QTUWi\nqi+8gT87VhOJhz59XP7zH4fsbJVxU1kZZcUKhzVrbKqqdpxSV1RYLKy1yP81PDcKBuNi4+BgE3Pg\nl93LyGIOBhIJBEaIl7EpLVUqEEOHFuP78wgCj9r8EJ4QRGScAIOnT7+BHnn3c/h9AV3LoTYfPs/J\noE9Xm1691Petq3NZsMChTx+bN96wdM8YzdeSlC4PgqaJBXvKkUceyaBBg1i4cCHnn38+Tz75JBde\neCGdOnXi73//O4cffjgff/wxgwcP5rzzzttpD90HH3yQQw45hA0bNlBRUcGAAQMa3/vd737HkUce\nie/7FBUVUVFRwXXXXcf06dNZunQpRx11FB9//DG//e1vWbJkCYceeih3330306dP55e//OUef7eW\naK1UzycAGzhKCFEN/AqIAEgpHwJeQKV5bkaleo5pjeN2BHJqHKQRQwQBEOMM4fBaxAY/DL6qxpJC\nMPuprtwplbs/HFaZN/fcU0RmpsfatSaVlVEyMizmz7f46quWjyWlqgwGuDbf5czF6qaTIEx4tSS8\nMo5EIgAfwR/9sYx/2CIzEyZOhL/+DIp7XsExF8MhxcUsqob3H3d44gObEZ3KGPznerYPgK2XAhic\ndEKpulhdl/p/lfH2t+fSK9vnnntU/cBbb1kt1bppNEBT6fIg8KitdfbK+EPK9ZM0/nPmzEFKyS9+\n8QuWLVuGYRi89957/O9//6Nbt24tfsayZcu47rrrAMjNzSU3N7fxvaeffprZs2eTSCT44IMPqKqq\navI+wOuvv05VVVWjZpDneVhtMANqrWyfi7/hfQmMb41jdRTq6lxqax0O6VfLkZEA4YFhBJzxw1p+\nfAYYC89VTngpSYQyeMm38QMYLF1uKnQwz9xKZqa6MBIJjxUrHObPtygtVXn7c+ZAIqECuUFDDr9h\nqBk8QMF2hww8QvgYBCp5pyE3yEcQI5MyipFSVRIvm+YSbYgFBAtMzGXFXHC3xdSuFvI2l1sXz8FE\nIv8M5b+H7f0gHq9pTG/KiNWTG5Gsuw/o53HTTQ4nnWQ1rghqa5vWDGg0XbvaGIbZOPPv2tXe6888\n//zzueGGG1izZg1ffvklAwcO5E9/+hMfffQRq1evJhKJ0Lt37xYlnL+JLVu2cO+997Jy5UqOOOII\nrrzyyhY/R0rJyJEjeeKJJ/b6+3wdB1TAV6MM3bZtZWzb9ihSJhBdDLqNhxNnqODp8L/fh/xbKUHC\nh3AY46oxbCwoZs0kiyExl8VBEZ1WeVAZpvyYELWnQCJhsmaNTX29MvwPFrvcigoK1xdYTJqkUksN\nQ838XRcmz7V5AZMIHj5hBD5hfKQRYo68mj/JYl4nZYhtHEw8wvjEEx7vljn0siyGDnU59YRJmP+J\nK82gOHx7EXzaN4Pqahsecziu3sOQEhGHruWCz3NMLrjApkuX1l/aa9oPXbpY5OVFW3Vi0LlzZ4YP\nH87YsWO5+GI1p62rq+Nb3/oWkUiEpUuX8u67737tZwwdOpTHH3+cM844gzfeeIOKigpASUEfeuih\ndOnShf/9738sXLgQu2Fpe9hhh/HZZ59x1FFHMXjwYMaPH8/mzZs54YQT+OKLL3jvvfc46aST9vr7\npaON/wFEytDVk5xlSymJfGYgZICQgBcgCTCQJOKSujVvk1Os8uNjUxw6LfEQgQ8enPK/cazM6cmM\nGVnk5zsArP8j+I8W0SvhUWyaUByFUosJE1ScYOJEyM+HVQmLIqLYOHxMFvdzHeCDCPHtm4pZNd1C\n+MrNdNFF4PzFxsNE4hHH5GVszq9z8f0iTj75K/hP6ntu3DSISZNKCYfhB922cjNhwoARCdPp7DHk\n5RU3XshtsbTXtB+6dLFa/Xy4+OKL+cEPfsCTTz4JwKWXXsr3v/99cnJyKCwspG/fvl+7/7XXXsuY\nMWM45ZRTOOWUUxg4cCAAeXl5FBQU0LdvX4477rgmUtAlJSWcddZZdO/enaVLl/KnP/2Jiy++mFgs\nBsBvf/vbVjf+WtL5AOLdd6eyZcvtNPhYAIHvZxK8NpGhv/k9Id8nQQSQhEkQImiUXP7LmCgFBZAz\nqWnF1GsBfPFFEaGQRzxusvHKK5j44SOEpI8UAvGTnzC154PcfntjQ69GhFBxgFuZyh3c3jjzF7+9\nA9ee3CQd8+c/h0+nzWY083k2PJrLl5XQvbv6Poev98m7AYwEJITJsMChLhvuu08VhR1aEeL1u8fy\nTq9iLiq1mgR49cy/46AlnXefvZF01j18DyCSPkwIASb//OdPmDQpylm/vhvbf5nb+S3DWcpwHJYw\nAh8Dg4Ag5rHpYYdTJ1lUlqaarLtYPPKIQyikiq/CYY/tBRCXIbWukJJgzqP8X5aLaTYt5DIMGDkS\nHn4YjrjAJgiZyvBnmFRm2Tvk4d99gcuDGZMYKaLcH5qEhdv4fbb3C1FRavJuyTWcIRxcrCZFYZ/n\n+Kws6sn9q6wdGnEnl/bHH3+HNvwaTSui3T77mWQws7ra5q3HYKi8gowz4Zn3ipk+3WoMxr6O1cTH\n/mumMJRXGt0sL0kbz4N/1Fh8blusWuUi5VRqarKIx02k9JDSxBtYzJ8WwTj5MCEkQSJBTo1DNK
rE\n2R59VAWCTVPpC1kWUGKBqxRHK7NsTp1k7SjH4DiEEkkpCZWj2cWanPLJFtg8UGfxWsP3KS+3iceV\nRpCUqs9wUseoeXpnWyztNZqOjjb+bczXVaumuzQy14b40cOCiEzAXBN7ZjGhUCoTJ53sbJc+BQ4l\nbinHv1vDy8JmpVR6/llZMH68at4SiXicdJLJrFmlHHlkDccdZ9Ozp8XjESj25hHBw2goq03WFjTX\nD2qkYYN/TG0qx/DKNJfYlw7H5WfRp4V+BOmGO1m93KePS36+w0MPlXLjjTVkZNi89ZbVqEqh0zs1\nmrZHG/82pLlo2b9LXXJqnEbLmh7MPOqNAFNCCEk87nH4WoeZMy3Gj1c3gFAI4nFl+O+7rwjT9JBj\nTDIzo5xjWI0G23EgOzvlUhHCY+TIGm6+eXKqveP9Fs+sjTIMh17FdhMr/009UdPlJ4YYLhMWNBSg\nLTZ565ZSJTHxNboMN93kMmyYikEYhklBfSmdVzn8u1StWrSkg0azb9DGvw1JFy0bEHM55ac2BHGI\nRMBx6JqtfOK+7/Fx/xDHCkFYJhqzZUpyXEZdnUrJvO66lICaYSg9/SBwGDIkFSStrFSyy5ddplw9\nhmEihI3vp2brNTUw+UEL2H0rm95zpsdjDmaVSu+EGP+rms9Rj02hqsrCmZrqM9y1q6ooLiqC0aMd\nDEON//D1MQ65aQJ4PjmGQc6sWWCV7HBMrfWj0bQ+2vi3Iemz5CsoI+QrYX7pefzt/DIeG/IgP/hB\nlMrKMg47bBuv/whwuvH3T4uxqsA7vYie0qM4w6SyIIqUFuvW2Y0+/ETC5MYbbWbNShnFmhrYuNHi\nxhujFBQ4nHqqTWGh9U0dIneL5LH++Geb0ZhADJERYJ65hNWrX+GGG6IEARQUFDVq+q9bF8XzLNas\nsbn0UhPD8DhinUDE4iBBBgHBtROoIoeckpSFnz2bxtVPRoaWfNZoWgtt/NuQ9Fny91eAWJB678OP\nYMECOLSikl8Ne4TtvXy2XwPeWJM5NxUjljkYeAh8ZMyjZr6D71scth42XHEFtXkqKLxpk9UkQGrb\nykhu2mSxZYvFtde6dO8+lRdftHnssZ1bzd2ZXVfOdlk43mGjb1NElEl5U/jOVUv4IicgSHj07+8A\nEImk8vPz8x1M02LjRoubb47y4x87nPjleoT8i+ojABD4PHWtygYqKVFjmjBBBaAH4zL8K4enJtnQ\nLB1Uo2kNampqKCoqAmDbtm2EQiGOPvpoAFasWIFpmrv0OXPnzuWcc87ZqfzDAcOuqL/tj792p+q5\nfLmUGRnSR8ivyJCDWS4Hs1zWhyJpKpfIJUuEvOQSpeD5BZ2kR0h6kU6y4uHl8vSwei1OSH5BJ/k9\nsbxF5cukSuarry6XL7/cSS5dGpJLl3aSBQXLW1TL3B0lzYqHl8uvjKZjGDNGKYa+9FJKMTQ7e7lc\nuFAd++WXO8na2uXy4YfVMUDKwSyXHiEZpCl/xojIwSyXOTnL5bJld8o//GG5NAzZ+FskjznMXK7V\nPtshe6Lq2Vb86le/kvfcc88e7TtkyBC5du3aVh5Ry+yNqqfO89+HfHD2GBb3/gnDWcrrWNg4hP0E\nRgAiDl3KIZGIUF5u8zqqwvbXxh1snBklp8TiZwNSEgoRPK7q47ToBrEs1fSlR49UQFlKj379nBab\npuxqQxXXhb+OdwgHqTEUhRzGjbMoKIgSDt/BL34RxTBgwACH2trSJvn5NTWp7KULv1VGWKRaSAYI\nxjOT7dkwbVoRicTt5OQUkZfnMlw4mMQI42MSY0jc0U1fNArXVS3t0otD2oB58+YxaNAg8vPz+elP\nf0oQBCQSCS6//HJycnLo378/f/jDH3jqqacoLy/noosuIj8/Hy/Zg/UARLt92ogmYmRV4A8v4uiY\nx1BMQkYx2X3hiJOyYKEkiIOMwJvdh/LqK3excaOy5q9j0e08i5wcdYLn2Fl4K1ISCt+92SYnzfA3\nd92kC18JYbJ+vd1iOmXzBjLNYwLJz926FSoDm1sbZBx8w+RHs5JjsDj9dIv773epr09l83TtGm2S\n6mkY6iYj7G0Ez4IRBylg/vDTmRMt4eL8qUQiqkE8eMyY4fDZ9CxCCwIkECLg01AW/9dsjJoOyD7q\nAfrGG2/w97//neXLlxMOhykpKeHJJ5+kT58+fPzxx1RWVgJQW1tL165duf/++5k5cyb5+fmtPpbW\nRBv/3WRXfOPNJQlO/dMoIrF6QkgkHqOPKuPLSxzOOX8Fm78DRy+DbacJnnz3LL73PYtwONW0/aPn\nXBL/LCIcePQxVTrlf8tryBptk1NiNY4nKyvV8St1HTQVvpo1y2px7OmxiebvpV9f4TDIsMWZiShn\nGI4y/CVNf4QePRy2bGlZi8ey4IEH4Kc/hXe7d6P8XjhiHXyaB+5b2cglUFlpI4QJqN8uN9emyyAH\nnjMgCPAx+PnVNfTSPn9NS0vWNjD+S5YsYeXKlRQWKsWEr776iuOOO45Ro0axadMmrrvuOs4991zO\nPPPMVj92W6KN/27Q4kSDHe8G6fn7nStjRP7yPKKhAYoUMOQXj/BZ/wDvZckpsxpcPpWSxLFZ1JwM\nY8cqWQUp4fTAUU3YUSd4n6419Fk0eYfxJGfUzatk04usvi6Hf2fvpV9fAOPGQc+eFllZFv+ogc/d\npvt9k8xuSQnk5MD06cXUnP0oddkqa2nxg8WAahC/fn2U885zGvf94OStdMuIILwEIdNUtQkazTct\nWVsJKSVjx47ljjvu2OG9iooKFi5cyKxZs5g/fz6zZ89ukzG0Bdr47wbNJxpvlrlY85T1lWaYbY+N\n4ZCi4iYG8Ih1AuGrPrsBgg/PDfgiVxISysdvxJVUs+8JvrO1pvH8feQRdRwHpZYphEeo4QRPd8Mk\nxyNlqtF6a14Hza+vYmWjd7ra/iaZXdeFsjJ49lmLqqql5Oc7lJenOosZBhQWWvTqZaVWUEd6fHBP\niPp/jeOw7xeTo1N9NPD1S9ZWZMSIEfzwhz/k+uuv56ijjqKmpoYvvviCTp06kZmZyY9+9CNOPPFE\nrr76aiAlz3ygo43/btDcEA7DabS+Mubz1cKHefOoeeTlRRsN4CH/V4v88++REuIixLYz441qmbV5\nEETUzD8ImfxopvKfu25KZO11LM6ORCm7SlXjuliNhjfZtQsaKncb2im25nXQ0vU1derXr7Z3psWT\nXKnU16vvX1VlsWGDhWEoox8KwcyZqc9KX0F9cjLMe70n8ydZRHN0rr+mgW8qSW8FcnJy+NWvfsWI\nESMIgoBIJMJDDz1EKBTiqquuQkqJEIK7774bgDFjxnD11VfTqVOn3UoR3ddo478bNDeEvbBhnon0\n6pFhSW2+bPRz9+qlXDPruhbR+V6fruUGz3x8Id/t/5ekVD/PvHcpzw09jFEZ8L1fFNMzW8k6r1pl\nI6U6oYWA7Kssej2onjtTW3LDtG31a/Pra49W265Lb
IrDgJjNa2nfLTNz5zet6mob31ernmRDGt3X\nV7MvmDJlSpPnl1xyCZdccskO261du3aH1y688EIuvPDCthpaq6GN/27S1BCqu0HsX2Vs+PZctmf7\nTfzcFRUOvu+xvV9AXbZArPqIIDAIhQISCYOtW/vxzMuTOcuBuuxUkDgnxyQ3N0pFhdXE1QItu2H2\ntSHc7dV2w5R/WMxjcWByphFlZdhi7Nidj991YeRIiz59VKVyZaXNpk3q96ithVGjYPRoFUPQ+g8a\nze6jjf/eYlmsxWLVqmLyQ47KTumisnCuv97mrrvChMMBiUSYl18eTU7OKw2aPCbHH29z//3KbiUS\nKRdHMsXx1Vd3FDrbR27Ob2S3VtsNwRIR+GQIj5sKHb6dVqX7+Yuzib84n8jI0XQeWZK+C2+8oVxD\n48bBhRcqwz9tmtpv8WJ4+y8uv/t3kZKTbsN0P42mvaGN/16SyrixME2r0fY4jlLhlDLVjnHLlhxm\nzixl2LD5DB48mnPPTfnv+/e3uesuk3BYuTgOO8xm8uSWj7kP3Jyti23jh00C3yMuTUrLbS6pVL/R\nuUfOpt/1P+HQOAR/WMznz0PnkSUMHepy2WUOq1cruefkCmHUqKYfLZY5SFLZUNondHCT9J9rvpmk\nbdlTtPHfA9ILuMrKrMYAZnp1bGiFy+3hKRz5nwRf5EhCIZ9Ro8o488x5ZGR4SPkKq1bl4HkWvg8V\nFRY33RQlL8+hosKme3cr1Ux3CcvUAAAgAElEQVTlYMey+MuYKJsednhJ2qxIWLw2Xv1mJ+TNJ6ch\n40nE4aNn5uMPysH3i7jySo/LL1ey1VbDDzF6tJrxJ0nPhgoMk41ZNjn76Wtq9o7MzExqamrIysrS\nN4BvQEpJTU0NmZmZe/wZ2vjvJukFXGDy+uvRxuBsKKSKrSbbLi94RZjEEDcHrJlmUHOSSY8ekJm5\no9hZMnNn82bl4ggCqKqCV15pP16ME4strpnX8F3TahKerR3N/4ssRjRUOf/aGc1VFSkXWCjk0aOH\nQ1J+uqRB8XnOHFi7FkI+PMYVSODPfjErJ1qMXbt/YiGavaNHjx5UV1fz0Ucf7e+hHBRkZmbSo0eP\nPd5fG//dJD39MAg8srMdyssthFDFWTU1MCSe1OAJSMQMtjw6grvEFO6/H3x/XmMBVG6u3cR/D6p1\n4pIlOxZrHeykxyrSq5H/9kEJp/8Yjlg3nwW1o3n6rRIGlrvk5X19oRjAx3fO5pat4xEyIEYGf6IY\nz1MFcvPmtZ8bZ0chEolw/PHH7+9hdBi08d9N0gu4PM+kvNwGVH+WZFbO5IhNwgsjCAiMCO/0nsKs\nW1SAs66uaQFUdrZL9+6p51OmwEcfufTr57B+vY1ttx/rlR6ryMlJv+mVUFRU0pjBVFhokZ2980Kx\n2bPh0Z+4vMwEQiQQgEmM4Ti8jtXEBaeNv0bTMtr47ybJCtZ58xwefjhVmXrCCdC5UrVpnDMpi9C9\nEgIIAskLL8Dpt6T2Txqz5hpAeXlRsrNh+vQipFRibNnZUfak49aBTvOgdTSqKn+TVFVZOI5FVlZa\nDUCDlEbVAhsbp9HwSwBDUJtrE6pUsQTdC1ij+Xq08d8DunRRvvmqqtRrh1e5nPgTGyni9DEMgkBi\nIAnwGySId2xA0tyFVFvrNLyTUrVMF0Zr78ybp2bsjz6qDHgiodxfhgGnh12iQqV03muY3NFtInwq\nkQ2qoJsmwMsvpvodl5bqWb9G83VoPf89pLhYzS4bn1NGBh5CShXNNARxQsQxeTVss3XrjpLjSRcS\nhBp92y291hForpsUj6e0/4MAvhd3wIuB7xMKYlw4wqH8XtgyFspnwP/OC8jPd1RnGKlWCxqNZufo\nmf8ekszlLyuDuXNVmmJStkECb/b9PnUnDcLB5vUXLF57ZMcg5M5E0L5OGK29kl65HA7vOPN/p2cW\nfBCo3gfhAM/KZPsp8Fl/tW3gG5SX2xiGdvloNLuCNv57QdJvXVwMb5YVI+c8CgmPmDS5auMtrNli\nccUVaja7OyJoOxNGa880r1yGVGbQ/PlgHF3DmlMNsioCPs0zePPIbI7wXicc9pAyxIwZD9Cjh0WP\nHqoWQLt8NJqvR+xtlVhbUVhYKFetWrXvDrg7+jA729Z1caY43LbE5rXAapAnhvJyZfy1+sCe4bow\nfrzLnXcWEQ6r4PhLL0V59lnIzd1REjojo+nvnJSRBp3/r2n/CCFWSykLv3HDXWn0uz/+9mkD993o\nYF7x8HLpRTrJwAjJREYnefcFy+U116R2SX6UEMr7LISUGRmyyTaa3Wf5cin/8Ifl8umn72xsRJ9s\nBt/8LxRSDeyT+2VkpN4zTf3/oGnfsC8buAshzhJCbBJCbBZC3NrC+1cKIT4SQpQ3/F3dGsdtNXax\ng3mygbmIK5EyGYuRu2AK5Q+5DB+u3rcslWnSr5/LJZdM5ZRTXOJxJbusZ5x7jmXBxIkWmzdPpqLC\nanSlNSfp88/KUn0HyspSLTFBBZJ183eNphV8/kKIEDALGAlUAyuFEM9JKauabfqUlHLC3h6vTfga\ngXrXVR27huHwJjYvNTQwhxghAkawhKG8wohYlFWroHt3h1Aoi3vumUQk4hGPm/z859F2Vay1P8nK\nUgY+CNRcHtTzESOUr7+mRm1z3XWp4HE4rIw+qGI8HQzWaFon4DsI2CylfBtACPEkcD7Q3PgfuOxE\nJ9l103V6PC40TGYTZaSIMu3QSQz+fAVhAiQeF367jH795rFli0efPgLfDwiFAqT0uO02p1GYTLPn\nuC5MnKgMuRAqnx/U/TpdBO/aayEWU/+Ox+GCC9S/338frrpKr8A0Gmgd438s8N+059XAqS1sN1oI\nMRT4D3CDlPK/zTcQQpQAJQA9e/ZshaHtBi3oJDtOuk6Pjww8TsfhH/1tjhixGnG/moEmCPNpLkiZ\n1OM3CIVCSCkIh03OPNPet9+lnZLuwpFS/fbnnw+/Odslx1ENX/5R07T4DuCTT2DlSujTx2X5cofM\nTJv33tuxV4JG05HYV6mezwNPSCljQoifAPOAM5pvJKWcDcwGle2zj8a2U2xb6fR4nonEI46Jg83V\n/aZx8oM+IgBpwHPDTuWZ94qx4vMQwiMcNjnhhFLi8Rqqq20eeEAbmj3hmxKwpISPn3fJXlhE4Hn0\nkSb/EFH+bTTduL5eGf577ilqdMXdc0+UO+6wdPaVpsPSGsb/PeC4tOc9Gl5rREqZXm/5R2BaKxy3\nzbEsmOpYTLwqyrc2OKpgC4vb3nsfIw6iIYeks1dPVZXFjTdGKSx0KClRevyqFWEqlKANza6TapKT\n+u2Ki+GRR5oGek8PHPA8DOkTwWOodHADi1BIrQwiEeXqWb7cIRLxCIV8pPTIzXXYsMHS4m+aDktr\nGP+VwIlCiONRRv/HQJNOx0KIY6SUHzQ8PQ/Y0ArH3SdYFjDHYvhwi1hM+Zlr+l9FsHJFowZ9dZ+r\nEK8pMbJN
myz69oUhQ1pOItKGZtdo6bebPFk1rH/oodR2rxg2CWkSSI+ECGOO2Eq/D1wmTrSaNIXP\nzLQJglQz+IoKW1cCazo0e238pZQJIcQEYBEQAuZKKdcLIX6Dyjd9DrhOCHEekAA+Aa7c2+O2Nund\nudKra+vqlORyNGrz2GMW27bBdDeHk6aGOXp9grqCMP3zc8h8esdkoa9JItJ8Azv77YqLlUxGLKaC\nvkd93+JXn0T59sYyrFvnMjT3Ec59Yy65740l86xiXCyuvRbmzrU46aQoAwc6nHyyWpndcY6L5TiA\nDZal+8BrOhS6wpeWpZW7dLF26Nr1s59FKS+3uPjiqYwde3uDCyHEd75zB++/P7mJ4UgakiaSxNqg\n7BZfU0jdqKnk+yqV80c/msp1g26j+4sBx/wLhC8IIpkUySjL4laTtNBQCAb5LouDIjoZHiLDpLI0\nyqmTLO2i0xz07GqFr9b2oam0cjzuMXOmwxlnWHTvnnpdSo9+/RzWrrUoL7eJx02kVMHdrl1tevVq\nKifQ3F+tDcnus7NG9UlRvfRCr9NCWQy4OUB4KhYjUB1dLji6jGNHKAmI73ynkqFD57Ns2Wj6/6MG\nE1Ws59d7vDXHaeynrF10mo6ANv5AdbVNfb1JOKz8wY8/bvPrX8PSpamuXUKYrF9vEwrBW29ZvPpq\nlFGjHHJzd1Te1L7+tqe5W6igrgYZMwgRIAGJwCPM4FvnkpPr4/uCSCQBQGHhYv756S14rzVkcUmT\n36+1m9QNaBedpr2jjT+wbJnFY49Fyc9vKhK2bJnF0KFRtmxxOOEEm1mzrDQ3hMXOOmxpX3/b07wu\nb2OZTX8ykHj4hFjSYyyr+8FpuY8QCvkIoWIESYbcWM7Mo6PUPevwkrRZGViMG6dkOLSLTtMR0MYf\n5ZffsEFd7fn5DgBvvqlaCI4caRGLqQbt3/8+3HLLNxuGnRQMa1qZdLdQEMB9717BYWtgwafFXHK7\nxbMPuZwan4eUHoYhUPkGilNPHU23bhZFixqyuAwoKEg1h9do2jsdL+DbLIr42msus2c71NRkMWGC\n0uMJApNDDomybJnF44+7TWSDMzJg6VJt0A8k0gPzvm+SmRllyBCLn/8cXnjBbcjpt5kxo5LDD5/P\n0UePpnt3ZeVnz4bx49XNIykFDfrGrTl40QHfFvj8xdkcct4ERNxHmBl8/mwp9cYkios9gkBgGEqP\nxzA8jjqqjCFDyhgwYC7hsE88bnLjjcoyrFzpkJ3dcbpsHeikB+xDIY8ePRxc12L6dEgkLN54Q/VW\nePVVi8mTm07ta2pSUhGep7KIkr2EdbBe057pMMa/rs7lk7+Op7eXULIMXoz4i/M5oleMruUBNTmC\nuuwwiYQgHA7xZXQOPdbG+TQv2SrQ48wzyxg1ah6RiMfatSYFBVF9AzgASPY9Tqbqdu1q8/jjqR7A\noNI7W4q9NI/PgA7WazoGHcb4V1Q4HJIb0DPS0G83EiLj2HzyblyMEYdeEcmTV1+IMeQj+n5aT+7P\nljW8DmvvgZqTlGVIlwiorXW08T8AaKkXsm0rN04spnL7Z86E7GyXd99V24BaMWRn20SjqUA+NJ35\n62C9pr3SIYy/68L119vceWcGa6bFyKo0+NaFM+m8qgaZMBBBgIgbfPeLp/jvtySHLlY3CBGox21P\nDuLWulKkhFGj5jXJ79ccGDTve9w86J6dnV6wF2pw8yUQIky3bmP46U+LG/fXwXpNR6BDGH/HgYoK\nJbw2YIDDoEE2E0da0NlFmBnImIdvGNQN8AmFAuoKDPzHQkgZ4EmT3y0vZT3Kb/z001HGjGk5v19z\n4NC8OviVVxx830MIH98PGtI+JVL6fPDBw/zvf/MaK7sbs4hcF6Y6+i6gaZd0COOf9Otu2mSxZYtF\n9+4wahRcmw8nffcKXnkFXu5RQPGJk8iqiNFlrcFvj/oZ3raujUqeycbg48ZZujHLAU6ywjrp8rno\nIqistLn7blXI5/shhBCEwx5CSISQBIHHtm1lja6jrU9B3wlFhH0l/6Ajv5r2RodJ9UzOBGtrYdo0\nGIxLFNWhy8OkiCjH967k0f9OIOT7eGRQRJR/C4vzz4dBg/QE8GBh6lS47bamAV9Qrp9kIZ8QUJIz\njXMPfZ66Asln/cOAQMoEYFJ5yRVc+8EjqomPEUL89g4lK6rRHODoVM9mJJfyo0YpIzApMgVzXayx\nDaONw7e+3ErYTxBCIvEYLhzWZVq7VNilOXCw7VSf33SqqiyqqlTBnoXL1esXYSLBDLH0/zuHyNDn\nAR/f9/i4P3gfKPkHQiYRHfnVtDOM/T2Afc2ll7rcd18R37lqCSIjIIFBHJN3e2dh3ToXMiSBAcIM\n0/cntl7tH4RYFsyapRq5GM3OcMNQKqDDSLbnDJBewPoHulFfb5JIhIjHTZ55r5giovw6dAcbZ+qT\nQNP+6DAz/yTDhjm8/bbHFzkB5fcaHPNiIdu2DeBCey1f5vmsuw+6lAs+GzCG4mv1BX+wUlICOTlN\nZbXTHx+fqNpzQgyJYP3/CvjjTcXk5TXVd8ofZ5FTsvN+DxrNwUr79vm3IAifLgXQpSpE7s8EeAni\nhCm/T/J5jk8iYfKLX0SZNcvSE752iuvCp9NmM+q5CRD4xMjg7EiU5dLipJNUbKCqSvVfTk8TTe/3\noNEciGif/05E9auqLN58s5QBsTn0+tPnENtIiIAAcO8ax6oRPSkvt9m0Sfd3ba8k5wT5n9RAEBAi\nIEN4lF3l8LIF3boVEQ4rQ5+dHW0iHxEEurhP0z5ov8a/BVF9F4vx410euHwiuZM91fgDGv3+f/2w\nGPfxVFqnjvG1P5Jzgvp6OFXaDMMkgocIm9TlZ/Hhh1M49tgYhhEAXqOrJ10+orpayUfo7C/NwUy7\nNP6uC29utbk0bBLCww+HWfTlVpb/08W2yzh6vYcRVx2ffCDKCH4XnkLe1RZXFui2i+2Z5JxASngd\niyKi2Dh0PjOLU/tMokAow59IGIBJ1n+y6LzKYUBhKTUn1VBdbTNypG73qDn4aXfGP+XtsZgbijL9\nB2V435tLRu4jDPUfRYgEtZsgaND4SRBhYeEU7i7V/v2OQLLgLxZrmgo62FqLMDwMI8D3DdasGUGX\nqtGMeHoSeB6dTZPO0SiPL7O08JumXdDujH+6tych4ZNP3qb7f+N8q0rySa7P9n5KpVNl9cDz288l\nu2hHw7+z5uGag5t0zZ/D17tc9UQRpvQQvwuz9p4Qn5wMiYTJE09MYcFgZwfXoW1bukubpl3Q7ox/\ncmY3IOayOCgic2k94iUJAnqZsO4+QV22ZHs/qMuGrc934+iapp+hG7C3bxq1e6Y6IDwIfPAg+8Nx\nrMztyYYNKsunF8A8ExnzSBgmG7Ns3aVN025od8Y/eXHGpjh0WuIhAolE+fdF3OCE6vNY0+95giAg\nkTBxnGJmzWr6Gemrh/p61eBDX+TtkGZi/plnFXO6ZXH66Q3vu7C+Q
fvpz4li1k6yKEXHhDTtg/ab\n5++6MHy4cu4maejBWJet9P3Ly20KC1t2+di2sglpu+mLvT2yM/+e6+IPL0LGUtpPr2MRiahYgV4R\natqKvXU56zx/y4IxY5APP4yQEikEYswYsCy6AKefnjbDa2HXsWPh4YdVVkgioQN77Zb0LvDpNCz/\nwviN2k+vY5FIqHNCB3s1bcG+dDm3a22fyoJivpKZJDDwjDAV+QW7vG9xMWRmqvZ/OrDXAbFtfMMk\nTog4Jg42oHSB9DmhaSvKypSrOT2brK1ovzN/4B81Fuu/U8ovh46nboDP9hMmUVeXs0vVmTqw1/75\n2uW1ZbHpgShPXesQDVRPB9OESZOgvBzy81MXpj43NK2B68LcuWplCWqi0ZYTjHZt/G0bNm2q4b+X\nSUKhAEN6LFjgcNJJu5bTvzOPgObgp/ny+sUXXXr0aCrcllNi8XmORU0Z5AMFBcr419fD4sUghFIO\nHTtWrRT1uaLZGxxHzfhBnVsNXuo2o10bf8uCILD56itVmu95Jvfea/PWWzpY19FJz+jq08elvr6I\nLVs8wKSyMtqYCJA+AZg6VeUPJGdmSd//ww+rpu/6nNLsDbYNubku/fo5rF9vU1zctidTuzb+AIZh\ncfPNUbKzU1K9oZAO1nV00rM8Bw50CIWUcFsi4bFihcPPf241NeauyyVbHV4QNq/S9MTRAWBNa5Cd\n7TJ9ehFSegihRAWh7U6odmn80325ZWVQXm5RXq5+RCHURZ+VpWZy2p/fMUmP6QwdauP7Jr7vkUiY\nrFljNzXmDT6iXp5HVJgMa0j7BOWXlVIHgDV7T22tA3gI4ZMUFWxL9dh2Z/yb+3K/+92m7/ftq/y2\nj090GRJ3mByxmepoXZ+OSMqlY1FXF6WiwuHGG5Wcd9KY19W5BH+fQlcvhvADQiLGXWcVc0/oZhYu\nLOHnP3c58USHE06wsfRJpNkLmqvHdu1qt+nxWsX4CyHOAmYAIeCPUsq7mr2fAZQBA4Ea4CIp5Tut\ncezmOE5KtKu+Ht5+u+n7w4ZB5lqXF7yG5u2eyTNlUX3hdnC6dLE4/XSLWbNSq8ZevWZTXj6ew3r5\n5IUlBALMgM7nbubGfj/huOPeYsSI+wEP3zepq4vS5alKmD+ft/JH83TXEr2y1Hwjr73msnmzmkDk\n5UX5MlrGEesgM0Jben323vgLIULALGAkUA2sFEI8J6WsStvsKuBTKeUJQogfA3cDF+3tsVsiKyul\n1iglVFc3fb+gAIatTfZvVQU8w3Bo019Zc8CTbNOYna1m8HV1LuXlE5AywfZ+sOYeQfiVIwiGfsJn\n/QEJQ4b8DUg1eYk/MA1+sQAJfGfxYt4RMD4/hxkzHHJzdftHzY689prLZ58V0aOHx2efmbz7ZCm5\nN8xTrot72jaLoDWKvAYBm6WUb0spPeBJ4Pxm25wPzGv49zNAkRBCtMKxd6CmRvn1AQbjcitTGYwL\nqObdNTXQq9hGZJj4IoSRYdKr2G6LoWgOEpKtPbdsuZ1164qoq3OpqHAIApV3JyXU9g3xQJer2d4v\nle2zbNn/w/dNIIRhmHRZ8j6gGgQBXH7IHO68s4hEIvW5Gk06mzc7RCIeoZBPOOwRWzSHYB9VebWG\n2+dY4L9pz6uBU3e2jZQyIYSoA7KAj9M3EkKUACUAPXv23KPB2LaqwCxMuERpcO1gMlJEWZthqaCc\nZRFaqiu4NIrmbRorKhyuv97mzjsziERiSBlixoyZLFxYQkZGH3r3ns/LL4/mX/8q4bTTLmDYMFUf\nELmoEl5aQVIta1P/7vSOrEYI3f5RsyOuC/VLs+hpQF0BfNrXIBi6FvmCJIiDDIcJtWEWwQEV8JVS\nzgZmgxJ225PPsCyYNQu2XutgBsq1I4TH70Y6ZExJC+zqCi5NA80DbeXlNhUVFjfeGCU/36GiQqUI\nZ2TApZeWsGBBCS+8oFYA11xjEY1a9OoFlDRklDX4/L/skUMotAjYNwE8zcGD68Jk22Vh/DoyhI98\nHNbc47M9R/UaOXyN4D/dx3BOG9qo1jD+7wHHpT3v0fBaS9tUCyHCQBdU4LdNKCmBSmzkBBPpe4Qy\nTOwptnbra1qkSxeLvLxoY7/eSERl+2zcaFFVZSGESuksLVXbT5+eiivFYqmUUNcFp6YEe0oJlgUT\ngbq61OfqWX/HY2cSIo4DQ+IOpvQwJARxyKqQ1J0S5pOTBR/2MelyWHGbjq01jP9K4EQhxPEoI/9j\n4JJm2zwHXAG4wA+Bl2Qba0nnlFiQo107ml2jSxer0TgnawCmTIElS5ShDwIVL3Kcpu0fhYCtW2H2\nbJVC3FyNMf1zNR2Lr1PotG2YHLHx4iYZIoaMQF1BhIyM+3n77RpOOMFmyJADvMK3wYc/AViESvWc\nK6VcL4T4DbBKSvkcMAf4sxBiM/AJ6gbR9mjXjmYPsSxl/F95ZceWjRkZqTYRUirDbxipm4Su9tVA\nUwmR5ueEZcEl91tMnrOU4p5lHHMxfKeouCHleN+Mr/02c9FoWoGWlu2uqyrHH3kkJcRl4XKG4eBg\nsybD0jo/mh1m/qWlsHateq+gACZOhHhciQMmbwzJlOO9cRPqZi4aTSuQvnhMvxH07JlK+RyMyxKK\nyJQefthkY2mUz7FalA/Z2y5NmoOHdAmRrCxl7JPdAZMrRVCvvTLNpSC3jLe/PZe6bB/DMMnLi2p5\nB41mf9N8FvfQQy6XX+6werXNGesdMohhyAARxDh8rcOpk6wdfL37skuT5sAgOXmYOlXN8pOkx40G\n4zLp+eGEn4uRG1HZPtv7aW0fjeaAoLkE9LHHFnHllR6XXWby4m8mwoqgITc74F9fZTVuW18P06bB\noEEqMLwzH7CmfWPbyr2TnPmn8+NvlxH+KIYRqKyfLmsFn+e0fWpwu27jqNG0FkkJ6FAIzjqrjFCo\nHvAxDI+s/ytnzT0G74yFNdMMXgtqMBquLClhwQK47TbVpUm3gWy/uK6a4bstFHJblrrZX3BBSoEg\nSXgEyAgEhnp0M77LO++0rcsH9Mxfo9klkv7bVatc+vWbSzJRIpEIs2zZaHInvELdKUoSeu0jNpef\n4PLtDQ5LUS0gg0DN+MeNU/EC7fNvX+yKS8+y1Arw2Webvr69XzErB87lqDfifNw/wtQ/ltJjs0Vx\n26b5t0/jr4NqmrbAsiCRcPA8H8MA3xcsXDiGf/6zhC1bchqrgU/fWMn9wQTAJ0YGRURZYajCMd3u\nsf2Qbme+Lq0zfdusLLXySyTU64YBW7dajHvVITfXofyPqpr8+uvbfvztzvjroJqmLSkvtznxRBMp\n1Sx/8WI1PauqUtXAPzzW5f5gPKFAXd2ZIsaM8x2igyyyslSKaFmZvgkc7LSUxpnsDNfcpbfDthe5\nhB8vI5BQaRRw5poaKqtsHn9jMgDZ2ZCT0/bfod0Z/2+6A2s0e0NhocX48VH69Utp/qRz4vsOUqZS\nOYxwiEG32Pgog5AM+D36
KCxdqs/Ng5XmdqamJpXW2ZKUQ3LbATGXkieGE5YNVYIJYKXBmTKDkSLK\ncmmxcaO6WbT1xLXdBXzTA3M6qKZpbZRwoEV29mTGj7fo1Ekt3Q1DBfKWShuPDHwMdRKeey6gDEB6\nql8bq/Vq2piW7IxlweTJOxrs9G0v+nYZoSCGICX9LWRAJ8Pjqj5OY/7/vjg/2mWFr/b5a/YV6b7c\npLbPaSGXJ84p45iFjyrnrmlSWRqlcKLVOPPPyNAz/4Od3bEzrgv//KfL94+yKbzFw0ibCAjDgIwM\nKkujLdaH7C4dusJXS/po9hXp59pbb8Hf/gan/j+LY7o68HwCfB+/3uPwtQ6OY1FWprbVPv+Dn921\nM9XVDp8P91n3e/jWvwQbN36Xz0+0Gda/nMjI0eSMtIjm7LuJa7s0/hrNvmb2bFXMBerRusXm+2GT\nwPeIS5Mr5tpMLYYHH0zbSS9ROwyOA6tX21xySZjaUwI+PtHkgQeuYtKkSazDwzBeIa8uB8uy9tmp\noI2/RtMKzJ/f9PmD5Rbbx0T55O9lfJoLNR80Sz7QaWntm2Y3dtuGv/6VxvoQw5D87Gdrm3SQ29ed\n3rTx12hagdGjYfHips/79INvnT+PSMTDis/jsMOiNHYU0mlp7ZdmN/bK0ihOjcXkyQ6m6SOExDCU\nHGx9vUk4rNKGq6tt1RFuH6GNv0bTCpSUqMc5c6B7d5Wn3b27w5YtamYXCnn06OFQV6d6BmcVZtF5\nZ4nhmoObtBt7EPN46lqHqVjk5dlMn26SbOu5bl0xs2cXk5ur0oYvucRiyJB9N0xt/DWaViInByor\nYfVqWLQIXnyxaW/gSCSLdeuKCAKPd8wwveeezbc3dCPzLB39bU9UZtn0NUxC0qM+MIliEwDr1llU\nVkY57zyH6mqbl16y2LCBxv7Q+/r+r42/RtNKNPfkLFtm8dOfRqmocCgvtxHCafTxSumzpduzvNs9\nk7zsYrrs78Fr9pq6OpeKCocJs2wOiUcZLlLaTqDy94+rBu6CX8yBZQ3pnqGQqhDe1/d/bfw1mlbC\ntpVqZxCox6wsuPJKi+eft5AS8vJoWPbXc/h6SddySW1+jNpe+zbQp2ld6upctm0rY9u2ufi+z913\nm9x4Y5SpVZOVgmdDKZWFy7m/LyKU8FgoTYqINor+1dTs+3Fr46/RtCLJmskggPHjUwJeoJb9ZWVR\nJhZOI/fGBRhxCCIBX52UBfsw0KdpPerq3AZXXj0gMQwIhz3y8x2qqix69oT331erwRGGQ9j3ENIn\ngoeNw+tYRCL7J+SjjRQfIx0AACAASURBVL9G00o4jrrIpYTCuMswVE/f9GX/vHkWPf48iPzgOUQQ\nYCQE4VfX/v/tnX18VNWZx7/n3pkJVjTR+IJWQcHXYEKCVHulwsVg1VYtW2prrTuKlrgK1tQqLdvt\nNt1tpUWtcZVaXgxLqtR2l2p9axEHbkG9CAHCW1hUFBGBQqMJvmXuzL1n/zgzmUlIgBgkb+f7+fiZ\nzOTOnXMn8jvnPud5fg9c2rVj13w6GhrSoTw160spSCYj1NbaAPzrv6q9IMeBrx6bjyw3ICExQhGO\nucLmXwZ0XcGfFn+N5hCR9nAZHnd5ISglgoeHur1/Vah/3eec42Keug1/sYlBgAxJNp34KIMbozr0\n04042Po71W1LubwKEeLkkyfw6qtRTjlF2TKns8AKClzWri2n9j6fY9YaHH9NJVMu7dq/txZ/jeYQ\nkW74Eq9wOOJFDxH4GEYTPxtVzdZvW/z2ty733FNKOOyx7ipJXi00FMPeggS7dlVTV6ftH7oDHam/\nq6uzuPNO5fK6caPNjBmqCUvrRizpO4S9QwP2DhUYp9fT/7O/lP2ixV+jOYRYFlBhI5eFCOI+Miw5\n/poqLvxWlHPPdUgmPYTw2TsU9g5V7zl6I0Tmr+ZH/+2yNKFUpqpK1311FR2pv3McWLfOYs0aC9Ns\n+1jXhZoam8LCTI7/Z92f92DQ4q/RHGosi12PTeCTv8ykoViyt8CnocGhqMhm7doIQRAHAo7eCCcu\nhJP+CiRr+Issbc4ASSS0+HcV6fBddv1de2Ggto7NJnMXYVFUFOPBB9X/B90hxKfFX6P5DPhcaZTX\nj5vXXOCVl6f+wQ8bFmPr1gr8lxYx7AcSI7VXKAi6RQaIJhO+S4s9tB8Gan1s68k6+y5i3TqLl16y\nuPjiw3Yp+0WLv0bzGZAW+oYGp1n406+fdloF781YjJFIImQqT0QIjEjXZ4BoFNl2zbfeCsWfuFxz\nQjV7S6CmJoplWS16OQD0X69eWJ9v85s16s0lJfu/M+hKtPhrNJ8RublWm7f3ubkWb58/A3/eZIyk\njxEKwU03YUajTNGK361wXdg4xyUmxpDzjzjSgbVXVfHyyw6XXmoRj8MFgUuUas5kLoFIMkRGqE2F\n7yIR1eSntlaZ/XWnP68Wf43mMOO6MGlmIV8bczPHrgO7IkphWWolOW3f8EFjo7vPHYTm8OA4cLHv\nEJEehoQgAXm1CV7yHZqaLC6ULjFKyaEJA4mQtAjfeR488ICq8Vi2TOX8d5cJQIu/RnOYqanJpHwe\nuc7kg2dgPXDrb2DoUIc//MHmkUdUU49MBWkcIQzOPHMGJ59c1tWX0GewbZhi2HhBhBwRR4bhvaIw\nVY/aSAk2DhE8TCQSCBAkiOBgA6qvs++37MurxV+j6aMUFzt4nscx/+czbKqPSMwk+de5/Ha65KMi\nn0QiQnV1DMex+NKXnObsICkDXn99MkceWajvAA4jLhaXyCV88/hqGovBfTbK+vXq+3ewSQpV5OVj\nMk/cxH/LaHNV93XXqdaeOuav0WgoKrJZtSrC0aubEAmJEUhM6XHcBmgqkUjp8cEHDs/+GPoP2MYX\npwo+KlTvldI/7B2f+jKOo+w6lmOxfLcFL8BEZvEcFSxgPHMoY6yIMQqHJdJmucz8XQwDhg5VHk/d\nsVunFn+N5jCTm2uxcWOMZe9WM0XOJUQSaYb4x3mSZNInmYwQvJzPIllKZKeHX26w7gH4qEhiGDnk\n5dm6/e9hwrYhJwficfX85mAWM7kFgMt4gZMHbOHnu3/FcsPC9zPvMwyaPfo72uj9cNEp8RdCHAv8\nATgN2Ap8U0r5fhvH+aiwJsA2KeXVnflcjaanM2KERekPLRaJKJeYDtfMsPGHwqKnHZ591ubqrSqW\nHMJHBvDyPRM55lcDGTfOpq7O0u1/DxPZefz5+XD6rQsggLRT8+ST7+N/jx1HJGJRW6veYxgwdixU\nVHTvv0tnV/4/AmJSyl8KIX6Uev7DNo77REpZ3MnP0mh6DWlRWTYdztqhXjMMi8pKlSFyEesJECQx\nSBBhwT+i/HPcIjcXqquhqUmFI7rbJmJvxLKUMVtDg0PirmKY/kLaop89oyTFux1+/3uLggKX4mLl\n8VNRYXX7v0lnxf9rkNrWhnmAQ9vir9FoWtF/vcvkp1Lunysi/MeoGL5v8e3TZvHwO
5MxfR+fEOVU\n8oq0WFUOJ2xxyZ/tcKFUVtGhUPfaROyNvPyyS1NTKabpwRURXnjhO1xj/J49oyTbLutH7Q9szj3X\n5f77VQaXlBEKCmJA91Z/o5PvP1FKuTP18y7gxHaO6yeEqBFCLBdCjGvvZEKIstRxNXv27Onk0DSa\n7k39gkxoJ4yHuczhvPNcKuzbCMsEJpIQSYazhiBQVtGX31dKhf8TYpRi4TJhgl71HzJcF6ZNU48p\nXn7ZZfnyCiAO+ASBh3veUOyml7hnzy944IEYmzZZFBc7hMMepukTCnk0NDhddBEHzwFX/kKIF4EB\nbfzqx9lPpJRSCCHbOA5gkJTyXSHEYGCxEGK9lHJL64OklLOAWQAjRoxo71waTa8gf7yN90IEiUeC\nCEukzbe+Vc0HR/nwOMgABJIbqeIxI8olhkPE9zDwkXhcYjoMLLGY1kZhmKaDtOHj3FgATU2lFBfH\nMYyAZNJAyggbN9ps3mzx1lsWlZWqeGvdOptEIoJheJhm93DtPBAHFH8p5dj2fieE+LsQ4iQp5U4h\nxEnA7nbO8W7q8U0hhAOUAPuIv0bTlygss3hqS4yV9zksCWxcLL64p5q9X4Sdl8NJz4IhISx8fj7W\nIX+8jfxehETcwyfE5/1t/O42FzdlI9DWxm9vzwo6ZNfXysf57WoH54swcKCHEAG+b7B69Vjmz6/g\nttss6uvVBnB9vWq+Xl9vcdRRMU45pedUYnc27PM0cEPq5xuAP7c+QAhxjBAiJ/XzccBIoK6Tn6vR\n9ArG/criypemctSXLQwDFi6MkkjksPNSkBFIYkAkgl1hU1hm8fiEGI8yEZBMZDYL/VK+4LvNG7/Z\nuC5MmuRSVzeNSZNcFc1oI7TRU0kv1n/yE/XYqUtKezObJn4owk8X57Nu3TYSiRBBYOL7YXbuHMyp\np66nqWkao0a5lJerzy4vV28fOdJi0KCpPUL4ofMbvr8E/iiEuBl4G/gmgBBiBPAvUsrvAucCM4UQ\nAWqy+aWUUou/RpPCslRa4LJlsHmzxY9+tIRbbnFYODaf4R/Us6fApmm9Rb0D+SUW7wiHkPRVGmjK\nR2ZtxNpn4zfbRiKRiLD9fyrht+W9Jke0I01XDohlsb4yxpZHHVYemc+1U8oJhz2SSZMdO67i+OOf\n58orZ2EYAUFg8MknOQwZEmPDBqvHZlx1SvyllPVAaRuv1wDfTf38ClDYmc/RaHo72fnkDQ0Wt99u\nkUyqdE6xTD2mC4duHGbj1Wb2Ct4+zSY2f1/xSdtImKaPlB4jPlpwCNWy6zlQI5X90Tpc5Low5nsW\n8bjFdddN45Kwst84enXACefv4P2TfQwjQEowzYAg8Dj/fIdNm6xuZ9twsOgKX42mm5DW4VGjIJnM\nvC5TqQ9pczD5RYvL1sf4ku/gYLN6h8XtbZyvqMhmzRrlOxMKRTj+G+Phd8u6p9HMp+BAjVTaI3tv\nNxSCCRPU656nHmtrbY5cZzJsqo+RkPDYanZcBjvGCj4slPi+gWlGmDjR5uyze+5+ihZ/jaYb4ThK\n5LMxDPWaocL/RKNQjcWvZlpqJeq3XMRnVrUWJSWZhjL9cy2IFfaqHeBPY52QHS7yfZg5E8JhNREk\nEqop+4rpN3FhYiYikEgvycnPwoAXwjx/5/fJuyKvuRXjyJGfyWUdFrT4azTdiGwvGcOAO++EvLxM\nZkm2Zs+bt+8iftYsZSQWBOo8sZilSo3mO6ocs7sazRxq9pMGlA4XpaukpVSTwMSJmWPGlEShfC6y\nKa7abEowkj5XHZUHF089nFfymaHFX6PpRhxsKKOt41wXJk/OhIzicXi92sWa18eMgNrI2c++5vR3\nV10NVVVK+CMRuP56t0Wq5s7jJhD69W/Jd1OTRMjk8W02Z7q94yvU4q/RdDMOdnFeUOBy8skO27fb\nTJtmsW0b+zhLjsbpVZu8+yPd8ezEv24jJ+4hAp8g7rG0wiGnDa+dgQMzLRa/8x0X3y/lrbc8DCPC\nsGExjjqqhCNq1KpfGnDPCd/nZ7MtIvN6xxyqxV+j6YFkOnx5NDVFmD8/xmuvWZhmJjNoxgwYVGjD\nvKyUmPx8emNJcPb3UX+C4DwDjMDACyL8eJHNmmUZwU7fGMTjKjx2kXA54h8V9L8pzt6hKpPnqacc\n7OUwMGkgZIAMBMe/sxef3jOHavHXaHoA2SFsgNdecxg0yAOUl0xRkcPGjRZCqN+bpuoXm85fr1/g\ncGpxPkPKe0+efzYNDQ5BoL6PxgJY92s4enWICqeSV7ZamFmCne2K+kVc1TdhdRyxMWDt/Qa7h0S4\n7z6bOZvgxSBEBA8hJTcyl8eMKKvbqKnoiWjx12i6Oa1TE6WEs86yuffeCJGIRzIZobbWbt68BBXl\nSVf8lpZbDI/DT1+sYLCMI2Q3bCjbSfLybAwjQhA0IYTkw0JoOFcivHqMbWoy3LZNbYhXVWW+pyjV\n5NCEiSRIGEhnLHf/toING9T3UsVNlDETE0mOkeTnY9sOIfVEtPhrNN0Y11XVv+kQRToNdMMGi7vv\njlFc7LB6tWrwko0QSuyqq6GkyeUFWUoE1Qu4OWe0NyxfU+TmWgwbFmPXrmp27ZqLlEmEiHDZZTZH\nHw1z58Ls2erS0/siFi43iSoMqZqve0GYdwZXsOW5zHdZTZQbmKfcV3OUzUY3d2o+aLT4azTdlNax\naSGUeKUFrK7OYuNGq3kVm00QqPx1IWCKTFtHBwTC4K3BY/no7goKe8PyNYvcXIvcXIsBA6LNtQ2l\npRbvvqsyoHw/sx8iBIwVDiHfRwA+gv9mAmv3WtxwA9TVwdKlqndvKTEeHOdwwRS719wpgRZ/jabb\nki5GShd4QSYvXQj1elrMTDPzPPsOQUpwsPFI2UHICNE3K1hdbhEr7FVa1kxurkVdncX8+ermJp3X\nPzzuconhMPxOm015Flfm25BySU0QYX4oyoq5aqKIRGDKFJUJNH68xQWFZOJoveRL0+Kv0XRTsr1r\n0qv9tMBDZhI4+2wYPRpKSmDBAvjgBZfRKOuH5VjNq1cbh79h4wYtN0APhnQaZU+wK24rzf/VSpdz\nJpcS8j3EQxHGpTe7C2Nsr1bfy1AsXpmdyYrNy4OFC9s5YS+YALT4azTdlNbNw9OJOqaZsSaQEjZt\ngs2bVUXv/NtdLl80hrD08ESEUrkENzUBrAqrbCDTbyfk305VbHYaZToHPnsC6G49A7LtG+JxtWcy\na7BDOPAg8FWqT3V1c0HFIMsiCjTNyoTWWnw/h9Q+tPugxV+j6cZkF3wVFmYmgttua3lc2vTthLpq\nckQcISFHxLnvq9VsuMpqtoaAdoR6P6vb7DTKIFAtCtPi350WxelJKD9fjSW9V7JoEVxv2CzBJERq\nxpw7V5kkWZnrKC9Xx5umatDSfB2dsQ/txmjx12h6COmJYNo09tnkTa9WP/cVCBaBSIAMQ90J6vdT\np7Y8T2Ojy9tvZ4Vx9rO6zaRRevh+hO3b
bQYNUufq7KL4UN01tJ6E5t/uEppfzTvboVpGecm3eFTc\nRJmYiZAS30uye3o1J12gPtxxrOb9FSGUj1KLL+zT2Id2c7T4azQ9jGzzN9OE738fhu51+fKuao5Z\nuYs3JocINfrsKQjzwJwodVXqfWVl6rHNMM5+Vre5uRamGaOqymHVKpstW6zmFX5nPfX3d9fQep9h\nfxNFehL6gu9yQ1M1V973KGaQAGACVYzBoZooE4x5CN/DlybHPDUX+XQSkRPhysoY/xmx2r8Oy8LF\nUp9P79B/Lf4aTQ9jn4UoLowZA/E4Ehhihlkw+mvseHoAR6e6ai9YkBF/FcZROf9BEKehwaGOqbx+\nQ4zROAyK2vuo29KlFo89ZuH7asJJr/A7uijOFvD93TVkT1BCmAhxE+XlUdata7tfsW3Dl0yX5/1S\ncmQThpSkip0Jk2CMcKjsN5U/fqGS45cu4CM+x9U8gwjUhxfWO8RiVrvX0Z3CW4cKLf4aTQ+khfnb\nNKe5E4kAhJ/knxY/i4HkFuZRSozx4zNm/8c8vYL6UwL2DgUI2LYtn8suA8+ziEQsYlGg1Sp7fyv8\ngzWiay2glZXtnzN7n0FKnyCYyT33zOMHP4ixebO1T3jJsmDeTQ79ZnrNRVvNkbFwiHNutnm1xKXg\ne+VIPJKYJAkhBCRkhL822Izbz3X0xj1fLf4aTU8npcwyHgcgwMAgIESAxGOMcCgszDiaHRVvYlgY\n1t4Pe4cavPlmfQthq65u2SsgFlMfc8MN6jFrn7RDtBbQ+vr27xry8mxy60xy1/g0FMPeoZJQyGP4\ncIe33trXW8d1Ydkum3IjQkg2gSmpvwC8Y8G48WaiX09tliTVhCIErBg2kadrB+JIm+XTLWYOydwd\ntfMV96o9Xy3+Gk1Px7JgyRKWfreaujpYTQkPUt7c4/dv2BzlgJWydxaBRCTg6NWChnNzOOMMu4Ww\nAfudDKLR/Q8nHavfvt1m6VKLUaOUT/6oUTaRVnH1tu4aXBder4bvzBUYCUEQlqy932Dv0AgXXGA3\nZzqlzUlBPXqexZPEuHZANRdOqeLDQp9kMsLu3VGi6YNSF2pGIvwhEuXBLK+G7NBYW19xb9vz1eKv\n0fQGLIvIHItyW7Ui/OR0uProBTy9dzxrdlrcZwPYyEgIGfeRYXh/mMGvf13J1VdbLYQNWoo9HHzI\no7XV9CuvVFJSUt7sk79oUYylS612BTQdGvp+k6P8eZAYSYPT3x6LcX0Ftm3tEz664YZM/93lWCzf\nZVEwJ0pxsUNtrc3rr1uceSZYrRS8YL0FKzKfPX78Ab/iXiH6abT4azS9BCuVsVlT41JYWA54TPCX\n8eV3C3EcC2yL0x6bwMfPz6SxRNJ4NuS+Wt+84s0WttaTQVVVJgfetttP0cyO1YdCHhdfvIBwOFMj\nMGiQw9Sp7Suo46gsprcG5eO/Y2BIiRHJ4Zh/qoBUbUHr8NGuXeq9X8TFxuE9kc+59Wt4fw/UknE4\ntSxaKHhZahgLFijhb2/V31vR4q/R9CIsC04+2eGtt5TgmqbH4sUOjz2msmQWLYoSv24eQZCxgr7j\njrbPk90QPt0nQAhYvz5TbZyd+eK6UFNjU1gYAdT5ly0bT1HRMgzDwzQj5OXZ7Y69sdHl2GMdrrgi\nn+jkcta97nPsWoMTv1VJ/6wZxrYzXkamCQMGqIYsi1LOpaYMkLtBLoUrplVx2+8cbLuN2JLjYOXb\n1NuW6n3Qx9Dir9H0MloXZa1aZTevkpcutbjtthg1D1fz4ZPwiyth3AFWvI6jzM6kVI8LFrRcedfU\nuCSTDnfcYZNIwOWX38BVV8Hu3VEWLrTYurWQ8893mDixfV+gxkaXVatKOeMMjzvuEBhGwEeFAR+e\nJwgNrqd/q+OFyDReP/poKDUdIkkvtckNhoQgAcfXJXjwQUeFfNKk4kYyHucsYbJh4MP87GdlLFnS\nu8I6B0KLv0bTy0h726c3XV97TXn6pD3+t/0BSn+RCurXzWP9kBjP1rcfh2+d6XJrscuFix0WGzYf\nF0FhYSnJpMcvfxlCSkko5JNIRHjttSjJpOo9sGmTxdlnw8iRbY953ToHKT1M00dKAylNkklBKLTv\n3YLjqH0NUOL/wAPwxztt5AMRpK88HQKhKpz3loQpKmr5fhwH6cURQUDYCPjp6EnUriikurp3NGk5\nWLT4azQ9jIOxREh72+/YkVklJ5Oqk9VxwqEg8DClj4x7OBXVeEXV1DwE/SuiFJa1PKllwe23w5/+\nBOUXuox7qJSvBXH+PWSy/rtfpREPIXxCIeUjbRgSKT22bXOQUp0rFNp/emRtrc2ZZ0aQUoWLHn64\nEsuqZ/LkrLuF1IVfmW/z74bVbFvt+7Apz2Lc3zIuePF31/D+MBhcGt3nbmN9vs05hqnCQ2FoLAko\n9hx6TZeWg0SLv0bTg+hopanjwIiEy8Vpi+fA4kVsphIhjIc0Qoy5Zg7nzkwiEuBPnguFLeMfT/3Q\nxZjucBw2xhvVBEJV0BrJgNw1z/De2aHUnoBa+avN3Qhr1tiAmnwmTICCgiw/oTpazGAjRqhwVEFB\nJkPnxz+G3NxWFx6PUygEqwdfRdnrU1iOmgRWrADXtrBSm8lr0hPkSS2/H9eF0ZMtvvH5h6mwJ9FY\nElB/Vg4bq2weeeQQ/ZF6CFr8NZoeREcrTb967CzuFJMx8fHIYSwx3JS//yXCYdjkbQzfOxMjASIA\nUIn9u6ZXs2MH5NolXH5fOVfiqYpYAkS6gtaE+kLJs89OYPfugdTV2Tz8MJxyisPf/mY398GVEiwr\nkwKaW2dSfJdAeEn8UITHJ8QYcj381385LFxo8/HHFqNGtXHhaZtO4LzXn2IJzzMGh+VYPPWU8t5P\nF6S1N0GmQ0ZrP1fII3nfhbeg7rkojzzSt0I+oMVfo+lRdKTStLHR5ahVk4jIJAIwjDhTRjh8q9Zi\nRdJilWkxp8Tl/cYqgrCH8CAIBMHMOZwok5wIJFeYGEhMAgRKeAUgBey8TOD7IQbOh+W7bdYKi6VL\nYepU1TrRMJRWXyRchj5TgbDj7B0acMJfA2iSICHwPd57spoTvjaPfv08xo6N8PzzMZ55xmLevCzh\nzs9vcW0C5dkTpRo7dVez0rOam22l54l4vOUEadtQVOTyq1+VEg57JBIRSkqifU74QYu/RtOj6Eil\naUODw/vDAgaGlcUzEZNxlTYPrYdJk9TdQ1mZxZlnOnyvaDo3rnwGEx8hg2ZTNAMfn7CSfhHCNCSB\n9InLEH/5+xVc/4O/MELO5hbmcZkRY9s2VYSVdh4dHnd5ISjliD/FCZ4JeGOSYMBflfCrAFGI94vI\nqgVoYvToauJxKClxWPyLfE4Lr+Gkv8xt4WMtAQPJTczBROIR4ZbTKjnjjHo++cQmCNQXEwQt5w3L\ngocfdkgkPAzDxzA8Ro926GvxftDir9H0OA620jQvz+btwhzW3h/nmLUGx1/zMP0ti3on0+vX81Q2\
nzptcgMGfMaCFKVqCCLfzECeIes6+xSYahaqow+w3bOyVDmGeUQ1S8BiNwy9nZ1bssRjEKxyOeNFD\nBAFG0mDgysEY/psIAgIh+J0xgT/tjHJRsgrD8BFC8pWvPMrll1dx7GtJht8dQFwgybh0Skg1XYcQ\nPiYSiPOTiyfxzrES348wdGiMjRstDKOVNz9QVGSzdq1KhT1Q7UFvRou/RtNLaU75HOSQd71N/1xV\niXXdNoeFps1LWIRCqQrYpE0QFghfIk3Yey4k9h7Fv229j0dFGeEwOFHAgs1ft1g+XX1GujG8NCM4\n0ubsc1yKix2ee87m5z+3oMKGZSpOJSIRdlh3c9KycsKpZvJnXVvCQ3sctj31FU655s8YhsQ0k5gm\n5K9THkQGao+hOW2J9F2DuiOR+PimQeNwH9MMkNJj2DCHTZsscnL2DY1lp8L2hJ7EnxWdEn8hxDVA\nBXAucIGUsqad4y4HHgRMYI6U8ped+VyNRnNwpFM+geaMmUGex4tGhLvOj1Fws8WaNTBzpsXUL9xF\nef50Iu9D/qtA4kMqKUdKODFZT//1NlgWeXnqdOnG8GNw+PtZNh+E4f6sWPrLL8cYObJlnOqPjsWz\nopBR0qGefP7r9+Xk4OGHQtScFebDQp8gCGEYkvrCJIPCAcm4AWaI0PnFUFODCAJ8BFXczBORKNO/\n4vCbunyuPbOcUFKliq5bZzNiBAwfriqSa2rUpFRUZDd/J31V9NN0duW/Afg6MLO9A4QQJjADuBTY\nDqwUQjwtpazr5GdrNJqDIV0YsG1bc6qQ9D2OXOlQvt6ishL69QPXHccvxANEUh2wBJIIcWYwGSPw\nEbeawMPYdllzJ7Hlqebw55ku0esrCIfjmGYAxNmwoQKoUBOAZTFrFsRiLqd92+HpWpur6xxCgYfA\nx0xC5JWJvNQ4kMGDbV57Dd56y0GelM9pW+v5+Hybm2+GgrUqjUeGInzu5ijTohYXWBa+C889V8i2\nbQ5r1mQK22pq4JxzXO6/vxTP81izJkJJSazPCz90UvyllJsARNr4o20uAN6QUr6ZOvYJ4GuAFn+N\n5rMmuzDANCEUwg9UA5PF0m7hqx+vcIi8qDZ7VVhFEGBgksREIoMA/9ZJ9H+kkCVLLKqrYfVq+Phj\nl3vvLSUcjmMYAb5vYBgBZ5zxIh98sIwnnqjENOt5+ul87rqrvPnOoPrWSrytESAOIYPQ2BJy3iuj\nqQmmT4dEIiPQRg38di1889RKruy/gOc/Hs8t0Ux6ptoHsXBdq3memz1b7WsUFzuEw+nq4ZYN6Psy\nhyPm/3ngnazn24EL2zpQCFEGlAEMHDjwsx+ZRtPbyS4MAJg4ke0M5IYqm5W+1cJXv0V8PhRi9xUT\neHJrCRPXTsaQCTUpBAH/M8nhiqUWjzyi5paZM9PiGpBMGuzcOZhzGraQvy6gvrCJfG7jmLUBgy41\n+CAsm+Py4qJ6JopKfjp6Eh+c77NnUDnzHy5k0yarebhpggDOOMPluvvU5HFtYhk1NYUpwW+Z/ZQ2\nmZs3T92d1NbaJBKqergtu4i+ygHFXwjxIjCgjV/9WEr550M5GCnlLGAWwIgRI+QBDtdoNAeidWFA\nNMogy2JatI100VZ5pFuxuKsU1gIPMQmDAI8cFgc2RzgZoU0m8zmiFo5dJ2goDrFl5dcZ/sR0RAIG\nmeoewvDh9LDP6ukmjQUmA54R3Pbnp9h83sm8G5UIERBKehQVOWzc2Paq/JunVnPG/zSxd7jkvbM9\niosdXNdqs6Ar+1Ly8y1efz3WIuavOQjxl1KO7eRnvAucmvX8lNRrGo3ms6adwoDsFXK6I1YLv3vX\n5e/l0yj+xGYWrKyI2AAACk1JREFUZWwQhdjC4W/YrM6xuNdWp29sdOm//naK7vZVLYEZkJfnQFyo\nLJ2U/45A1Rrs/P1VfPI5KF38FLCCi16FTc+E2H212bxRO2yYy3nnOaxebVNXlxovLncvnEuOkMjH\nYe39JkOut/nNb9qveG6ZEmvRF3P598fhCPusBM4UQpyOEv1rgesOw+dqNBpotzCgXZ8g1yU5upQr\nEx5fJkIpMVZFLL50h8WRtVA5PnO6hgaH3DWJZnsIGSQ5bc+K5n2DNBLAhEv+4woS/7YAoPmYAS8N\n4MgLRiCdAfz7pes5/upyIM6NNxo8+eQMHnmkjNGBQ1gmMSTIpKBg9030y7V6ZW/dw4XRmTcLIf5J\nCLEdNaU+J4RYmHr9ZCHE8wBSyiQwGVgIbAL+KKXc2LlhazSaztKWTxDAiukOJDxC+ITxGIPDFVfA\nQw/Bjh0ur7wyjZdfdgFVSFZfGCYIK8sHyIhKWvyb7SAuh/qz6jnm5vEtfv/BkO2ccuNTDJozi6//\nZjJHbWgCAkwzyTe+MZlhw1yWGbaqKTBMRKQf/S5XjYTTNzb/+Z8HNrnTtKSz2T5PAk+28foO4CtZ\nz58Hnu/MZ2k0mkNL2udm6FCHjRvt5v64U56xWZgq3koQYZlpc94AGDIkndXj8cknER56KMaIERbP\n7XZ4dlQ1X961FGtjHQRK8BEgDVWXJcPw98siDMmzocxCAJ88di/bvrCF8F6ZunMIkL5qLL93qEw5\nhfo8+KDDSy9NZUt+jMJ6Zx9fi97WW/dwoSt8NZo+SkGBy69/rZw2gyBCEKjm6q9IVbxlo2L8g69V\nylpSkkmZTCY9Vqxw+OEPLSorLb53n8WCM1yemDyKghlJCMATYR4f8VXCe+HNgQP486NRZgxPpWeW\nleF9q5C/ry2l//o4QThAJAy8IIeZm2/ncv8BTNPHNHMoKrK5+GLQcftDixZ/jaaP0tDgAMrgLAg8\nZs92uOgiZYmwIm6xPGWO5j6uHDqLimykjDT3/129OlMnsGQJOI5F46il/H10NfEXYKmIshJV3BVs\nUufI3pDNtp/45Kx86v9Uzw1VNi+9arF4yjgefHDf7JyDaWSjOTi0+Gs0fZS8PBvfz3TPWrXK5uyz\nVey8ogJefLHZPp8ggPXrLTZtUimTd91ls3lzyzoBJcap1fnX4TSgaVbLc7RyZqauzsJxUi0kHyEr\nBdVq7rubFvz8/PYbx+sJoeNo8ddo+ii5uRb9+sWYPdth1SqbLVsyfXwrKmDZsowvvmEowR0xQony\njBkHFlzXVc3e035srR0209lGQ4a4bN7sEAQ2I0e2bKqSnZFkGGpzOu1Gmt6gTjX4wjBgxgwoO0BD\neo1Ci79G04cZOVIp7ZgxDmecQfNqu2WRlBLtbKFvt04gRVbXxWbhb+2w6ThK+O+/fwyhkEc8HqGx\ncUmLME92RlL6PEJk0jqzG3wFAUyeDIWF+g7gYNDir9H0YRobXXy/lEGDPHw/QmNjxvTMslRxVXtL\n/PbqBBobXV57zWHIENXK0TBg7Fh1N5F9CtuG996rJhyOpzJ74uzaVd1C/Fvn8VdW7jsRpTuGQcqe\n2tHifzBo8ddo+jANDQ5BkO6i1cr07ADd4l+vdvl+k8Ni
mWmhWFCgevUOGuRx770R7r47xpYt1j7C\nD+p5IsE+Pj6tj9lf5zLLUqGeyZPVedry79e0jRZ/jaYPk5dnYxgqg8cwWpme7a9bvOvynbmlSOnx\nYyJ8xYxh21aLyaRfP4+77nI46yyr3Y3ZYcOi1NZWIWUCIcIMGBDdZ4wHyuMvK1OhHr3p2zG0+Gs0\nfZj9drXan3eC42AmlcgL4THvJodBlkVjY8vJ5NJL80kkpvHyyzaXXmrtcxORm2tRXOx0uquWLvTq\nOFr8NZo+TrtdrfYXc8maGMxIhEFRu/lc6ckkHM7njTfKCQK1nzBkSIwNG6x9biJ0V62uQYu/RqNp\nn/aW1PuZGNJi/vbb05pDQKbpcf75qq+uNmDrHmjx12g0n46siaGx0d0ndNN6P2HiRFVEpuPy3QMt\n/hqNplM0NqoMn7TIDxsWa179t95PGDmyq0erSaPFX6PRdIr9pYvqeH73pVN+/hqNRpMO74C5b7qo\nptuiV/4ajaZT7DddVNNt0eKv0Wg6jQ7v9Dx02Eej0Wj6IFr8NRqNpg+ixV+j0Wj6IFr8NRqNpg+i\nxV+j0Wj6IFr8NRqNpg8ipJRdPYY2EULsAd7+lG8/DvjHIRxOV9DTr6Gnjx96/jX09PFDz7+Grhj/\nICnl8Qc6qNuKf2cQQtRIKUd09Tg6Q0+/hp4+fuj519DTxw89/xq68/h12Eej0Wj6IFr8NRqNpg/S\nW8V/VlcP4BDQ06+hp48fev419PTxQ8+/hm47/l4Z89doNBrN/umtK3+NRqPR7IdeJ/5CiMuFEJuF\nEG8IIX7U1ePpKEKIKiHEbiHEhq4ey6dBCHGqEGKJEKJOCLFRCHFHV4+powgh+gkhVggh1qau4Wdd\nPaZPgxDCFEKsEUI829Vj+TQIIbYKIdYLIWqFEDVdPZ6OIoTIE0L8rxDi/4QQm4QQ3cr2tFeFfYQQ\nJvAacCmwHVgJfFtKWdelA+sAQohRwIdAtZTyvK4eT0cRQpwEnCSlXC2EOApYBYzrYX8DARwppfxQ\nCBEGXgLukFIu7+KhdQghxJ3ACOBoKeWVXT2ejiKE2AqMkFL2yDx/IcQ8YJmUco4QIgJ8TkrZ0NXj\nStPbVv4XAG9IKd+UUnrAE8DXunhMHUJKuRR4r6vH8WmRUu6UUq5O/fwBsAn4fNeOqmNIxYepp+HU\nfz1qlSSEOAX4KjCnq8fSFxFC5AKjgEcBpJRedxJ+6H3i/3ngnazn2+lhwtObEEKcBpQAr3btSDpO\nKmRSC+wGFkkpe9o1VAJTgKCrB9IJJPCCEGKVEKKsqwfTQU4H9gBzU6G3OUKII7t6UNn0NvHXdBOE\nEP2BBUC5lHJvV4+no0gpfSllMXAKcIEQoseE4IQQVwK7pZSrunosneRLUsrhwBXApFRItKcQAoYD\nj0gpS4CPgG61B9nbxP9d4NSs56ekXtMcRlJx8gXA41LKP3X1eDpD6lZ9CXB5V4+lA4wErk7FzJ8A\nLhFCPNa1Q+o4Usp3U4+7gSdRYd2ewnZge9Yd4/+iJoNuQ28T/5XAmUKI01MbLNcCT3fxmPoUqc3S\nR4FNUspfd/V4Pg1CiOOFEHmpn49AJRD8X9eO6uCRUk6VUp4ipTwN9W9gsZTy+i4eVocQQhyZShgg\nFS75MtBjMuCklLuAd4QQZ6deKgW6VdJDr2rgLqVMCiEmAwsBE6iSUm7s4mF1CCHE7wEbOE4IsR34\nqZTy0a4dVYcYCfwzsD4VMwf4Vynl8104po5yEjAvlT1mAH+UUvbIdMkezInAk2otQQiYL6X8a9cO\nqcPcDjyeWoi+CUzo4vG0oFelemo0Go3m4OhtYR+NRqPRHARa/DUajaYPosVfo9Fo+iBa/DUajaYP\nosVfo9Fo+iBa/DUajaYPosVfo9Fo+iBa/DUajaYP8v9oNISUbMrW3gAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
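The output above is an inline matplotlib figure; "Figure size 432x288 with 1 Axes" is the inline backend's default 6x4-inch canvas at 72 dpi. Purely as an illustration of the kind of plotting cell that yields such an output (every name and value here is an assumption, not taken from this diff):

    import numpy as np
    import matplotlib.pyplot as plt

    # Illustrative sketch only: a default-size, single-Axes figure,
    # rendered inline as the (elided) embedded PNG above.
    x_values = np.random.uniform(low=0, high=2 * np.pi, size=1000)
    y_values = np.sin(x_values) + 0.1 * np.random.randn(1000)
    plt.plot(x_values, y_values, 'b.')
    plt.show()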
@@ -313,7 +309,9 @@
"metadata": {
"id": "gD60bE8cXQId",
"colab_type": "code",
- "colab": {}
+ "colab": {
+ },
+ "outputId": "90d25fd8-bf3c-4a31-a275-0777fe3aa475"
},
"source": [
"# We'll use Keras to create a simple model architecture\n",
@@ -328,10 +326,32 @@
"model_1.add(layers.Dense(1))\n",
"\n",
"# Compile the model using a standard optimizer and loss function for regression\n",
- "model_1.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])"
+ "model_1.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])\n",
+ "\n",
+ "# Print a summary of the model's architecture\n",
+ "model_1.summary()"
],
"execution_count": 0,
- "outputs": []
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Model: \"sequential\"\n",
+ "_________________________________________________________________\n",
+ "Layer (type) Output Shape Param # \n",
+ "=================================================================\n",
+ "dense (Dense) (None, 16) 32 \n",
+ "_________________________________________________________________\n",
+ "dense_1 (Dense) (None, 1) 17 \n",
+ "=================================================================\n",
+ "Total params: 49\n",
+ "Trainable params: 49\n",
+ "Non-trainable params: 0\n",
+ "_________________________________________________________________\n"
+ ],
+ "name": "stdout"
+ }
+ ]
},
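Although the hunk only shows the tail of this cell, the added summary output pins down the architecture: a 16-unit hidden layer over a scalar input (1*16 + 16 = 32 parameters) feeding a single output unit (16*1 + 1 = 17 parameters), 49 in total. A minimal self-contained sketch of such a cell follows; the relu activation and the input shape are assumptions, since they are not visible in this hunk:

    import tensorflow as tf
    from tensorflow.keras import layers

    # We'll use Keras to create a simple model architecture
    model_1 = tf.keras.Sequential()

    # Dense(16) over a scalar input: 1*16 weights + 16 biases = 32 params.
    # The relu activation and input_shape are assumed, not shown in the diff.
    model_1.add(layers.Dense(16, activation='relu', input_shape=(1,)))

    # Final Dense(1): 16 weights + 1 bias = 17 params, for 49 in total,
    # matching the "Total params: 49" line in the summary output above.
    model_1.add(layers.Dense(1))

    # Compile the model using a standard optimizer and loss function for regression
    model_1.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])

    # Print a summary of the model's architecture
    model_1.summary()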
{
"cell_type": "markdown",
@@ -356,7 +376,7 @@
"metadata": {
"id": "p8hQKr4cVOdE",
"colab_type": "code",
- "outputId": "3f1a7904-ffcd-4bb7-8bbb-bcd85a132128",
+ "outputId": "cab8f6d2-89fa-4bbc-f116-5e9ab633a9c8",
"colab": {
"base_uri": "https://localhost:8080/"
}
@@ -373,16 +393,14 @@
"text": [
"Train on 600 samples, validate on 200 samples\n",
"Epoch 1/1000\n",
- "600/600 [==============================] - 0s 412us/sample - loss: 0.5016 - mae: 0.6297 - val_loss: 0.4922 - val_mae: 0.6235\n",
+ "600/600 [==============================] - 1s 1ms/sample - loss: 0.7887 - mae: 0.7848 - val_loss: 0.5824 - val_mae: 0.6867\n",
"Epoch 2/1000\n",
- "600/600 [==============================] - 0s 105us/sample - loss: 0.3905 - mae: 0.5436 - val_loss: 0.4262 - val_mae: 0.5641\n",
- "...\n",
- "Epoch 998/1000\n",
- "600/600 [==============================] - 0s 109us/sample - loss: 0.1535 - mae: 0.3068 - val_loss: 0.1507 - val_mae: 0.3113\n",
+ "600/600 [==============================] - 0s 155us/sample - loss: 0.4883 - mae: 0.6194 - val_loss: 0.4742 - val_mae: 0.6056\n",
+ "...",
"Epoch 999/1000\n",
- "600/600 [==============================] - 0s 100us/sample - loss: 0.1545 - mae: 0.3077 - val_loss: 0.1499 - val_mae: 0.3103\n",
+ "600/600 [==============================] - 0s 149us/sample - loss: 0.1535 - mae: 0.3069 - val_loss: 0.1619 - val_mae: 0.3153\n",
"Epoch 1000/1000\n",
- "600/600 [==============================] - 0s 132us/sample - loss: 0.1530 - mae: 0.3045 - val_loss: 0.1542 - val_mae: 0.3143\n"
+ "600/600 [==============================] - 0s 124us/sample - loss: 0.1524 - mae: 0.3039 - val_loss: 0.1737 - val_mae: 0.3249\n"
],
"name": "stdout"
}
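The updated log still records a 600-sample training set, a 200-sample validation set, and 1000 epochs, so the call that produced it was presumably along these lines; the array names and the batch size are assumptions, as this hunk does not show the cell's source:

    # Train for 1000 epochs against the 600/200 train/validation split
    # reflected in the log. Variable names and batch_size are assumed.
    history_1 = model_1.fit(x_train, y_train,
                            epochs=1000, batch_size=16,
                            validation_data=(x_validate, y_validate))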
@@ -406,7 +424,7 @@
"metadata": {
"id": "CmvA-ksoln8r",
"colab_type": "code",
- "outputId": "1b834831-81e8-4548-dd8c-f5edf2c3ff43",
+ "outputId": "fdbc614f-f198-4d92-a393-5c6e034cb7a6",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 295
@@ -433,7 +451,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzt3Xd8FHX6wPHPk5AQamhRWiBRUHqN\nYA6BIIjYQJTzQFHh9FB/Kp7lFMspopzlPAse56l32FCxIIoKogIRPKIUpRcJECDUEDoB0p7fHzNJ\nNstuNm0JhOf9eu0rM9/5zsx3djb7zLfsjKgqxhhjTFFCKroAxhhjTn0WLIwxxgRkwcIYY0xAFiyM\nMcYEZMHCGGNMQBYsjDHGBGTBwpwUIhIqIodFpFl55q1IItJCRMp97LmI9BORFI/5dSLSszh5S7Gv\n/4jII6Vdv4jtPi0ib5f3dk3FqVLRBTCnJhE57DFbHTgO5Ljzt6nq+yXZnqrmADXLO++ZQFXPL4/t\niMitwHBVTfDY9q3lsW1T+VmwMD6pav6XtXvlequqfu8vv4hUUdXsk1E2Y8zJZ81QplTcZoaPRORD\nETkEDBeReBH5SUT2i8gOEZkgImFu/ioioiIS485PdpfPFJFDIpIkIrElzesuv0xEfhORAyLyqoj8\nT0RG+Cl3ccp4m4gki8g+EZngsW6oiLwkIukishEYUMT786iITPFKmygiL7rTt4rIGvd4NrhX/f62\nlSoiCe50dRF5zy3bKqCrV97HRGSju91VIjLQTW8P/BPo6Tbx7fF4b8d6rH+7e+zpIvK5iDQqznsT\niIgMdsuzX0TmiMj5HsseEZHtInJQRNZ6HOuFIvKLm75LRP5e3P2ZIFBVe9mryBeQAvTzSnsayASu\nwrnoqAZcAHTHqbGeA/wG3OXmrwIoEOPOTwb2AHFAGPARMLkUec8CDgGD3GX3AVnACD/HUpwyfgFE\nAjHA3rxjB+4CVgFNgfrAPOdfyOd+zgEOAzU8tr0biHPnr3LzCHAxcBTo4C7rB6R4bCsVSHCnXwAS\ngbpAc2C1V97rgEbuObneLcPZ7rJbgUSvck4GxrrT/d0ydgIigH8Bc4rz3vg4/qeBt93p1m45LnbP\n0SPAOne6LbAZaOjmjQXOcacXAcPc6VpA94r+XziTX1azMGXxo6p+qaq5qnpUVRep6s+qmq2qG4E3\ngN5FrP+pqi5W1SzgfZwvqZLmvRJYqqpfuMtewgksPhWzjM+o6gFVTcH5Ys7b13XAS6qaqqrpwLNF\n7GcjsBIniAFcAuxT1cXu8i9VdaM65gCzAZ+d2F6uA55W1X2quhmntuC5349VdYd7Tj7ACfRxxdgu\nwA3Af1R1qaoeA8YAvUWkqUcef+9NUYYC01V1jnuOnsUJON2BbJzA1NZtytzkvnfgBP2WIlJfVQ+p\n6s/FPA4TBBYsTFls9ZwRkVYi8rWI7BSRg8A4oEER6+/0mM6g6E5tf3kbe5ZDVRXnStynYpaxWPvC\nuSIuygfAMHf6enc+rxxXisjPIrJXRPbjXNUX9V7laVRUGURkhIgsc5t79gOtirldcI4vf3uqehDY\nBzTxyFOSc+Zvu7k456iJqq4D7sc5D7vdZs2GbtaRQBtgnYgsFJHLi3kcJggsWJiy8B42+jrO1XQL\nVa0NPI7TzBJMO3CahQAQEaHwl5u3spRxBxDtMR9oaO/HQD8RaYJTw/jALWM14FPgGZwmojrAt8Us\nx05/ZRCRc4DXgDuA+u5213psN9Aw3+04TVt526uF09y1rRjlKsl2Q3DO2TYAVZ2sqj1wmqBCcd4X\nVHWdqg7FaWr8BzBVRCLKWBZTShYsTHmqBRwAjohIa+C2k7DPr4AuInKViFQB7gGiglTGj4E/i0gT\nEakPPFRUZlXdCfwIvA2sU9X17qKqQDiQBuSIyJVA3xKU4RERqSPO71Du8lhWEycgpOHEzT/h1Czy\n7AKa5nXo+/AhcIuIdBCRqjhf2vNV1W9NrQRlHigiCe6+/4LTz/SziLQWkT7u/o66r1ycA7hRRBq4\nNZED7rHllrEsppQsWJjydD9wM84Xwes4HdFBpaq7gD8ALwLpwLnArzi/CynvMr6G07ewAqfz9dNi\nrPMBTod1fhOUqu4H7gWm4XQSD8EJesXxBE4NJwWYCbzrsd3lwKvAQjfP+YBnO/93wHpgl4h4Nifl\nrf8NTnPQNHf9Zjj9GGWiqqtw3vPXcALZAGCg239RFXgep59pJ05N5lF31cuBNeKMtnsB+IOqZpa1\nPKZ0xGniNaZyEJFQnGaPIao6v6LLY0xlYTULc9oTkQFus0xV4K84o2gWVnCxjKlULFiYyuAiYCNO\nE8elwGBV9dcMZYwpBWuGMsYYE5DVLIwxxgRUaW4k2KBBA42JianoYhhjzGllyZIle1S1qOHmQCUK\nFjExMSxevLiii2GMMacVEQl0JwLAmqGMMcYUgwULY4wxAVmwMMYYE1Cl6bMwxpxcWVlZpKamcuzY\nsYouiimGiIgImjZtSliYv1uDFc2ChTGmVFJTU6lVqxYxMTE4N/s1pypVJT09ndTUVGJjYwOv4IM1\nQxljSuXYsWPUr1/fAsVpQESoX79+mWqBQQ0W7j171rnP7B3jY/kIEUkTkaXu61aPZTeLyHr3dXMw\ny5m0NYln5j9D0takYO7GmErHAsXpo6znKmjNUO7dPyfiPE4yFVgkItNVdbVX1o9U9S6vdevh3Io5\nDuce9kvcdfeVdzmTtibR992+ZOZkEh4azuybZhMfHV/euzHGmNNaMGsW3YBk9znDmcAUCp5HHMil\nwHequtcNEN/h3AO/3CWmJJKZk0mO5pCZk0liSmIwdmOMKWfp6el06tSJTp060bBhQ5o0aZI/n5lZ\nvMdejBw5knXr1hWZZ+LEibz//vvlUWQuuugili5dWi7bOtmC2cHdhMLPCk7FeUC7t2tFpBfwG3Cv\nqm71s+4Jj8oUkVHAKIBmzQI94dK3hJgEwkPD82sWCTEJpdqOMebkql+/fv4X79ixY6lZsyYPPPBA\noTyqiqoSEuL7uvitt94KuJ8777yz7IWtBCq6g/tLIEZVO+DUHt4pycqq+oaqxqlqXFRUwFub+BQf\nHc/sm2bzVJ+nrAnKmCA7Gf2DycnJtGnThhtuuIG2bduyY8cORo0aRVxcHG3btmXcuHH5efOu9LOz\ns6lTpw5jxoyhY8eOxMfHs3v3bgAee+wxXn755fz8Y8aMoVu3bpx//vksWLAAgCNHjnDttdfSpk0b\nhgwZQlxcXMAaxOTJk2nfvj3t2rXjkUceASA7O5sbb7wxP33ChAkAvPTSS7Rp04YOHTowfPjwcn/P\niiOYNYttFH6wfP4D2vOoarrH7H9wHq+Yt26C17qJ5V5CV3x0vAUJY4LsZPYPrl27lnfffZe4uDgA\nnn32WerVq0d2djZ9+vRhyJAhtGnTp
tA6Bw4coHfv3jz77LPcd999TJo0iTFjThiXg6qycOFCpk+f\nzrhx4/jmm2949dVXadiwIVOnTmXZsmV06dKlyPKlpqby2GOPsXjxYiIjI+nXrx9fffUVUVFR7Nmz\nhxUrVgCwf/9+AJ5//nk2b95MeHh4ftrJFsyaxSKgpYjEikg4MBSY7plBRBp5zA4E1rjTs4D+IlJX\nROoC/d20cpeZCfPmwbZtgfMaY0rvZPYPnnvuufmBAuDDDz+kS5cudOnShTVr1rB6tfc4G6hWrRqX\nXXYZAF27diUlJcXntq+55poT8vz4448MHToUgI4dO9K2bdsiy/fzzz9z8cUX06BBA8LCwrj++uuZ\nN28eLVq0YN26dYwePZpZs2YRGRkJQNu2bRk+fDjvv/9+qX9UV1ZBCxaqmg3chfMlvwb4WFVXicg4\nERnoZhstIqtEZBkwGhjhrrsXeAon4CwCxrlp5W7/fujdGz7/PBhbN8bkyesfDJXQoPcP1qhRI396\n/fr1vPLKK8yZM4fly5czYMAAn783CA8Pz58ODQ0lOzvb57arVq0aME9p1a9fn+XLl9OzZ08mTpzI\nbbfdBsCsWbO4/fbbWbRoEd26dSMnJ6dc91scQf0Ft6rOAGZ4pT3uMf0w8LCfdScBk4JZPoDQUOdv\nbm6w92TMmS2vfzAxJZGEmIST1vR78OBBatWqRe3atdmxYwezZs1iwIDyHVzZo0cPPv74Y3r27MmK\nFSt81lw8de/enQceeID09HQiIyOZMmUKDzzwAGlpaURERPD73/+eli1bcuutt5KTk0NqaioXX3wx\nF110EdHR0WRkZFCrVq1yPYZAzvjbfeQNkqiAQG3MGaci+ge7dOlCmzZtaNWqFc2bN6dHjx7lvo+7\n776bm266iTZt2uS/8pqQfGnatClPPfUUCQkJqCpXXXUVV1xxBb/88gu33HILqoqI8Nxzz5Gdnc31\n11/PoUOHyM3N5YEHHjjpgQIq0TO44+LitDQPPzp4ECIj4R//gPvuC0LBjKmk1qxZQ+vWrSu6GKeE\n7OxssrOziYiIYP369fTv35/169dTpcqpdT3u65yJyBJVjfOzSr5T60gqgNUsjDFldfjwYfr27Ut2\ndjaqyuuvv37KBYqyqlxHUwrWZ2GMKas6deqwZMmSii5GUFX0j/IqnNUsjDEmsDM+WFjNwhhjAjvj\ng4XVLIwxJjALFu47YDULY4zx74wPFuAEDKtZGHN66dOnD7NmFb4L0Msvv8wdd9xR5Ho1a9YEYPv2\n7QwZMsRnnoSEBAINxX/55ZfJyMjIn7/88svL5b5NY8eO5YUXXijzdsqbBQucfgurWRhzehk2bBhT\npkwplDZlyhSGDRtWrPUbN27Mp59+Wur9eweLGTNmUKdOnVJv71RnwQKrWRhzOhoyZAhff/11/oOO\nUlJS2L59Oz179sz/3UOXLl1o3749X3zxxQnrp6Sk0K5dOwCOHj3K0KFDad26NYMHD+bo0aP5+e64\n447825s/8cQTAEyYMIHt27fTp08f+vTpA0BMTAx79uwB4MUXX6Rdu3a0a9cu//bmKSkptG7dmj/9\n6U+0bduW/v37F9qPL0uXLuXCCy+kQ4cODB48mH379uXvP++W5Xk3MPzhhx/yH/7UuXNnDh06VOr3\n1pcz/ncWYDULY8rqz3+G8n4AXKdO4H7P+lSvXj26devGzJkzGTRoEFOmTOG6665DRIiIiGDatGnU\nrl2bPXv2cOGFFzJw4EC/z6F+7bXXqF69OmvWrGH58uWFbjE+fvx46tWrR05ODn379mX58uWMHj2a\nF198kblz59KgQYNC21qyZAlvvfUWP//8M6pK9+7d6d27N3Xr1mX9+vV8+OGHvPnmm1x33XVMnTq1\nyOdT3HTTTbz66qv07t2bxx9/nCeffJKXX36ZZ599lk2bNlG1atX8pq8XXniBiRMn0qNHDw4fPkxE\nREQJ3u3ArGaB1SyMOV15NkV5NkGpKo888ggdOnSgX79+bNu2jV27dvndzrx58/K/tDt06ECHDh3y\nl3388cd06dKFzp07s2rVqoA3Cfzxxx8ZPHgwNWrUoGbNmlxzzTXMnz8fgNjYWDp16gQUfRt0cJ6v\nsX//fnr37g3AzTffzLx58/LLeMMNNzB58uT8X4r36NGD++67jwkTJrB///5y/wW51SywmoUxZVVU\nDSCYBg0axL333ssvv/xCRkYGXbt2BeD9998nLS2NJUuWEBYWRkxMjM/bkgeyadMmXnjhBRYtWkTd\nunUZMWJEqbaTJ+/25uDc4jxQM5Q/X3/9NfPmzePLL79k/PjxrFixgjFjxnDFFVcwY8YMevTowaxZ\ns2jVqlWpy+rNahZYzcKY01XNmjXp06cPf/zjHwt1bB84cICzzjqLsLAw5s6dy+bNm4vcTq9evfjg\ngw8AWLlyJcuXLwec25vXqFGDyMhIdu3axcyZM/PXqVWrls9+gZ49e/L555+TkZHBkSNHmDZtGj17\n9izxsUVGRlK3bt38Wsl7771H7969yc3NZevWrfTp04fnnnuOAwcOcPjwYTZs2ED79u156KGHuOCC\nC1i7dm2J91kUq1ng1CwsWBhzeho2bBiDBw8uNDLqhhtu4KqrrqJ9+/bExcUFvMK+4447GDlyJK1b\nt6Z169b5NZSOHTvSuXNnWrVqRXR0dKHbm48aNYoBAwbQuHFj5s6dm5/epUsXRowYQbdu3QC49dZb\n6dy5c5FNTv6888473H777WRkZHDOOefw1ltvkZOTw/Dhwzlw4ACqyujRo6lTpw5//etfmTt3LiEh\nIbRt2zb/qX/l5Yy/RTnA2WfD4MHw73+Xc6GMqcTsFuWnn7LcotyaobCahTHGBGLBAuvgNsaYQIIa\nLERkgIisE5FkERlTRL5rRURFJM6djxGRoyKy1H0FtYHIOriNKZ3K0ox9JijruQpaB7eIhAITgUuA\nVGCRiExX1dVe+WoB9wA/e21ig6p2Clb5PFnNwpiSi4iIID09nfr16/v9sZs5Nagq6enpZfqhXjBH\nQ3UDklV1I4CITAEGAd6/aHkKeA74SxDLUiSrWRhTck2bNiU1NZW0tLSKLoophoiICJo2bVrq9YMZ\nLJoAWz3mU4HunhlEpAsQrapfi4h3sIgVkV+Bg8BjqjrfewciMgoYBdCsWbNSF9RqFsaUXFhYGLGx\nsRVdDHOSVFgHt4iEAC8C9/tYvANopqqdgfuAD0SktncmVX1DVeNUNS4qKqrUZbGahTHGFC2YwWIb\nEO0x39RNy1MLaAckikgKcCEwXUTiVPW4qqYDqOoSYANwXrAKajULY4wpWjCDxSKgpYjEikg4MBSY\nnrdQVQ+oagNVjVHVGOAnYKCqLhaRKLeDHBE5B2gJbAxWQa1mYYwxRQtan4WqZovIXcAsIBSYpKqr\nRGQcsFhVpxexei9gnIhkAbnA7aq6N1hltZqFMcYULaj3hlLVGcAMr7TH/eRN8JieCkwNZtk8Wc
3C\nGGOKZr/gxmoWxhgTiAULrGZhjDGBWLDAahbGGBOIBQusZmGMMYFYsMBqFsYYE4gFC6xmYYwxgViw\nwGoWxhgTiAULrGZhjDGBWLDAahbGGBOIBQusZmGMMYFYsMBqFsYYE4gFC6xmYYwxgViwwKlZWLAw\nxhj/LFjg1CysGcoYY/yzYIHVLIwxJhALFlgHtzHGBGLBAuvgNsaYQCxYYDULY4wJxIIFVrMwxphA\nghosRGSAiKwTkWQRGVNEvmtFREUkziPtYXe9dSJyaTDLaTULY4wpWpVgbVhEQoGJwCVAKrBIRKar\n6mqvfLWAe4CfPdLaAEOBtkBj4HsROU9Vg3L9bzULY4wpWjBrFt2AZFXdqKqZwBRgkI98TwHPAcc8\n0gYBU1T1uKpuApLd7QWF1SyMMaZowQwWTYCtHvOpblo+EekCRKvq1yVd111/lIgsFpHFaWlppS6o\n1SyMMaZoFdbBLSIhwIvA/aXdhqq+oapxqhoXFRVV6rJYzcIYY4oWtD4LYBsQ7THf1E3LUwtoBySK\nCEBDYLqIDCzGuuXKahbGGFO0YNYsFgEtRSRWRMJxOqyn5y1U1QOq2kBVY1Q1BvgJGKiqi918Q0Wk\nqojEAi2BhcEq6K6M7RzPziRpa1KwdmGMMae1oAULVc0G7gJmAWuAj1V1lYiMc2sPRa27CvgYWA18\nA9wZrJFQSVuT+Hj1h2Rl59L33b4WMIwxxodgNkOhqjOAGV5pj/vJm+A1Px4YH7TCuRJTEslBIDeE\nzJxMElMSiY+OD/ZujTHmtHLG/4I7ISaB0BABDSU8NJyEmISKLpIxxpxyzvhgER8dz02drgcNZfZN\ns61WYYwxPpzxwQKgWV1n4FX3JhYojDHGFwsWOL+zAPuthTHG+GPBgoJgkZ1dseUwxphTlQULoIo7\nJsx+mGeMMb5ZsKAgWFjNwhhjfLNggQULY4wJxIIFFiyMMSYQCxZYsDDGmEAsWGDBwhhjArFggQUL\nY4wJxIIFFiyMMSYQCxZYsDDGmEAsWGDBwhhjArFggQULY4wJxIIFFiyMMSYQCxZYsDDGmEAsWGDB\nwhhjAglqsBCRASKyTkSSRWSMj+W3i8gKEVkqIj+KSBs3PUZEjrrpS0Xk38EspwULY4wpWpVgbVhE\nQoGJwCVAKrBIRKar6mqPbB+o6r/d/AOBF4EB7rINqtopWOXzZMHCGGOKFsyaRTcgWVU3qmomMAUY\n5JlBVQ96zNYANIjl8Wt1+nIAVuxYUxG7N8aYU14wg0UTYKvHfKqbVoiI3CkiG4DngdEei2JF5FcR\n+UFEevragYiMEpHFIrI4LS2tVIVM2prEnTNvA+DR758gaWtSqbZjjDGVWYV3cKvqRFU9F3gIeMxN\n3gE0U9XOwH3AByJS28e6b6hqnKrGRUVFlWr/iSmJZHEUgOxsJTElsVTbMcaYyiyYwWIbEO0x39RN\n82cKcDWAqh5X1XR3egmwATgvGIVMiEkgrIoAEEoECTEJwdiNMcac1oIZLBYBLUUkVkTCgaHAdM8M\nItLSY/YKYL2bHuV2kCMi5wAtgY3BKGR8dDzvXjsJgEd+9zjx0fHB2I0xxpzWgjYaSlWzReQuYBYQ\nCkxS1VUiMg5YrKrTgbtEpB+QBewDbnZX7wWME5EsIBe4XVX3BqusF0R3BiA2smWAnMYYc2YKWrAA\nUNUZwAyvtMc9pu/xs95UYGowy+bJhs4aY0zRitUMJSLnikhVdzpBREaLSJ3gFu3ksWBhjDFFK26f\nxVQgR0RaAG/gdFx/ELRSnWQWLIwxpmjFDRa5qpoNDAZeVdW/AI2CV6yTy4KFMcYUrbjBIktEhuF0\nQH/lpoUFp0gnnwULY4wpWnGDxUggHhivqptEJBZ4L3jFOrksWBhjTNGKNRrKvfnfaAARqQvUUtXn\nglmwkykvWGRlVWw5jDHmVFXc0VCJIlJbROoBvwBvisiLwS3ayRMa6vy1moUxxvhW3GaoSPcOsdcA\n76pqd6Bf8Ip1coWEOC8LFsYY41txg0UVEWkEXEdBB3elUqWKBQtjjPGnuMFiHM5tOzao6iL3fk3r\ng1esk8+ChTHG+FfcDu5PgE885jcC1warUBUhLMyChTHG+FPcDu6mIjJNRHa7r6ki0jTYhTuZrGZh\njDH+FbcZ6i2c24s3dl9fummVhgULY4zxr7jBIkpV31LVbPf1NlC6R9OdoixYGGOMf8UNFukiMlxE\nQt3XcCA9mAU72XLkGL9uW27P4DbGGB+KGyz+iDNsdifO87GHACOCVKaTLmlrErsytvHr9hX0fbev\nBQxjjPFSrGChqptVdaCqRqnqWap6NZVoNFRiSiIakoXmhJKZk0liSmJFF8kYY04pZXkG933lVooK\nlhCTgITkgIYRHhpOQkxCRRfJGGNOKWV5rKqUWykqWHx0PC0aHKFKvTD+e9Ns4qPjK7pIxhhzSilL\nzUIDZRCRASKyTkSSRWSMj+W3i8gKEVkqIj+KSBuPZQ+7660TkUvLUM5iqV2tBudEnmeBwhhjfCiy\nZiEih/AdFASoFmDdUGAicAmQCiwSkenu7c7zfKCq/3bzDwReBAa4QWMo0Bbndx3fi8h5qppTvMMq\nORs6a4wx/hUZLFS1Vhm23Q1Idm8NgohMAQYB+cHCvZNtnhoUBKZBwBRVPQ5sEpFkd3tBG6ZkwcIY\nY/wrS59FIE2ArR7zqUB370wicidOZ3k4cLHHuj95rdvEx7qjgFEAzZo1K1NhLVgYY4x/ZemzKBeq\nOlFVzwUeAh4r4bpvqGqcqsZFRZXtB+UWLIwxxr9gBottQLTHfFM3zZ8pwNWlXLfMLFgYY4x/wQwW\ni4CWIhIrIuE4HdbTPTOISEuP2SsoeEbGdGCoiFQVkVigJbAwiGW1YGGMMUUIWp+FqmaLyF04D00K\nBSap6ioRGQcsVtXpwF0i0g/IAvYBN7vrrhKRj3E6w7OBO4M5EgosWBhjTFGC2cGNqs4AZnilPe4x\nfU8R644HxgevdIVZsDDGGP8qvIP7VGHBwhhj/LNg4bJgYYwx/lmwcFmwMMYY/yxYuPYe38W+jIP2\nLAtjjPHBggXOw49mbPiSg0cz7OFHxhjjgwULnIcf5cpxyKliDz8yxhgfLFjgPPwoJFQh1x5+ZIwx\nvliwwHn40R86XEMY1ZltDz8yxpgTBPVHeaeTmPqN0RwsUBhjjA9Ws3CFhTlDZ3NzK7okxhhz6rFg\n4QoPd/5mZVVsOYwx5lRkwcJlwcIYY/yzYOEKC3P+ZmZWbDmMMeZUZMHCZTULY4zxz4KFy2oWxhjj\nnwULV17NwoKFMcacyIKFy5qhjDHGPwsWLmuGMsYY/yxYuDYeWAvAkq3LK7gkxhhz6glqsBCRASKy\nTkSSRWSMj+X3ichqEVkuIrNFpLnHshwRWeq+pgeznElbk3jsh4cAuH36aLtFuTHGeAlasBCRUGAi\ncBnQBhgmIm28sv0KxKlqB+BT4HmPZUdVtZP7Ghisc
oJzi/JsyQAgOzPUblFujDFeglmz6AYkq+pG\nVc0EpgCDPDOo6lxVzXBnfwKaBrE8fiXEJFClqvNM1Sq5Ne0W5cYY4yWYwaIJsNVjPtVN8+cWYKbH\nfISILBaRn0Tkal8riMgoN8/itLS0Uhc0Pjqe169+GYAnL3re7jxrjDFeTolblIvIcCAO6O2R3FxV\nt4nIOcAcEVmhqhs811PVN4A3AOLi4rQsZbgwtqOz05rnl2UzxhhTKQWzZrENiPaYb+qmFSIi/YBH\ngYGqejwvXVW3uX83AolA5yCWlWrVnL9HjwZzL8YYc3oKZrBYBLQUkVgRCQeGAoVGNYlIZ+B1nECx\n2yO9rohUdacbAD2A1UEsqwULY4wpQtCaoVQ1W0TuAmYBocAkVV0lIuOAxao6Hfg7UBP4REQAtrgj\nn1oDr4tILk5Ae1ZVLVgYY0wFCWqfharOAGZ4pT3uMd3Pz3oLgPbBLJs3CxbGGOOf/YLbVaWK87Jg\nYYwxJ7Jg4aF6dThypKJLYYwxpx4LFq6krUlI1YMk79gdOLMxxpxhLFjgBIq+7/blgGzhm1UL7N5Q\nxhjjxYIFzr2hMnMyIfwgucdq2b2hjDHGiwULnHtDhYeGQ8RB5Hik3RvKGGO8WLDAuTfU7Jtm0z66\nGdFV29q9oYwxxosFC1d8dDxdm7chN7NaRRfFGGNOORYsPFSrZr+zMMYYXyxYeKhe3YKFMcb4YsHC\nQ3pWKkePKgu22NBZY4zxZMEY0jgoAAAd/klEQVTClbQ1icmr30RV6DvpcvuthTHGeLBg4UpMSSQn\n9DAAmcftOdzGGOPJgoXLeQ53FgDhufZbC2OM8WTBwhUfHc+YhLsBePfKT+y3FsYY48GChYdOzVsC\n0LJWlwouiTHGnFosWHjYkb0KgPlrVlVwSYwx5tRiwcKVtDWJ++ePAOCB6X+z0VDGGOPBgoUrMSWR\nrKo7Acg6HGmjoYwxxkNQg4WIDBCRdSKSLCJjfCy/T0RWi8hyEZktIs09lt0sIuvd183BLCe4d56t\ndQiA0GNn2WgoY4zxELRgISKhwETgMqANMExE2nhl+xWIU9UOwKfA8+669YAngO5AN+AJEakbrLKC\nMxpqzh9nEl4tkyGxo2w0lDHGeAhmzaIbkKyqG1U1E5gCDPLMoKpzVTXDnf0JaOpOXwp8p6p7VXUf\n8B0wIIhlzVet1lEO7qtyMnZljDGnjWAGiybAVo/5VDfNn1uAmSVZV0RGichiEVmclpZWpsLmP1o1\nZCPfrFhsHdzGGOPhlOjgFpHhQBzw95Ksp6pvqGqcqsZFRUWVqQz5j1atlk5uRh3r4DbGGA/BDBbb\ngGiP+aZuWiEi0g94FBioqsdLsm55ynu0qlTfixytbx3cxhjjIZjBYhHQUkRiRSQcGApM98wgIp2B\n13ECxW6PRbOA/iJS1+3Y7u+mBU3eo1VbN2tI1czGwdyVMcacdoIWLFQ1G7gL50t+DfCxqq4SkXEi\nMtDN9negJvCJiCwVkenuunuBp3ACziJgnJsWdL9lJHHsUHUufruf9VsYY4wrqMN+VHUGMMMr7XGP\n6X5FrDsJmBS80p0oMSWRnGppoKFkHq5FYkqiDaE1xhhOkQ7uU0VCTAKhjVYCELKju/VbGGOMy4KF\nl5Cz1jgTe2MrtiDGGHMKsWDhoeD+ULlk743myyWLKrpIxhhzSrBg4aF+9fpoSDZU2wtJ9/PM4NEV\nXSRjjDklWLDwkJ6RToiEQPU9FV0UU0mtXQs33gjZ2RVdEmNKxoKFh4SYBKqGVoWGy/PTcnOdvxkZ\nMGkSqFZQ4UylcOONMHky/PprRZfEmJKxYOEhPjqelwe8TEjbqflpc9YuBGDMGLjlFpgV1J8GmsrO\nLjbM6cqChZdfd/xKbqtP8ufvfGQLANu3O/OHDlVEqYwxpmJZsPCy8/BOCFE4/wsAfvtiSKFfcn/i\nxpHcXEhOrogSGlMgIwOeeebM6gP54gtISSn/7ebklP82i+Nf/4LvvquYfZeEBQsvDWs2dCa6vpGf\n9tbnG1i3zpn+5BNYuRKeegpatoT16wuvn5EB48ZBZmbJ9vvee3DNNWUo+Blq1Sr46KPSr//llzB1\nauB8p6onn4RHHnE+PyXx8cfwpz8Fp0zBdvXV0LVr+W5zwQKoUgXeead8t1scd94J/fuf/P2WlAUL\nLzd1vIkQQqD+uvy0N0cPZ+XKgjy//QbffutM79pVkL5smdOv8cQT8N//+t7+3LkgAnu8BlzddBNM\nm1ZOB3EGadcOhg4tmN/itBqSng779wdef+BAGDIkOGU7Gfbtc/4eP150Pm9/+AP85z/lX55gyxtw\nsrcc7xT34Yfw9tvO9Pffl992KxsLFl7io+Np1aAV1N3oN8+11zpXIgChoc7fnBzo1AmmTHHmDx/2\nve7zzzt/F/n5vd/IkU7NZPdu38v9OXgQGjSAOXNKtl5FUg3cnLB/f/H7iWbPhubNndpfgwZQv/6J\n+1u7tvjlGzMGbr+9+PlLIiMjcB5vubnw7LNw4EBBWl7TSZVK+HDHl15yLqyOHi1IK2mNvTiuvx7e\nfNOZFin/7VcWFix8OK/+eU6/xY1+73OY73//g88+O/Gf1V/7Z96H0d+omLffdmomt93mzE+bBhdd\nVHBF5c+yZc7V9F//GrDIJ0VyMmRlFZ3nX/+C2Niih5HWrQtNinq+ois7G5YscabzmqW837N33oHW\nrZ2/IvDHP/re1ldfOV9Kzz0Hr78eeN9563z9dfHyAiQkFPR/FdeMGfDww3DffQVpeZ+zvIsWEadZ\no7iOHIE+fZzmvLJIT3fOU945KI6UlKKDZt6FlWcNMRjBorzs2FHRJQguCxY+PNjjQQSB5vMC5v3L\nX+Cep1afkO4vWIS473jeF5mqcwXlLe/q8ZprnIDkr6aSx7OGA87VeKB1fMnIgEsvhTVrSr4uwPLl\nThBo2RIeeKDovImJzt9AAwWKU7M4erTgPfXXB5H3hfivfzl/33rrxDw//ABXXVU46L77btH7zsx0\n1rnyysJXwYF8/nnx8wIcO+b89fzy9A4WUHB8xTFnjnMeAp2r4mxn+3ans91Tdrb/C6PYWBg0yAkw\nG92K/OrVTsCb4XGvas+g79nc5mu7WVmBL6yK4mubqvDgg0Vf1CxeDI0bFzRn5dm/P/Dn53RhwcKH\n+Oh4/vfH/1G/Zm24rXPA/KnbTvx0PjnnKeo9V4/oF6Op9bdaVB11MZH3JPDD5kQAJv78GoOnDKbf\nC/cUulLMM3du4fkjR3zvW9XpP8n7kOd9edSu7f+KPCcH7r0XfvnlxGXz5zvbu/tu3+sG0rFjwZVt\noN+k5JXZs+q/ahX89FPJf4+QkRH4S6JOHeevry/08ePhm28g71HueVe1AJ9+WjD9wgtOTdLTgw8W\nTD/0UMH01Klwxx3w5z/DaB93jinuMf7yi/MebXWfSv/ZZ05t4NChwsGiNFfdJe3r8LZoUeHOde9j\nCgvz/VnK
q3V+/z3ExcG55zp9gR9+6KR79t95ltFzev78E7cbHu6/xpiW5pyrko4cO3wY/v536N3b\nf568CxHvZuARI+Dmm0+8+Jowwemo93VRGRLi9H2Cs94HH5SsvEGjqpXi1bVrVy1vC7YsUBkryoN1\n1fk38POKSD8x7ZxZyuX/p0TsVUb2KEg/7wvn77V/UMai3NrN73Zr//mi/OlGf75aGw78p5739wv0\nrN/N1AGv3ay9JvXSc255TEG1Ra+F+XlH3rM1f/pv8/6mC7YsKHRcM2YU7MPzWP8272/6z49WKqhe\ndFHp3jPP8rdsWXTea6918j32mO/1o6JOLKdn+VevLlj+/vuqY8ac+B56mjDBSYuN9X8uP/nkxLQb\nbzyxfJ569ChIHzzY97HkrdO1q//yff656qRJqjk5ql9+qZqb66T/6U9O3ksvLbzukiWq113nTL/3\nnurevb6364vn+5a37dLwft88jz8z0395Dhwo4v8J1dtuU23UqOA850lOLsjz1VeFt5mTU7DstddU\np00rvPz6651l3un+znWeXbucZRER/t+Hd95x8gwfXji9fXsnfdky3/s8cqRg+tZbVRcuLPyeFXU+\n8z4fZQUs1mJ8x1bCbrHyk1fDuHnazawfUxsW/R/88ifYd27hjMfqnbjyxv7OC2Cqx6WBuJdeU6dA\n1BpI8lGtcB18ueDSacfLzqXWzi9vBw3lmwUDYKxAahcAkpdF5ed965Wm+dOPzHkEgFAJJTTEaavI\nXT0QcBrMqzxRnarhQka223i88TtgDj/+CBEjryTsvB9oVLMRmTmZiAjNIptx8NhBth3aRpPaTUBh\nT8Yeru9wvbvH5/L3vWnvZmJfSaBORB2OZx/n/Abnc98FDxFaRflhcyJTpz4MwNNPQ9crl3B240yg\n4GFTeVf5AHd8dQcAnRt15t+vxAPtGTGiYPkNN/h+D+/46g46N+pMekY6u9KHArGkbs8CwnzmX7R+\nA1D4/LZo4XvbeTyv6DccXE3S1gPFfmhWv35OLaZ2bedKE5wa0q23OreXGTmy4Grdu6bmWbPIyirc\n7Ji0NYk5GxOpvnUQf76hDatXQ2QkNG3qv1mnLH7/e+fvuj3rSNq6l7iG8YwZ4z9/oOa6Pcd2cjiz\nBlCLDz5wOqFbt/Zd3gULnJp3jx4FaXc4H5f89w4KrtBXbPuNNfOnkhCTQLfGhc+TqlNLaN3aaRpr\n0aKgX8VXzVXVOUd5zcue+4OCWoy/Zum8UZXgjE7zbsbKs32708zlKSwMLrvMGf59Moh6H91pKi4u\nThcvXhyUbT8z/xkem/sYuerxadnfDA42gakfwoHmTlqH92DV7yEnIijlOMETAotvg6//DaHHIafq\niXnGerTxHI6Cb/8B+2NgS08n7S9RUGMPHGwE1fbBlGmwYcCJ6+eGwFevwbnfQbP5UGuXkya5IMDO\n9lB/PYz3+BaoegBG9nLutZUVASiMPwYXvgT9xsDTHv/5t1wI0T/DWD+fx7xyZIfBR5/B+iuL9x49\nWA+yakBkKvz4IHz/XOB1vFTt/RI1Ln+a2lVrk/LnTQA0fKERDWs25Hj2cTY8M5XMba2dzB3fgcEj\n4Pk0yGhQaDu1/labw//8Ad1edNNmzf5/5/C3f6HaBR+itVM5NvsvPvM1aLOCw7npHFubQM0BzxLR\nbiZ7XvgBABkbgv50F3wzgaiRd5D21msAXP3hYNbNb8eafz0FQIsbXyL5vXup2+5nat/yB0QkP7hH\n1Yji4LGDrE9PJmvBnTTsuIwaTTYTVSMKFFbuXsneMemFC9VqGuE3DKXvgfeY+cJ1+ck3T36A7Go7\nSUxJpPbhriA5rHn8K/9vQvdXnP+lw+43pOTQ8O9Nqb2vF7+Nd0YwNL3t/6jSemb+OWn394tZ+ZfC\n7UBXfziYhjUbsubzq/jhv5c7idfcAB2cyBEV1py0R1Py84fV207W3saE1NhL7pF6XPXq/Szdtpqt\nz8503vOnY6hbO5yj+2sjAlE5nfhlbMEY5OiuK7nygc9JTP+Afcf2see5BWSnxdJ//JOsrPomUTWi\niK0Tw+fDAo+Tb/3PNqy5q6A/tNek3qRlpBFVI4p6EfXyt9Hsrlt5dEQ3RnUdFXCbvojIElWNC5TP\nahbFkHeDwcycTMJDw7m7+918tPIj9h5dSZUHu5N5pBqZx4UqtfaSdemjZM+7HxbfDioQmuV8WQXD\nS1vgYLQz7StQAGyJh8ZLYG8LeGseHPUaT/rWDxD3b/hmAkRuLgh83t5YDDs7wy/uB7LuBqeG1ekt\nGHAP/Hs5tPX6ddzxSPj3MieojT8K1d2qwk/3wnavX1UdbApZS4s+3uRLYPK3Refx9rw7IH+sQHbp\ngvjxH+7lePwT7M0uGLqz8/BO59f+AMc9AlyIeynpFSgADmUeAg3c+3o42/nxxNFFw4rMt2d1ewhz\nOrMOfzOGwxva5y/T+Q/BIeeLNi9QAHy+7nPYWvAZSN7u/FBo39F97Duw2UnMioCke1nzu39AlUzY\n0RFmPMuWJUvhjs6s2bgfdnSF83z82EGFzLSmzHyzb6Hkd4a/4EzU3sq2g9FwXoDL4ZDsglo4gIY6\n7/mSs/OTUtPTYc/2/PmVqScOd/f5pawFXbVpa84vtChrv3Peco84rQVfLlkEYQUXQHv+tpA9D56d\nf1Gz1WvTW5e047Vh7WDsX0GBvY0A+HbdPJg+n+2tprFswP3+j9vDmj2FOzrmbZlXkH68Zn76ln/+\nh9tqRjJv8zwmXzO5WNsujaAGCxEZALwChAL/UdVnvZb3Al4GOgBDVfVTj2U5wAp3douqDgxmWYsS\nHx3P7Jtmk5iSSEJMAvHR8TzXz/8VatLWJBJTXmT/8f3MWT+fs6s1p0HKbSzfsZK1yyLJOh5GiISQ\ns+ccqJdMTs/H4Z+/QUgW1NoGB2KKLpDkgIYWBIqiTFpQ9PI9bZxAAb4DxViFS+91AoWnvKa4pSOh\npTtmdMMlvvex0f3iyChoKmNLr8J5DjSHxCeLLmtJA4W3rGqlX3feX2GBxxX+8ZpQ1W33yfbY7q+3\nQJSfcage/+BFCvHogQ3JhNxw/3k9L0TWX1EwnfgEdH3zxPyLbndqg/llql0wvSXe2V5qd5j7NCy8\nG7q8CfV/c5bvbg/J/WGy2x52vcf+8gl8NvnEi5I8eZ/Z367yf0xQuIx5Vg8u+KwC7D23cO10ge8a\n2AnyLhoONCk4ljze7/WxOoWCCxlnwe7WgfehwNxxBa0MR+vC/lj46T4oZrDgO6+hZYpzAbo9Dqoe\nLLzsnTm8HxFHr+a9Sl3DCKg4HRuleeEEiA3AOUA4sAxo45UnBidQvAsM8Vp2uCT7C0YH98myYMsC\nveqd67TVhLba8V9dNOS+5srDNZT7muh5j16nLR64WRnVRbnlQqXTJK11++Uaes5crdr6W61Sf4tS\nO7Wgk67uegVVaf2Zhpwz208HYk6RnYun5Gvw8LKtf8GrZVu/2bzC8
+0+UFp9psS/UP7HetH48tmO\nZAXOc863zt9zZxakdXulcJ4ubxRMR24qmK637sTtnf+5cvbSspe92ytKra1l346/V42dziCU4uS9\n/rLy3fclD5RuvbvOU6occabbv1d4Waupyli0/7v9S/z9QzE7uANmKO0Lp6dylsf8w8DDfvK+fSYH\nC295I5M8RzH5SvO3jq9REuvWqb7w9mq9/4PXdMGWBfrrr6qPPKI6ZMR2rV5vn0Zf/p7yu+eVyE0q\n50/XGg23FfowhjVepY2ufkURJ9DUar1AQbVK1MYTP9Qd3lHO/Sbwh7/hkhPTRvRS2k4J3peEvU6P\nV/XdFV+G0+nV5mNlLPr64tdL/H1T3GARtA5uERkCDFDVW935G4HuqnqXj7xvA19p4WaobGApkA08\nq6on/IRJREYBowCaNWvWdfPmzcE4lDOG03xW0NTmT06OM64/K8sZkXHsmDMevE4dqFkTko85I3H2\nrWtHVtWddKh3IXt2hzHv11Q6dj+I1kylfe1exLQ8yjPvLkRzhWZHB5Fw2V5GzO/B8ezjcLQeXWpf\nRvamC1n70U3ExEJ2Rk169XJGCQG06LyDTc2eICfpbkJjFzCg+WDWrs1lw/KGtLjic+rntuXnmS0B\n6Dl4DUt+aETj9r+R/EM3QmunERaZxsjnp/HasEf9Hmv1jjPJWHbZCelh9VMJb76EI78MAkBCs7nx\nrUdZ8G1DkiffC8DwR+cxeXyvE9YFnKaqtLZ+9xtacy+aWZ3cTKcZI7zJajK3tXEWXvBP6Pgu/Gdh\nfv7IVr9yZPP5ZB+t7pT77FQydjU9YbulFV7zIGFNV3BkbY8TF3Z50xklWAJnjW3H7injYXNPOOpj\nNKGner/B3vPgd8/Dggdh4C3Q9iNCX00h57DbNxSaCTleTUiSg5y1Bt3VLnCB6q+F9FYlOoZTSbO7\n/8ijN19Yqiao4nZwn8rBoomqbhORc4A5QF9V3eBvf8EcDWVOnuIGrNLm90UVfkp1ttO7eQLdm8QX\n+kX0/zYnMfXH5Qzq3pHOZ11IrVoFPyQ8ftwZslq7thM4wRlKqwpVq8K2bc49qo4dc34w9nPqz8xc\nuoSB3ToTHx2PiDPscs8eiIhwgu2xY852Q0OdwFy7trO/nBznYVyL987OP95du6BePed2M7m5zisr\nC6pXd349PHu2Mzx39cGC96lb43hSU2HdoYW8My2Vo7ubEBG1nQtbnI+EKPPTphEReYjOMoIhCa35\n6CPnvmf9+jk/otsb/ivL9s9j2VJh5vHHyQ07gCwczTUdL6FDtSupVs35UVndurBpE+wKS+Kdpe8i\nAlc3H0lc427Ur++cu9nJP1B7x0CGD2jDuAmb2J6zgo5xR/j24xh2bq1JnYRJjLiiDXWPxrEp9Bt0\nXwzUSaFPbAKta8WzcKHzPvfq5QwtDo1eyEffr6ddg86MHtaGI0fgnx+uQ1t9xvmhlxIT0YXDh51h\ntFdc4axXp45Tlv/+9BGZGRG0rNWZ9CMH6BzVncu6dOTxf2xhe+RUdv8WQ9927Rn1+xZERzu3eGnf\nHlJTnXOflLyaTz7PIPxILN071GfcOOdcrtyfxISvv+WzBcvIafEF8svtXNLiYr7fMJuc3S0J7fw+\nF2Y/SEyDRpx3cRIcaI7W2srG6UM5q1pTdhzaRvKunVzVvybNa51Pt27O3QciI53PxYMPlu2eVqdC\nsIgHxqrqpe78wwCq+oyPvG/jFSxKshwsWBhzsiVtTaLvu33zRwnOvml2qQP2mcD7wqY8LnTKw6kQ\nLKoAvwF9gW3AIuB6VT1hqIh3MBCRukCGqh4XkQZAEjBIVU+8CZPLgoUxJ9+p8oVnSq/Cf2ehqtki\nchcwC2dk1CRVXSUi43A6VKaLyAXANKAucJWIPKmqbYHWwOsikotz/6pniwoUxpiKER8db0HiDGG/\n4DbGmDNYcWsWdtdZY4wxAVmwMMYYE5AFC2OMMQFZsDDGGBOQBQtjjDEBVZrRUCKSBpT2fh8NgD3l\nWJzTgR3zmcGO+cxQlmNurqpRgTJVmmBRFiKyuDhDxyoTO+Yzgx3zmeFkHLM1QxljjAnIgoUxxpiA\nLFg43qjoAlQAO+Yzgx3zmSHox2x9FsYYYwKymoUxxpiALFgYY4wJ6IwPFiIyQETWiUiyiIyp6PKU\nFxGJFpG5IrJaRFaJyD1uej0R+U5E1rt/67rpIiIT3PdhuYh0qdgjKB0RCRWRX0XkK3c+VkR+do/r\nIxEJd9OruvPJ7vKYiix3aYlIHRH5VETWisgaEYk/A87xve5neqWIfCgiEZXxPIvIJBHZLSIrPdJK\nfG5F5GY3/3oRubm05Tmjg4WIhAITgcuANsAwEWlTsaUqN9nA/araBrgQuNM9tjHAbFVtCcx258F5\nD1q6r1HAaye/yOXiHmCNx/xzwEuq2gLYB9zipt8C7HPTX3LznY5eAb5R1VZAR5xjr7TnWESaAKOB\nOFVth/OsnKFUzvP8NjDAK61E51ZE6gFPAN2BbsATeQGmxFT1jH0B8cAsj/mHgYcrulxBOtYvgEuA\ndUAjN60RsM6dfh0Y5pE/P9/p8gKauv9AFwNfAYLzq9Yq3ucb56Fc8e50FTefVPQxlPB4I4FN3uWu\n5Oe4CbAVqOeet6+ASyvreQZigJWlPbfAMOB1j/RC+UryOqNrFhR88PKkummVilv17gz8DJytqjvc\nRTuBs93pyvBevAw8COS68/WB/aqa7c57HlP+8brLD7j5TyexQBrwltv09h8RqUElPsequg14AdgC\n7MA5b0uo3OfZU0nPbbmd8zM9WFR6IlITmAr8WVUPei5T51KjUoydFpErgd2quqSiy3ISVQG6AK+p\namfgCAXNEkDlOscAbhPKIJxA2RiowYlNNWeEk31uz/RgsQ2I9phv6qZVCiIShhMo3lfVz9zkXSLS\nyF3eCNjtpp/u70UPYKCIpABTcJqiXgHqiEjes+Y9jyn/eN3lkUD6ySxwOUgFUlX1Z3f+U5zgUVnP\nMUA/YJOqpqlqFvAZzrmvzOfZU0nPbbmd8zM9WCwCWrojKcJxOsqmV3CZyoWICPBfYI2qvuixaDqQ\nNyLiZpy+jLz0m9xRFRcCBzyqu6c8VX1YVZuqagzOeZyjqjcAc4Ehbjbv4817H4a4+U+rK3BV3Qls\nFZHz3aS+wGoq6Tl2bQEuFJHq7mc875gr7Xn2UtJzOwvoLyJ13VpZfzet5Cq6A6eiX8DlwG/ABuDR\nii5POR7XRThV1OXAUvd1OU577WxgPfA9UM/NLzgjwzYAK3BGm1T4cZTy2BOAr9zpc4CFQDLwCVDV\nTY9w55Pd5edUdLlLeaydgMXuef4cqFvZzzHwJLAWWAm8B1StjOcZ+BCnXyYLpxZ5S2nOLfBH9/iT\ngZGlLY/d7sMYY0xAZ3ozlDHGmGKwYGGMMSYgCxbGGGMCsmBhjDEmIAsWxhhjArJgYUwAIpIjIks9\nXuV2d2IRifG8q6gxp6oq
gbMYc8Y7qqqdKroQxlQkq1kYU0oikiIiz4vIChFZKCIt3PQYEZnjPldg\ntog0c9PPFpFpIrLMff3O3VSoiLzpPqPhWxGp5uYfLc7zSJaLyJQKOkxjAAsWxhRHNa9mqD94LDug\nqu2Bf+Lc9RbgVeAdVe0AvA9McNMnAD+oakecezitctNbAhNVtS2wH7jWTR8DdHa3c3uwDs6Y4rBf\ncBsTgIgcVtWaPtJTgItVdaN708adqlpfRPbgPHMgy03foaoNRCQNaKqqxz22EQN8p87DbBCRh4Aw\nVX1aRL4BDuPcxuNzVT0c5EM1xi+rWRhTNupnuiSOe0znUNCXeAXO/X66AIs87qpqzElnwcKYsvmD\nx98kd3oBzp1vAW4A5rvTs4E7IP9Z4ZH+NioiIUC0qs4FHsK5tfYJtRtjTha7UjEmsGoistRj/htV\nzRs+W1dEluPUDoa5aXfjPL3uLzhPshvppt8DvCEit+DUIO7AuauoL6HAZDegCDBBVfeX2xEZU0LW\nZ2FMKbl9FnGquqeiy2JMsFkzlDHGmICsZmGMMSYgq1kYY4wJyIKFMcaYgCxYGGOMCciChTHGmIAs\nWBhjjAno/wGVkooxFkdVNgAAAABJRU5ErkJggg==\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xl8FeXZ//HPlY19BxcgGlRUVgFT\nNKWW4Fa0Kg+VKhSLa608tba1tqXWWks3tf7UaqlP6WKrqMijjxZXWpEUtYAERBSQRYgmrCHssmS7\nfn/M5HAIJwshhxM43/frdV7Mcs8918yEc525Z+Yec3dEREQAUhIdgIiINB1KCiIiEqGkICIiEUoK\nIiISoaQgIiIRSgoiIhKhpCCNysxSzWyXmZ3UmGUTycxOM7NGv3fbzC40s4Ko8eVmdl59yjZgXX82\nszsbunwt9f7SzP7W2PVK4qQlOgBJLDPbFTXaEtgHVITj33T3pw6lPnevAFo3dtlk4O5nNEY9ZnYT\ncI2750bVfVNj1C3HPiWFJOfukS/l8JfoTe7+Rk3lzSzN3cuPRGwicuSp+UhqFTYPPGtmz5jZTuAa\nM8sxs7lmts3M1pvZI2aWHpZPMzM3s6xwfEo4/zUz22lmc8ysx6GWDedfYmYrzGy7mT1qZu+Y2XU1\nxF2fGL9pZqvMbKuZPRK1bKqZPWRmJWa2Ghhey/75iZlNrTZtkpk9GA7fZGbLwu35OPwVX1NdRWaW\nGw63NLMnw9iWAGdXK3uXma0O611iZleE0/sBvwfOC5vmNkft23uilr8l3PYSM3vRzE6sz76pi5mN\nDOPZZmZvmtkZUfPuNLN1ZrbDzD6K2tZzzWxhOH2jmf22vuuTOHB3ffTB3QEKgAurTfslUApcTvAj\nogXwOeAcgjPNU4AVwK1h+TTAgaxwfAqwGcgG0oFngSkNKHscsBMYEc67HSgDrqthW+oT4z+AdkAW\nsKVq24FbgSVAd6ATMDv4rxJzPacAu4BWUXVvArLD8cvDMgacD+wB+ofzLgQKouoqAnLD4QeAPKAD\ncDKwtFrZq4ATw2PytTCG48N5NwF51eKcAtwTDl8cxjgAaA78AXizPvsmxvb/EvhbONwrjOP88Bjd\nCSwPh/sAnwAnhGV7AKeEw/OBMeFwG+CcRP9fSOaPzhSkPt5295fcvdLd97j7fHef5+7l7r4amAwM\nrWX559w9393LgKcIvowOtexlwCJ3/0c47yGCBBJTPWP8jbtvd/cCgi/gqnVdBTzk7kXuXgLcW8t6\nVgMfEiQrgIuAre6eH85/yd1Xe+BNYCYQ82JyNVcBv3T3re7+CcGv/+j1TnP39eExeZogoWfXo16A\nscCf3X2Ru+8FJgBDzax7VJma9k1tRgPT3f3N8BjdS5BYzgHKCRJQn7AJck247yBI7j3NrJO773T3\nefXcDokDJQWpj8LoETM708xeMbMNZrYDmAh0rmX5DVHDu6n94nJNZbtGx+HuTvDLOqZ6xlivdRH8\nwq3N08CYcPhr4XhVHJeZ2Twz22Jm2wh+pde2r6qcWFsMZnadmb0fNtNsA86sZ70QbF+kPnffAWwF\nukWVOZRjVlO9lQTHqJu7Lwe+T3AcNoXNkSeERa8HegPLzexdM7u0ntshcaCkIPVR/XbMPxL8Oj7N\n3dsCdxM0j8TTeoLmHADMzDjwS6y6w4lxPZAZNV7XLbPTgAvNrBvBGcPTYYwtgOeA3xA07bQH/lnP\nODbUFIOZnQI8BowHOoX1fhRVb123z64jaJKqqq8NQTPV2nrEdSj1phAcs7UA7j7F3YcQNB2lEuwX\n3H25u48maCL8f8DzZtb8MGORBlJSkIZoA2wHPjOzXsA3j8A6XwYGmdnlZpYGfAfoEqcYpwHfNbNu\nZtYJ+FFthd19A/A28DdgubuvDGc1AzKAYqDCzC4DLjiEGO40s/YWPMdxa9S81gRf/MUE+fEbBGcK\nVTYC3asurMfwDHCjmfU3s2YEX85vuXuNZ16HEPMVZpYbrvsHBNeB5plZLzMbFq5vT/ipJNiAr5tZ\n5/DMYnu4bZWHGYs0kJKCNMT3gWsJ/sP/keCCcFy5+0bgauBBoAQ4FXiP4LmKxo7xMYK2/w8ILoI+\nV49lnia4cBxpOnL3bcD3gBcILtaOIkhu9fEzgjOWAuA14ImoehcDjwLvhmXOAKLb4f8FrAQ2mll0\nM1DV8q8TNOO8EC5/EsF1hsPi7ksI9vljBAlrOHBFeH2hGXA/wXWgDQRnJj8JF70UWGbB3W0PAFe7\ne+nhxiMNY0HTrMjRxcxSCZorRrn7W4mOR+RYoTMFOWqY2fCwOaUZ8FOCu1beTXBYIscUJQU5mnwB\nWE3QNPElYKS719R8JCINoOYjERGJ0JmCiIhEHHUd4nXu3NmzsrISHYaIyFFlwYIFm929ttu4gaMw\nKWRlZZGfn5/oMEREjipmVteT+YCaj0REJIqSgoiIRMQ1KYT3lS8P+2WfEGP+SWY2y8zeM7PF6ghL\nRCSx4nZNIXzidBJBV8JFwHwzm+7uS6OK3QVMc/fHzKw38CpB/+0i0kSUlZVRVFTE3r17Ex2K1EPz\n5s3p3r076ek1dX1Vu3heaB4MrKrqMz18O9UIgpeFVHGgbTjcjqDbAhFpQoqKimjTpg1ZWVkEndNK\nU+XulJSUUFRURI8ePepeIIZ4Nh9148D+4Is4uKvjewhe71hEcJbw7VgVmdnNZpZvZvnFxcXxiFVE\narB37146deqkhHAUMDM6dep0WGd1ib7QPIbgVX7dCXpKfDLsg/0A7j7Z3bPdPbtLlzpvs41pTuEc\nfvPWb5hTOOfwIhZJQkoIR4/DPVbxbD5ay4EvCYm8bCPKjYQvRXf3OeGLNToTvD+20cwpnMMFT1xA\naUUpGakZzBw3k5zMnMZchYjIMSGeZwrzCd672sPMMgjf31qtzKeELx0JX4TSnKCzs0aVV5BHaUUp\nFV5BaUUpeQV5jb0KEYmTkpISBgwYwIABAzjhhBPo1q1bZLy0tH6vXbj++utZvnx5rWUmTZrEU089\n1Rgh84UvfIFFixY1Sl1HWtzOFNy93MxuBWYQvHrvr+6+xMwmAvnuPp3gRSh/MrPvEVx0vs7j0ENf\nblYuGakZkTOF3Kzcxl6FiMRJp06dIl+w99xzD61bt+aOO+44oIy74+6kpMT+nfv444/XuZ5vfetb\nhx/sMSCu1xTc/VV3P93dT3X3X4XT7g4TAu6+1N2HuPtZ7j7A3f8ZjzhyMnOYOW4mvxj2CzUdiRwB\nR+Ia3qpVq+jduzdjx46lT58+rF+/nptvvpns7Gz69OnDxIkTI2WrfrmXl5fTvn17JkyYwFlnnUVO\nTg6bNgWt1XfddRcPP/xwpPyECRMYPHgwZ5xxBv/5z38A+Oyzz7jyyivp3bs3o0aNIjs7u84zgilT\nptCvXz/69u3LnXfeCUB5eTlf//rXI
9MfeeQRAB566CF69+5N//79ueaaaxp9n9XHUdf3UUPlZOYo\nGYgcAUfyGt5HH33EE088QXZ2NgD33nsvHTt2pLy8nGHDhjFq1Ch69+59wDLbt29n6NCh3Hvvvdx+\n++389a9/ZcKEg56txd159913mT59OhMnTuT111/n0Ucf5YQTTuD555/n/fffZ9CgQbXGV1RUxF13\n3UV+fj7t2rXjwgsv5OWXX6ZLly5s3ryZDz74AIBt27YBcP/99/PJJ5+QkZERmXakJfruIxE5xhzJ\na3innnpqJCEAPPPMMwwaNIhBgwaxbNkyli5detAyLVq04JJLLgHg7LPPpqCgIGbdX/nKVw4q8/bb\nbzN69GgAzjrrLPr06VNrfPPmzeP888+nc+fOpKen87WvfY3Zs2dz2mmnsXz5cm677TZmzJhBu3bt\nAOjTpw/XXHMNTz31VIMfPjtcSgoi0qiqruGlWmrcr+G1atUqMrxy5Up+97vf8eabb7J48WKGDx8e\n8379jIyMyHBqairl5eUx627WrFmdZRqqU6dOLF68mPPOO49JkybxzW9+E4AZM2Zwyy23MH/+fAYP\nHkxFRUWjrrc+lBREpFEl6hrejh07aNOmDW3btmX9+vXMmDGj0dcxZMgQpk2bBsAHH3wQ80wk2jnn\nnMOsWbMoKSmhvLycqVOnMnToUIqLi3F3vvrVrzJx4kQWLlxIRUUFRUVFnH/++dx///1s3ryZ3bt3\nN/o21CVprimIyJGTiGt4gwYNonfv3px55pmcfPLJDBkypNHX8e1vf5tx48bRu3fvyKeq6SeW7t27\n84tf/ILc3Fzcncsvv5wvf/nLLFy4kBtvvBF3x8y47777KC8v52tf+xo7d+6ksrKSO+64gzZt2jT6\nNtTlqHtHc3Z2tuslOyJHzrJly+jVq1eiw2gSysvLKS8vp3nz5qxcuZKLL76YlStXkpbWtH5fxzpm\nZrbA3bNrWCSiaW2JiEgTtmvXLi644ALKy8txd/74xz82uYRwuI6trRERiaP27duzYMGCRIcRV7rQ\nLCIiEUoKIiISoaQgIiIRSgoiIhKhpCAiTdqwYcMOehDt4YcfZvz48bUu17p1awDWrVvHqFGjYpbJ\nzc2lrlvcH3744QMeIrv00ksbpV+ie+65hwceeOCw62lsSgoi0qSNGTOGqVOnHjBt6tSpjBkzpl7L\nd+3aleeee67B66+eFF599VXat2/f4PqaOiUFEWnSRo0axSuvvBJ5oU5BQQHr1q3jvPPOizw3MGjQ\nIPr168c//vGPg5YvKCigb9++AOzZs4fRo0fTq1cvRo4cyZ49eyLlxo8fH+l2+2c/+xkAjzzyCOvW\nrWPYsGEMGzYMgKysLDZv3gzAgw8+SN++fenbt2+k2+2CggJ69erFN77xDfr06cPFF198wHpiWbRo\nEeeeey79+/dn5MiRbN26NbL+qq60qzri+/e//x15ydDAgQPZuXNng/dtLHpOQUTq7bvfhcZ+odiA\nARB+n8bUsWNHBg8ezGuvvcaIESOYOnUqV111FWZG8+bNeeGFF2jbti2bN2/m3HPP5YorrqjxPcWP\nPfYYLVu2ZNmyZSxevPiArq9/9atf0bFjRyoqKrjgggtYvHgxt912Gw8++CCzZs2ic+fOB9S1YMEC\nHn/8cebNm4e7c8455zB06FA6dOjAypUreeaZZ/jTn/7EVVddxfPPP1/r+xHGjRvHo48+ytChQ7n7\n7rv5+c9/zsMPP8y9997LmjVraNasWaTJ6oEHHmDSpEkMGTKEXbt20bx580PY23XTmYKINHnRTUjR\nTUfuzp133kn//v258MILWbt2LRs3bqyxntmzZ0e+nPv370///v0j86ZNm8agQYMYOHAgS5YsqbOz\nu7fffpuRI0fSqlUrWrduzVe+8hXeeustAHr06MGAAQOA2rvnhuD9Dtu2bWPo0KEAXHvttcyePTsS\n49ixY5kyZUrkyekhQ4Zw++2388gjj7Bt27ZGf6JaZwoiUm+1/aKPpxEjRvC9732PhQsXsnv3bs4+\n+2wAnnrqKYqLi1mwYAHp6elkZWXF7C67LmvWrOGBBx5g/vz5dOjQgeuuu65B9VSp6nYbgq6362o+\nqskrr7zC7Nmzeemll/jVr37FBx98wIQJE/jyl7/Mq6++ypAhQ5gxYwZnnnlmg2OtTmcKItLktW7d\nmmHDhnHDDTcccIF5+/btHHfccaSnpzNr1iw++eSTWuv54he/yNNPPw3Ahx9+yOLFi4Gg2+1WrVrR\nrl07Nm7cyGuvvRZZpk2bNjHb7c877zxefPFFdu/ezWeffcYLL7zAeeedd8jb1q5dOzp06BA5y3jy\nyScZOnQolZWVFBYWMmzYMO677z62b9/Orl27+Pjjj+nXrx8/+tGP+NznPsdHH310yOusjc4UROSo\nMGbMGEaOHHnAnUhjx47l8ssvp1+/fmRnZ9f5i3n8+PFcf/319OrVi169ekXOOM466ywGDhzImWee\nSWZm5gHdbt98880MHz6crl27MmvWrMj0QYMGcd111zF48GAAbrrpJgYOHFhrU1FN/v73v3PLLbew\ne/duTjnlFB5//HEqKiq45ppr2L59O+7ObbfdRvv27fnpT3/KrFmzSElJoU+fPpG3yDUWdZ0tIrVS\n19lHn8PpOlvNRyIiEhHXpGBmw81suZmtMrMJMeY/ZGaLws8KMzv8xwRFRKTB4nZNwcxSgUnARUAR\nMN/Mprt75D4vd/9eVPlvAwPjFY+INFzVayOl6TvcSwLxPFMYDKxy99XuXgpMBUbUUn4M8Ewc4xGR\nBmjevDklJSWH/WUj8efulJSUHNYDbfG8+6gbUBg1XgScE6ugmZ0M9ADerGH+zcDNACeddFLjRiki\nterevTtFRUUUFxcnOhSph+bNm9O9e/cGL99UbkkdDTzn7hWxZrr7ZGAyBHcfHcnARJJdeno6PXr0\nSHQYcoTEs/loLZAZNd49nBbLaNR0JCKScPFMCvOBnmbWw8wyCL74p1cvZGZnAh2AOXGMRURE6iFu\nScHdy4FbgRnAMmCauy8xs4lmdkVU0dHAVNdVLBGRhIvrNQV3fxV4tdq0u6uN3xPPGEREpP70RLOI\niEQoKYiISISSgoiIRCgpiIhIhJKCiIhEKCmIiEiEkoKIiEQoKYiISISSgoiIRCgpiIhIhJKCiIhE\nKCmIiEiEkoKIiEQoKYiISISSgoiIRCgpiIhIhJKCiIhEKCmIiEiEkoKIiEQoKYiISISSgoiIRCgp\niIhIRFyTgpkNN7PlZrbKzCbUUOYqM1tqZkvM7Ol4xiMiIrVLi1fFZpYKTAIuAoqA+WY23d2XRpXp\nCfwYGOLuW83suHjFIyIidYvnmcJgYJW7r3b3UmAqMKJamW8Ak9x9K4C7b4pjPCIiUod4JoVuQGHU\neFE4LdrpwOlm9o6ZzTWz4bEqMrObzSzfzPKLi4vjFK6IiCT6QnMa0BPIBcYAfzKz9tULuftkd8
92\n9+wuXboc4RBFRJJHPJPCWiAzarx7OC1aETDd3cvcfQ2wgiBJiIhIAsQzKcwHeppZDzPLAEYD06uV\neZHgLAEz60zQnLQ6jjGJiEgt4pYU3L0cuBWYASwDprn7EjObaGZXhMVmACVmthSYBfzA3UviEc8b\nb8C3vw2lpfGoXUTk2GDunugYDkl2drbn5+cf8nK//S388Iewaxe0ahWHwEREmjAzW+Du2XWVS/SF\n5iMmJdzSiorExiEi0pQlXVKorExsHCIiTVnSJIXU1OBfJQURkZolTVJQ85GISN2SLinoTEFEpGZJ\nkxTUfCQiUrekSQpqPhIRqVvSJQWdKYiI1CxpkoKaj0RE6pY0SUHNRyIidUu6pKAzBRGRmiVNUlDz\nkYhI3ZImKaj5SESkbkmXFHSmICJSs6RJCmo+EhGpW9IkBTUfiYjULemSgs4URERqljRJQc1HIiJ1\nS5qkoOYjEZG6JV1S0JmCiEjNkiYpqPlIRKRuSZMU1HwkIlK3uCYFMxtuZsvNbJWZTYgx/zozKzaz\nReHnpnjFouYjEZG6pcWrYjNLBSYBFwFFwHwzm+7uS6sVfdbdb41XHFXUfCQiUrd4nikMBla5+2p3\nLwWmAiPiuL5aqflIRKRu8UwK3YDCqPGicFp1V5rZYjN7zswyY1VkZjebWb6Z5RcXFzcoGDUfiYjU\nrV5JwcxONbNm4XCumd1mZu0bYf0vAVnu3h/4F/D3WIXcfbK7Z7t7dpcuXRq0oqrmo6mLpzGncE7D\nohUROcbV90zheaDCzE4DJgOZwNN1LLM2LFelezgtwt1L3H1fOPpn4Ox6xnPIPty0GIAn33+aC564\nQIlBRCSG+iaFSncvB0YCj7r7D4AT61hmPtDTzHqYWQYwGpgeXcDMouu4AlhWz3gO2cIN+QBUVjql\nFaXkFeTFa1UiIket+iaFMjMbA1wLvBxOS69tgTCJ3ArMIPiyn+buS8xsopldERa7zcyWmNn7wG3A\ndYe6AfU1ODM4CUkhjYzUDHKzcuO1KhGRo1Z9b0m9HrgF+JW7rzGzHsCTdS3k7q8Cr1abdnfU8I+B\nH9c/3IYb2PUsAEb3Hsut4+4gJzPnSKxWROSoUq+kED5bcBuAmXUA2rj7ffEMrLFV3X004oyvkBPz\nHicREanv3Ud5ZtbWzDoCC4E/mdmD8Q2tcVXdfaTnFEREalbfawrt3H0H8BXgCXc/B7gwfmE1vvTw\nCkh5eWLjEBFpyuqbFNLCO4WuYv+F5qNKVVIoLU1sHCIiTVl9k8JEgruIPnb3+WZ2CrAyfmE1voyM\n4F8lBRGRmtX3QvP/Av8bNb4auDJeQcVDVVIoK0tsHCIiTVl9LzR3N7MXzGxT+HnezLrHO7jGpDMF\nEZG61bf56HGCp5G7hp+XwmlHDV1TEBGpW32TQhd3f9zdy8PP34CG9UyXIFVJQc1HIiI1q29SKDGz\na8wsNfxcA5TEM7DGlpICaWk6UxARqU19k8INBLejbgDWA6OIYz9F8ZKerqQgIlKbeiUFd//E3a9w\n9y7ufpy7/xdH2d1HEFxsVlIQEanZ4bx57fZGi+IIycjQNQURkdocTlKwRoviCNGZgohI7Q4nKXij\nRXGE6JqCiEjtan2i2cx2EvvL34AWcYkojtR8JCJSu1qTgru3OVKBHAnltpsP1n3KnMKtesmOiEgM\nh9N8dFSZUziHNTtWsGTDKi544gLmFM5JdEgiIk1O0iSFvII8PHUfVKRTWlFKXkFeokMSEWlykiYp\n5GblkpJaBpUZZKRmkJuVm+iQRESanHp1nX0syMnMYWC37WzeuYNnxs3UNQURkRiSJikAdGnbjpSy\nduRkZiY6FBGRJimuzUdmNtzMlpvZKjObUEu5K83MzSw7nvHollQRkdrFLSmYWSowCbgE6A2MMbPe\nMcq1Ab4DzItXLFX0RLOISO3ieaYwGFjl7qvdvRSYCoyIUe4XwH3A3jjGAuiJZhGRusQzKXQDCqPG\ni8JpEWY2CMh091dqq8jMbjazfDPLLy4ubnBAOlMQEaldwm5JNbMU4EHg+3WVdffJ7p7t7tldujT8\nhW+6piAiUrt4JoW1QPRtPt3DaVXaAH2BPDMrAM4FpsfzYrOaj0REahfPpDAf6GlmPcwsAxgNTK+a\n6e7b3b2zu2e5exYwF7jC3fPjFVDJvnXs2rNPXVyIiNQgbknB3cuBW4EZwDJgmrsvMbOJZnZFvNZb\nkzmFc3h+xdPs24f6PhIRqUFcH15z91eBV6tNu7uGsrnxjCWvII+K1DKoaMa+sjLyCvL0VLOISDVJ\n1fdRanpwlTmDNur7SEQkhqRJCjmZOXwr5wYAXrzydZ0liIjEkDRJAaBX1x4A9O88OMGRiIg0TUmV\nFJo3D/7dG/dnp0VEjk5KCiIiEpGUSWHPnsTGISLSVCVlUtCZgohIbEoKIiISkVRJoUWL4N8n8qfp\niWYRkRiSKiks3/Y+AE8unKauLkREYkiqpLBo81wAKssyKK0oJa8gL7EBiYg0MUmVFL5wStArt5W3\nIiM1Q11diIhUk1RJYcgpZwNw2SmjmDluprq6EBGpJq69pDY1VXcfDe3+JXIyay8rIpKMkupMQbek\niojULqmSQno6pKQoKYiI1CSpkoJZcLagpCAiEltSJQWAtIwy3l6dr2cURERiSKqkMKdwDjsqiplX\n8L4eXhMRiSGpkkJeQR6k7cHLmunhNRGRGJIqKeRm5WLp+6CihR5eExGJIameU8jJzOGM43fhrTN4\nXA+viYgcJK5nCmY23MyWm9kqM5sQY/4tZvaBmS0ys7fNrHc84wHo1LY13VuepoQgIhJD3JKCmaUC\nk4BLgN7AmBhf+k+7ez93HwDcDzwYr3iq6JZUEZGaxfNMYTCwyt1Xu3spMBUYEV3A3XdEjbYCPI7x\nAEFS0Os4RURii+c1hW5AYdR4EXBO9UJm9i3gdiADOD9WRWZ2M3AzwEknnXRYQelMQUSkZgm/+8jd\nJ7n7qcCPgLtqKDPZ3bPdPbtLly6Htb6dlcWs37pVzyiIiMQQz6SwFojui7R7OK0mU4H/imM8zCmc\nw8zCl9i6c58eXhMRiSGeSWE+0NPMephZBjAamB5dwMx6Ro1+GVgZx3jIK8ijMm0nlLbSw2siIjHE\n7ZqCu5eb2a3ADCAV+Ku7LzGziUC+u08HbjWzC4EyYCtwbbzigeDhtdRmsykva0l6ih5eExGpztzj\nfsNPo8rOzvb8/PwGLz/+R5/wP/efzKwVc8nteW4jRiYi0nSZ2QJ3z66rXMIvNB9pvbqdDEC/jkoI\nIiLVJV1SaNUq+PezzxIbh4hIU5R0SaFly+BfJQURkYMlXVIo3P0RAHM+XpzgSEREmp6kSgpzCufw\n03duB2D8i3foOQURkWqSKinkFeRRnrodgPK9zfScgohINUmVFHKzcklvXgpAWkVbPacgIlJNUiWF\nnMwcplz9RwB+8Lmf650KIiLVJFVSADiv5yAATmx2W
oIjERFpepIuKVQ9p/Dyh2/qQrOISDVJlxTe\nL5kDVsGMJfPUU6qISDVJlxRmf5oHLbbge9qrp1QRkWri+ea1Jik3KxdruQX2dCYjVT2liohES7qk\nkJOZQ9+Td/CZN2PKuJm6A0lEJErSNR8BtOtQzp4dLRIdhohIk5N0SWFO4RzmlLzC+k2lutAsIlJN\n0iWFvII8KlsUw+5OutAsIlJN0iWF3KxcUlttg/KWpFeqqwsRkWhJlxRyMnMYc84FAPz07N/rQrOI\nSJSkSwpzCufw7Np7Afj5C0/qmoKISJSkSwp5BXmUt1sBQFlJpq4piIhESbqkkJuVS0aHTZBSBltP\npVPLTokOSUSkyYhrUjCz4Wa23MxWmdmEGPNvN7OlZrbYzGaa2cnxjAeCawq/+/L/g/YF+JZT+O7r\n31UTkohIKG5JwcxSgUnAJUBvYIyZ9a5W7D0g2937A88B98crnmglu0ug0wooPpN95fvUhCQiEorn\nmcJgYJW7r3b3UmAqMCK6gLvPcvfd4ehcoHsc44no1LITHL8YNp9JZXmqmpBERELxTArdgMKo8aJw\nWk1uBF6LNcPMbjazfDPLLy4uPuzA3lv/Hhz3AVSmQ8npwbiIiDSNC81mdg2QDfw21nx3n+zu2e6e\n3aVLl8ZZ6YlhIvjki2zYtaFx6hQROcrFMymsBTKjxruH0w5gZhcCPwGucPd9cYwnYtxZ40g77mPo\nsgQWf53py1/SxWYREeKbFObGnLyAAAASQklEQVQDPc2sh5llAKOB6dEFzGwg8EeChLApjrEcICcz\nh89n5sDn/gBFOVQuuI4Jbxx0c5SISNKJW1Jw93LgVmAGsAyY5u5LzGyimV0RFvst0Br4XzNbZGbT\na6iu0RXvLobs/4EeM+G1R5k9K53ZH89l164jFYGISNNj7p7oGA5Jdna25+fnH3Y9I58dyYsfvQg7\nToQn3oCdXWFfewAqK2H9emjdGtq2PexViYgknJktcPfsuso1iQvNifDDz/8wGGi7HkaPhLS9kXmZ\nlzxLt25w3nkJCk5EJEGSNinkZObwwyFhYui8Am4ZCB1XArB2xtUALF4M4x/4Jzt2wN69NdVUszVr\n4OmnGytikfp55RVIS4Pt2xMdiTSWrVuha1d48sn4rytpkwLAfRfex8WnXByMtNkAt50Ot50KPd6I\nlPmfH1xMu3bQogWk9f0HJ4/4G68ums/dd0NJCezeDQUFcPnlsGXLgfV/8YswdiwsWACffrp/+q5d\nQRNVLHv3wrx5wb8lJY27vQ21YQO8/XbN8xcsgC5d4C9/gS99CY6yFsljSmUlXHYZVFTAypWNX/97\n7wV1H03mzoWrr45/3GvXxv7bb4z/D5s2BU3aKUfiG9vdj6rP2Wef7Y3t4icudu7hwM8POzq5dzvH\nL/LgsNb/Y603xJz+l7+4X3VVMPyNb7gXFrq/84776ae7v/lmEMtNNwXz+/YN/q2sdC8vd3/33WC4\nsjL2Nqxf73733e7btx84fdMm96VLG75vNm50P+mkIJaKithlRo8+cDu3bm34+hJtw4ZgG1599fDq\n2bvXfcWKQ1/3vn2Ht94HH9x/HG64wf2NNw4us3mz+5Yth1bvyy+733prUO899+yfXlkZ7Ktnnqn5\nb/NwVdW7ZUvs7anJm28G/3eOPz6Iu6io4THMmuXerdvB/7+qvP9+sI7HHjtw+tatwfTvfz/Yjkce\ncf/Nbw59/W+9FdTzz38e+rJVgHyvx3dswr/kD/UTj6Tg7j72+bEHJ4aqz09TnW+d4VybGySK7D84\nrdcdcrKo89Nic+zpGTsjw6nt13rHSx/ylI6rPSP7SW/zjf/yZiO+c9AyF497z5cu3T/+3w++5j//\n/XK/9GeP+lcfneizVsz10lL3V14J5j/8sHtpafCf7qKL3L/znSChRNc5dap7jx7B/Fmzgj/y/Hz3\nq68+sNxHH7l//HHwKSlxnzbNfceOIGllZgb/sWbODJLMO++4/+AH7qtXuz/7rPuvf+2+c6f7lCnu\n//3fB3/RFBa6FxcHw5WV7i+95L57t/s//uHeu3dQ9wsv1PwFtXp1kJD37Nk/beFC92XLgi+3qm24\n8ML980tKgi+98vKa/342bXL/+9/d161z//RT97Fjg3p27txfZssW9yVLgmTx618H279ypfvtt7uf\nfXZQ/vzzD479o4+CT3VlZQdP+/rXD/77mTrV/b33gvk/+UkwrXnzmrflrbfcJ048cFr1OquS1+TJ\n+6fNmuX+ySfBl/C0abHrLi0N/p0wITjG1c2aFRyPqmO8aFFQ9+zZ7sOHB8MffOB+7bXuL77o/u9/\nB/vxuefcn3wyWKay0v3pp4OyDzzg3rnz/uUOxYgR7pdeGgzn5AR15OUdWOb994P6b7ghmD98eBD7\nxo3B/P/8Z//+mTRp/7C7+4wZ+8dff73mOMrKgu2D/cexIeqbFJL27qNYJi+YzMNzH+aTbZ+wu3x3\n3QsAOOAGGOw6AXZ0gz2doCId1p8N5c2g6FzoshR2nQgrLgu614iWUgqVGY29OYfv1H/BxxclNAQ7\nYTHpZ/wLT92DnfwOpY8HPaF0HPJ/pKVVsunfo+pVz3Gff509GzPZ+XEfAIYOL2FF4RY69J/D0mfG\nxVym06C3KFm4/26DFp2K6fOludjmXnz+mjc5/8xB5L3WiXcWbOXd6YNi1vHVu6bz3+OO44SKc+nV\n68B5t94Kv//9wcuMHBk0efTrBxkZ8PLLwfTzxj9Dy47byPj4K5zb73h+8hO46ir46U/huOPgjTfg\nZz+DVati74Pnn4crr9w/vmAB/Ou9j1jsT1H4Xi/e+sPXaNYM9oWPkP7o+Ufp1fKLpJWcxTXXHFiX\nWfB11r5jGdu2BH/P3/oWTJq0v8yUKUGzY6tWcMklcPHFQfPWwoUwKNxdv/xlMP9vf4Phw+G++/Yv\n/9BDQX01bU+Vi276N//681AA/vxnuOmm/fOatdzHvt3NAHj2WWjeHJ56Kti/n34Knx81n1vHp9N8\n+wDmzoV77w22be1a6B72xLZlC3TsGAzfcQcMHAgXXBAsP3hwzXGVlwfH97vfDcZ79YJly4LhRYtg\n/HiYEz4z26ULFBXBOwXzeGn+IoadcTYttmWzZMn+5SFoQjrhhNr3R03qe/eRkkINJi+YzK/f+jXF\nnxVTWllKeWV53NeJA/vaBu962HUCtCuEbVmwtx2k74YdmcFdUttPCpJO2yJY9znotBw294JNfWFr\nDxj6C/j4YtjRHU57DVpvgDd/Bbs7Q9f5UHI6NNsZzN91YrBuK4fOy6G4D6TuhYrm0K4AtmdB2m4o\nbwltC4MYRCQhvvj4UO694N4GvUZYSaGRzSmcw/3v3M/corls3buV8spyzAzgyCSMpqoyBSoygmRV\nmQrmwb9lraDFNqhIg8o0SC0Fqwzm7ewKLTcHZ1TNdgTjbdZB2h7Y1w4+Ow4ydgXJMGNXkOw6rYTt\nmZC2L1hf
y+IgobUoAU+B4z8I6t4wIKi3Mg3ar4FVw+G4D6G8BaSUB8m0y1J47wZoXxDE3b4AdnYL\nuj3pugA+vBrKm0P7T4KkXN48SKDHvx8OZwb1pO2Dkp5w3JJgXfvaQWmrIP7SNvBZl2C9nZZDu0+D\n9Ww8CzafCSfPhnXZsKcDnPxW8GNgR/dgf2TsCs4o2xfAJbfB4rFB2Y4fw5ZTofDzcPrLwXYtuTrY\ndy03B/NalgQ/KloVw/pBwY+LDquDuvDgx8Wp/4LiXrD9ZChtHezPytRg/5w2IzizrUwNznRLekKv\n/4PVF0GrTcEPg33tgviLewUxp5bCKW8EPzaqtn3nicE6i3sH+2dHNxj0Z/js+CDOohw4/aVg/78/\nLpje81Xo8yxs7A9bTw1iTd0HHdYEP2g29gu2tfWGYNtbbIU1w4LjbRVB/FYJW06DLT2Dv6eUiqCe\ntkXBMSttDZd+OzjGhUOC6ZWpkL4nqOvEheHfSbPgOLUvCJZvvg1abQz+TlZcFow32w79nobF1wTb\n3GVp8De0LSs47nvbB8ey+9xg3uYzg/2zrw0sugH6TQnW3+5TWHZlsH89BTL/A2vOh+Vhp9K9ngvq\ny70HuuWTlpLG7OtmH3JiUFI4guYUziGvII/crFyASPLYsW8H5X5gwqj0yqDdLkwoVcMplhKZ5+5U\nUsPtSSKS9H59/q/58Xk/PqRl6psU0hoclUTkZOYckLVfGP3CYddZdX3DzLjs9MtYsXkFy0uW0yyt\nGfvK99GlVRc6Nu/ICa1PoG3ztjz74bPs2LeDzi07s3PfzkhCSrVU0lLSqKisOGi8tLK0zgQVazy6\nrLtT4UfZPYoiR7G0lLTID9B40JmCHLbJCybz/NLnGXDiANo3a0+nlp14b/17LC1eyt7yveT2yGXH\n3h1A0EMtQF5BHtv2bSNvTR5d23aNPGH+xPtPRLoyP6H1CQw8cWDkfRdVw3OL5rJ251qapTZjV+ku\nSitKKa0oBYMUSyHVUmmR3gKAsooy0lPTyUgNLuSXVpSyt2wvjpOWEvwmqppfWlFKWUUZrTJa0Saj\nDcWfFZMS3hheVQ/A3rK9B50BVolOnukp6RgWia1KRkoYS2UpAC3SWpCemn5AvdWTcHS5z0o/O6D5\nMlbyTk1JJc3SKK8sj2xnaWUplZWVB5yFpqWk0bZZW8oqythTtieyD6tvE0CqpbKvYn9HxqmWSmpK\nao3bHx0bHNjMmmrBcnX96KiaV32/Rc+PtX4g5o8Vw3D2f+c1S21GqqVG6qy+XAopB+yvFFIws4Pi\nrr5cqqVGfjTVNi/W9kfvp6pj5+5kpGbwuW6f0zWF6pQURA5PdHPnoX65JGrZhqhaX6eWnSjZXRJZ\nb11xVJ9f9aPnyt5XcvPZN9e5vlj1Hsq2x2s/KSmIiEiEOsQTEZFDpqQgIiIRSgoiIhKhpCAiIhFK\nCiIiEqGkICIiEUfdLalmVgx80sDFOwObGzGco4G2OTlom5PD4Wzzye7epa5CR11SOBxmll+f+3SP\nJdrm5KBtTg5HYpvVfCQiIhFKCiIiEpFsSWFyogNIAG1zctA2J4e4b3NSXVMQEZHaJduZgoiI1EJJ\nQUREIpIiKZjZcDNbbmarzGxCouNpLGaWaWazzGypmS0xs++E0zua2b/MbGX4b4dwupnZI+F+WGxm\ngxK7BQ1nZqlm9p6ZvRyO9zCzeeG2PWtmGeH0ZuH4qnB+ViLjbigza29mz5nZR2a2zMxyjvXjbGbf\nC/+uPzSzZ8ys+bF2nM3sr2a2ycw+jJp2yMfVzK4Ny680s2sPJ6ZjPimYWSowCbgE6A2MMbPeiY2q\n0ZQD33f33sC5wLfCbZsAzHT3nsDMcByCfdAz/NwMPHbkQ2403wGWRY3fBzzk7qcBW4Ebw+k3AlvD\n6Q+F5Y5GvwNed/czgbMItv2YPc5m1g24Dch2975AKjCaY+84/w0YXm3aIR1XM+sI/Aw4BxgM/Kwq\nkTRI1Yvij9UPkAPMiBr/MfDjRMcVp239B3ARsBw4MZx2IrA8HP4jMCaqfKTc0fQBuof/Wc4HXiZ4\n2eVmIK36MQdmADnhcFpYzhK9DYe4ve2ANdXjPpaPM9ANKAQ6hsftZeBLx+JxBrKADxt6XIExwB+j\nph9Q7lA/x/yZAvv/uKoUhdOOKeHp8kBgHnC8u68PZ20Ajg+Hj5V98TDwQ4i8PLcTsM098uLk6O2K\nbHM4f3tY/mjSAygGHg+bzP5sZq04ho+zu68FHgA+BdYTHLcFHNvHucqhHtdGPd7JkBSOeWbWGnge\n+K6774ie58FPh2PmvmMzuwzY5O4LEh3LEZQGDAIec/eBwGfsb1IAjsnj3AEYQZAQuwKtOLiZ5ZiX\niOOaDElhLZAZNd49nHZMMLN0goTwlLv/Xzh5o5mdGM4/EdgUTj8W9sUQ4AozKwCmEjQh/Q5ob2Zp\nYZno7Ypsczi/HVByJANuBEVAkbvPC8efI0gSx/JxvhBY4+7F7l4G/B/BsT+Wj3OVQz2ujXq8kyEp\nzAd6hnctZBBcrJqe4JgahZkZ8Bdgmbs/GDVrOlB1B8K1BNcaqqaPC+9iOBfYHnWaelRw9x+7e3d3\nzyI4lm+6+1hgFjAqLFZ9m6v2xaiw/FH1i9rdNwCFZnZGOOkCYCnH8HEmaDY618xahn/nVdt8zB7n\nKId6XGcAF5tZh/AM6+JwWsMk+iLLEbqQcymwAvgY+Emi42nE7foCwanlYmBR+LmUoC11JrASeAPo\nGJY3gjuxPgY+ILizI+HbcRjbnwu8HA6fArwLrAL+F2gWTm8ejq8K55+S6LgbuK0DgPzwWL8IdDjW\njzPwc+Aj4EPgSaDZsXacgWcIrpmUEZwR3tiQ4wrcEG77KuD6w4lJ3VyIiEhEMjQfiYhIPSkpiIhI\nhJKCiIhEKCmIiEiEkoKIiEQoKYiEzKzCzBZFfRqtR10zy4ruCVOkqUqru4hI0tjj7gMSHYRIIulM\nQaQOZlZgZveb2Qdm9q6ZnRZOzzKzN8O+7Wea2Unh9OPN7AUzez/8fD6sKtXM/hS+I+CfZtYiLH+b\nBe/EWGxmUxO0mSKAkoJItBbVmo+ujpq33d37Ab8n6KUV4FHg7+7eH3gKeCSc/gjwb3c/i6CPoiXh\n9J7AJHfvA2wDrgynTwAGhvXcEq+NE6kPPdEsEjKzXe7eOsb0AuB8d18ddkC4wd07mdlmgn7vy8Lp\n6929s5kVA93dfV9UHVnAvzx4cQpm9iMg3d1/aWavA7sIuq940d13xXlTRWqkMwWR+vEahg/Fvqjh\nCvZf0/syQZ82g4D5Ub2AihxxSgoi9XN11L9zwuH/EPTUCjAWeCscngmMh8i7pNvVVKmZpQCZ7j4L\n+BFBl88Hna2IHCn6RSKyXwszWxQ1/rq7V92W2sHMFhP82h8TTvs2wdvQfkDwZrTrw+nfASab2Y0E\nZwTjCXrCjCUVmBImDgMecfdtjbZFIodI1xRE6hBeU8h2982JjkUk3
tR8JCIiETpTEBGRCJ0piIhI\nhJKCiIhEKCmIiEiEkoKIiEQoKYiISMT/B/n8F4P6CaSBAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -454,11 +472,11 @@
"## Look closer at the data\n",
"The graph shows the _loss_ (or the difference between the model's predictions and the actual data) for each epoch. There are several ways to calculate loss, and the method we have used is _mean squared error_. There is a distinct loss value given for the training and the validation data.\n",
"\n",
- "As we can see, the amount of loss rapidly decreases over the first 25 epochs, before flattening out. This means that the model is improving and producing more accurate predictions!\n",
+ "As we can see, the amount of loss rapidly decreases over the first 50 epochs, before flattening out. This means that the model is improving and producing more accurate predictions!\n",
"\n",
"Our goal is to stop training when either the model is no longer improving, or when the _training loss_ is less than the _validation loss_, which would mean that the model has learned to predict the training data so well that it can no longer generalize to new data.\n",
"\n",
- "To make the flatter part of the graph more readable, let's skip the first 50 epochs:"
+ "To make the flatter part of the graph more readable, let's skip the first 100 epochs:"
]
},
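The cell above names _mean squared error_ without defining it. For reference (a standard definition, not text from the original notebook), the loss over n samples with targets y_i and predictions ŷ_i is:

```latex
\mathrm{MSE} = \frac{1}{n} \sum_{i=1}^{n} \left( y_i - \hat{y}_i \right)^2
```

In practice the "stop when the model is no longer improving" rule described above is usually automated with a callback such as `tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)` passed to `model.fit(...)`; this notebook instead inspects the loss curves by hand.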
{
@@ -466,7 +484,7 @@
"metadata": {
"id": "Zo0RYroFZYIV",
"colab_type": "code",
- "outputId": "e6841332-0541-44bb-a186-ae5b46781e51",
+ "outputId": "69322f09-01af-4c63-b33b-934acecc9e7d",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 295
@@ -474,7 +492,7 @@
},
"source": [
"# Exclude the first few epochs so the graph is easier to read\n",
- "SKIP = 50\n",
+ "SKIP = 100\n",
"\n",
"plt.plot(epochs[SKIP:], loss[SKIP:], 'g.', label='Training loss')\n",
"plt.plot(epochs[SKIP:], val_loss[SKIP:], 'b.', label='Validation loss')\n",
@@ -489,7 +507,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEWCAYAAABMoxE0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsnXl4lNXZuO9nZhJQQbGRFpFAcKkC\nRhYjGgEJov1A0Wqx1q3giqJYqbV1aVWq9odrxQUVKiKpWvWTT9xArUDYDPsiRURRIomCQiooAknm\nfZ/fH2dmMjOZJJNkJpkk576uXJl3O+95t+c5z3LOEVXFYrFYLJb64mnqClgsFouleWMVicVisVga\nhFUkFovFYmkQVpFYLBaLpUFYRWKxWCyWBmEVicVisVgahFUkliZHRLwiskdEuiZy36ZERI4WkYTn\n1ovIGSJSFLa8SUQGxbNvPc71rIjcUd/jayj3PhF5PtHlWpoOX1NXwNL8EJE9YYsHAmWAE1i+VlVf\nrEt5quoA7RK9b2tAVY9NRDkicjVwmarmhZV9dSLKtrR8rCKx1BlVDQnyQIv3alX9oLr9RcSnqv7G\nqJvFYml8rGvLknACrotXRORfIvIDcJmI5IrIUhHZJSLbRORxEUkL7O8TERWRrMDyC4Htc0TkBxEp\nFJHudd03sH24iHwqIrtF5AkRWSIil1dT73jqeK2IbBaR70Tk8bBjvSLyqIiUisgXwLAa7s+fReTl\nqHWTReTvgd9Xi8jGwPV8HrAWqiurRETyAr8PFJF/Buq2ATgxat+/iMgXgXI3iMi5gfXZwJPAoIDb\ncGfYvZ0Qdvx1gWsvFZFZInJ4PPemNkTk/EB9donIPBE5NmzbHSLytYh8LyKfhF3rKSKyOrD+GxF5\nKN7zWZKAqto/+1fvP6AIOCNq3X1AOXAOprFyAHAScDLGCj4S+BQYF9jfByiQFVh+AdgJ5ABpwCvA\nC/XY96fAD8AvA9tuBiqAy6u5lnjq+AZwCJAF/Dd47cA4YAPQBcgAFprPK+Z5jgT2AAeFlf0tkBNY\nPiewjwCnA/uAEwLbzgCKwsoqAfICvx8GCoBDgW7Ax1H7XggcHngmlwTq8LPAtquBgqh6vgBMCPz+\nRaCOfYC2wFPAvHjuTYzrvw94PvC7R6Aepwee0R3ApsDvXsCXQKfAvt2BIwO/VwAXB363B05u6m+h\nNf9Zi8SSLBar6luq6qrqPlVdoarLVNWvql8AU4HBNRz/mqquVNUK4EWMAKvrviOAtar6RmDboxil\nE5M46zhRVXerahFGaAfPdSHwqKqWqGopcH8N5/kC+A9GwQGcCXynqisD299S1S/UMA+YC8QMqEdx\nIXCfqn6nql9irIzw876qqtsCz+QlTCMgJ45yAS4FnlXVtaq6H7gNGCwiXcL2qe7e1MRFwJuqOi/w\njO7HKKOTAT9GafUKuEe3BO4dmAbBMSKSoao/qOqyOK/DkgSsIrEki+LwBRE5TkTeEZHtIvI9cA9w\nWA3Hbw/7vZeaA+zV7ds5vB6qqpgWfEzirGNc58K0pGviJeDiwO9LAsvBeowQkWUi8l8R2YWxBmq6\nV0EOr6kOInK5iKwLuJB2AcfFWS6Y6wuVp6rfA98BR4TtU5dnVl25LuYZHaGqm4A/YJ7DtwFXaafA\nrlcAPYFNIrJcRM6K8zosScAqEkuyiE59nYJphR+tqgcDd2FcN8lkG8bVBICICJGCL5qG1HEbkBm2\nXFt68qvAGSJyBMYyeSlQxwOA14CJGLdTB+D9OOuxvbo6iMiRwNPAWCAjUO4nYeXWlqr8NcZdFiyv\nPcaF9lUc9apLuR7MM/sKQFVfUNUBGLeWF3NfUNVNqnoRxn35CDBTRNo2sC6WemIViaWxaA/sBn4U\nkR7AtY1wzreBfiJyjoj4gJuAjkmq46vAeBE5QkQygFtr2llVtwOLgeeBTar6WWBTGyAd2AE4IjIC\nGFqHOtwhIh3E9LMZF7atHUZZ7MDo1GswFkmQb4AuweSCGPwLuEpEThCRNhiBvkhVq7Xw6lDnc0Uk\nL3DuP2LiWstEpIeIDAmcb1/gz8VcwG9F5LCABbM7cG1uA+tiqSdWkVgaiz8AozFCYgomKJ5UVPUb\n4DfA34FS4ChgDabfS6Lr+DQmlrEeEwh+LY5jXsIEz0NuLVXdBfweeB0TsL4AoxDj4W6MZVQEzAHy\nw8r9CHgCWB7Y51ggPK7wb+Az4BsRCXdRBY9/F+Niej1wfFdM3KRBqOoGzD1/GqPkhgHnBuIlbYAH\nMXGt7RgL6M+BQ88CNorJCnwY+I2qlje0Ppb6IcZtbLG0fETEi3GlXKCqi5q6PhZLS8FaJJYWjYgM\nC7h62gB3YrJ9ljdxtSyWFoVVJJaWzkDgC4zb5H+A81W1OteWxWKpB9a1ZbFYLJYGYS0Si8VisTSI\nVjFo42GHHaZZWVlNXQ2LxWJpVqxatWqnqtaUMg+0EkWSlZXFypUrm7oaFovF0qwQkdpGaACsa8ti\nsVgsDcQqEovFYrE0CKtILBaLxdIgWkWMxGKxNC4VFRWUlJSwf//+pq6KJQ7atm1Lly5dSEurbqi1\nmrGKxGKxJJySkhLat29PVlYWZtBlS6qiqpSWllJSUkL37t1rPyAG1rVlsVgSzv79+8nIyLBKpBkg\nImRkZDTIerSKxGJpJAoLYeJE8781YJVI86Ghz8q6tiyWRqCwEIYOhfJySE+HuXMhN7epa2WxJIak\nWiSBkVc3ichmEbktxvbTRGS1iPhF5IKobQ+KyAYR2Sgijwdmt0NEThSR9YEyQ+stllSmoMAoEccx\n/wsKmrpGLZvS0lL69OlDnz596NSpE0cccURoubw8vmlLrrjiCjZt2lTjPpMnT+bFF19MRJUZOHAg\na9euTUhZjU3SLJLA3A+TgTMxczCvEJE3VfXjsN22ApcDt0QdeyowADghsGoxMBgowEyAcw1mUp7Z\nmIlw5iTrOiyWRJCXZyyRoEWSl9fUNWrZZGRkhITyhAkTaNeuHbfcEiFmUFVUFY8ndnt6+vTptZ7n\nhhtuaHhlWwDJtEj6A5tV9YvAzGUvY+amDqGqRYGZ26KnyFSgLWbK0TZAGmbmtsOBg1V1qZphi/OB\n85J4DRZLQsjNNe6se++1bq3qKCwuZOKiiRQWJy+ItHnzZnr27Mmll15Kr1692LZtG2PGjCEnJ4de\nvXpxzz33hPYNWgh+v58OHTpw22230bt3b3Jzc/n2228B+Mtf/sKkSZNC+992223079+fY489lg8/\n/BCAH3/8kZEjR9KzZ08uuOACcnJyarU8XnjhBbKzszn++OO54447APD7/fz2t78NrX/88ccBePTR\nR+nZsycnnHACl112WcLvWTwkM0ZyBFActlwCnBzPgapaKCLzMVN6CvCkqm4UkZxAOeFlHhGrDBEZ\nA4wB6Nq1a91rb7EkmNxcq0Cqo7C4k
KH5Qyl3ykn3pjN31FxyM5Nzsz755BPy8/PJyckB4P777+cn\nP/kJfr+fIUOGcMEFF9CzZ8+IY3bv3s3gwYO5//77ufnmm3nuuee47bYq3npUleXLl/Pmm29yzz33\n8O677/LEE0/QqVMnZs6cybp16+jXr1+N9SspKeEvf/kLK1eu5JBDDuGMM87g7bffpmPHjuzcuZP1\n69cDsGvXLgAefPBBvvzyS9LT00PrGpuUzNoSkaOBHkAXjKI4XUQG1aUMVZ2qqjmqmtOxY62DV1os\nliakoKiAcqccRx3KnXIKigqSdq6jjjoqpEQA/vWvf9GvXz/69evHxo0b+fjjj6scc8ABBzB8+HAA\nTjzxRIqKimKW/atf/arKPosXL+aiiy4CoHfv3vTq1avG+i1btozTTz+dww47jLS0NC655BIWLlzI\n0UcfzaZNm/jd737He++9xyGHHAJAr169uOyyy3jxxRfr3aGwoSRTkXwFZIYtdwmsi4fzgaWqukdV\n92BiILmB47vUs0yLxZKi5GXlke5Nxyte0r3p5GXlJe1cBx10UOj3Z599xmOPPca8efP46KOPGDZs\nWMz+FOnp6aHfXq8Xv98fs+w2bdrUuk99ycjI4KOPPmLQoEFMnjyZa6+9FoD33nuP6667jhUrVtC/\nf38cx0noeeMhmYpkBXCMiHQXkXTgIuDNOI/dCgwWEZ+IpGEC7RtVdRvwvYicEsjWGgW8kYzKWyyW\nxiM3M5e5o+Zy75B7k+rWiub777+nffv2HHzwwWzbto333nsv4ecYMGAAr776KgDr16+PafGEc/LJ\nJzN//nxKS0vx+/28/PLLDB48mB07dqCq/PrXv+aee+5h9erVOI5DSUkJp59+Og8++CA7d+5k7969\nCb+G2khajERV/SIyDngP8ALPqeoGEbkHWKmqb4rIScDrwKHAOSLyV1XtBbwGnA6sxwTe31XVtwJF\nXw88DxyAsVRsxpbF0gLIzcxtNAUSpF+/fvTs2ZPjjjuObt26MWDAgISf48Ybb2TUqFH07Nkz9Bd0\nS8WiS5cu3HvvveTl5aGqnHPOOZx99tmsXr2aq666ClVFRHjggQfw+/1ccskl/PDDD7iuyy233EL7\n9u0Tfg210SrmbM/JyVE7sZXF0nhs3LiRHj16NHU1UgK/34/f76dt27Z89tln/OIXv+Czzz7D50ut\n/uCxnpmIrFLVnGoOCZFaV2KxWCwtjD179jB06FD8fj+qypQpU1JOiTSUlnU1FovFkmJ06NCBVatW\nNXU1kkpKpv9aLBaLpflgFYnFYrFYGoRVJBaLxWJpEFaRWCwWi6VBWEVisVhaHEOGDKnSuXDSpEmM\nHTu2xuPatWsHwNdff80FF1wQc5+8vDxq604wadKkiI6BZ511VkLGwZowYQIPP/xwg8tJNFaRWCyW\nFsfFF1/Myy+/HLHu5Zdf5uKLL47r+M6dO/Paa6/V+/zRimT27Nl06NCh3uWlOlaRWCyWlCCRUxFf\ncMEFvPPOO6FJrIqKivj6668ZNGhQqF9Hv379yM7O5o03qo6yVFRUxPHHHw/Avn37uOiii+jRowfn\nn38++/btC+03duzY0BD0d999NwCPP/44X3/9NUOGDGHIkCEAZGVlsXPnTgD+/ve/c/zxx3P88ceH\nhqAvKiqiR48eXHPNNfTq1Ytf/OIXEeeJxdq1aznllFM44YQTOP/88/nuu+9C5w8OKx8cLHLBggWh\nib369u3LDz/8UO97G5Pg5C4t+e/EE09Ui8XSeHz88cd12v/DD1UPOEDV6zX/P/yw4XU4++yzddas\nWaqqOnHiRP3DH/6gqqoVFRW6e/duVVXdsWOHHnXUUeq6rqqqHnTQQaqqumXLFu3Vq5eqqj7yyCN6\nxRVXqKrqunXr1Ov16ooVK1RVtbS0VFVV/X6/Dh48WNetW6eqqt26ddMdO3aE6hJcXrlypR5//PG6\nZ88e/eGHH7Rnz566evVq3bJli3q9Xl2zZo2qqv7617/Wf/7zn1Wu6e6779aHHnpIVVWzs7O1oKBA\nVVXvvPNOvemmm1RV9fDDD9f9+/erqup3332nqqojRozQxYsXq6rqDz/8oBUVFVXKjvXMMMNZ1Spj\nrUVisVianGRMRRzu3gp3a6kqd9xxByeccAJnnHEGX331Fd9880215SxcuDA0YdQJJ5zACSecENr2\n6quv0q9fP/r27cuGDRtqHZBx8eLFnH/++Rx00EG0a9eOX/3qVyxatAiA7t2706dPH6DmoerBzI+y\na9cuBg8eDMDo0aNZuHBhqI6XXnopL7zwQqgH/YABA7j55pt5/PHH2bVrV8J71ltFYrFYmpzgVMRe\nb+KmIv7lL3/J3LlzWb16NXv37uXEE08E4MUXX2THjh2sWrWKtWvX8rOf/Szm0PG1sWXLFh5++GHm\nzp3LRx99xNlnn12vcoIEh6CHhg1D/84773DDDTewevVqTjrpJPx+P7fddhvPPvss+/btY8CAAXzy\nySf1rmcsrCKxWCxNTjKmIm7Xrh1DhgzhyiuvjAiy7969m5/+9KekpaUxf/58vvzyyxrLOe2003jp\npZcA+M9//sNHH30EmCHoDzroIA455BC++eYb5sypHIi8ffv2MeMQgwYNYtasWezdu5cff/yR119/\nnUGD6jRnHwCHHHIIhx56aMia+ec//8ngwYNxXZfi4mKGDBnCAw88wO7du9mzZw+ff/452dnZ3Hrr\nrZx00kkJVyR2rC2LxZISJGMq4osvvpjzzz8/IoPr0ksv5ZxzziE7O5ucnByOO+64GssYO3YsV1xx\nBT169KBHjx4hy6Z379707duX4447jszMzIgh6MeMGcOwYcPo3Lkz8+fPD63v168fl19+Of379wfg\n6quvpm/fvjW6sapjxowZXHfddezdu5cjjzyS6dOn4zgOl112Gbt370ZV+d3vfkeHDh248847mT9/\nPh6Ph169eoVme0wUdhh5i8WScOww8s2Phgwjb11bFovFYmkQVpFYLBaLpUFYRWKxWJJCa3CbtxQa\n+qysIrFYLAmnbdu2lJaWWmXSDFBVSktLadu2bb3LsFlbFosl4XTp0oWSkhJ27NjR1FWxxEHbtm3p\n0qVLvY+3isRisSSctLQ0unfv3tTVsDQS1rVlsVgslgaRVEUiIsNEZJOIbBaR22JsP01EVouIX0Qu\nCFs/RETWhv3tF5HzAtueF5EtYdv6JPMaLBaLxVIzSXNtiYgXmAycCZQAK0TkTVUNH9VsK3A5cEv4\nsao6H+gTKOcnwGbg/bBd/qiq9Z8swGKxWCwJI5kxkv7AZlX9AkBEXgZ+CYQUiaoWBba5NZRzATBH\nVffWsI/FYrFYmohkuraOAIrDlksC6+rKRcC/otb9TUQ+EpFHRaRNrINEZIyIrBSRlTZzxGKxWJJH\nSgfbReRwIBsIn3z5duA44CTgJ8CtsY5V1amqmqOqOR07dkx6XS0Wi6W1kkxF8hWQGbbcJbCuLl
wI\nvK6qFcEVqrotMHlXGTAd40KzWCwWSxORTEWyAjhGRLqLSDrGRfVmHcu4mCi3VsBKQUQEOA/4TwLq\narFYLJZ6kjRFoqp+YBzGLbUReFVVN4jIPSJyLoCInCQiJcCvgSkisiF4vIhkYSyaBVFFvygi64H1\nwGHAfcm6BovFYrHUjp2PxGKxWCwxsfORWCwWi6VRsIrEYrFYLA3CKhKLxWKxNAirSCwWiyXFKCyE\niRPN/+aAHUbeYmnGFBZCQQHk5UFublPXxpIICgth6FAoL4f0dJg7N/WfrVUkFkszpTkKnETTEhVp\nQYF5po5j/hcUpP61WUVisTRTmqPASSQtVZHm5ZnrCV5XXl5T16h2rCKxWJopzVHgJJKWqkhzc41S\nbE6WllUkFkszpTkKnETSkhVpbm7zep5WkVgszZjmJnASSWtXpKmEVSQWi6XZkixF2hKD+MnEKhKL\nxWIJIxjELysDjwcmT4YxY5q6VqmN7ZDYSDS3DkYWS2uloMAoEdcFvx/GjbPfbW1Yi6QRaKlpipbm\ni3XdVE9enrFEXNcsO07LyQhLFtYiaQRipSlaLE1FsGFz553mv21tR5Kba9xZaWlGobRp07IywpKB\nVSR1pD4uqmCaotfb8tIUG5tEuAhbu5vRNmxqZ8wYWLAA7rvPehDiwbq26kB9XVQ2TTExJMJFaN2M\nLbv/RSJpzanVdcUqkjrQkJ609qVsOInoydxSe0PXBduwsSQaq0jqgG3JNS2JuP/2GRpsw8aSSKwi\nqQO2Jde0JOL+22dosSQeUdWmrkPSycnJ0ZUrVzZ1NSwWi6VZISKrVDWntv1s1pbFYrFYGkRSFYmI\nDBORTSKyWURui7H9NBFZLSJ+EbkgbP0QEVkb9rdfRM4LbOsuIssCZb4iIunJvIa60tpTSy0WS+sj\naYpERLzAZGA40BO4WER6Ru22FbgceCl8parOV9U+qtoHOB3YC7wf2PwA8KiqHg18B1yVrGuoK7aj\nl8ViaY0k0yLpD2xW1S9UtRx4Gfhl+A6qWqSqHwFuDeVcAMxR1b0iIhjF8lpg2wzgvMRXvX7Yjl4W\ni6U1kkxFcgRQHLZcElhXVy4C/hX4nQHsUlV/bWWKyBgRWSkiK3fs2FGP09Yd24PdYrG0RlI6/VdE\nDgeygffqeqyqTgWmgsnaSnDVYmJTSy2W5o8d0LLuJFORfAVkhi13CayrCxcCr6tqRWC5FOggIr6A\nVVKfMpOK7ehlsTRf7BA69SOZrq0VwDGBLKt0jIvqzTqWcTGVbi3UdHqZj4mbAIwG3khAXWvFZmNZ\nLC0fG+esH0mzSFTVLyLjMG4pL/Ccqm4QkXuAlar6poicBLwOHAqcIyJ/VdVeACKShbFoFkQVfSvw\nsojcB6wBpiXrGoLYVorF0jqwQ+jUj6TGSFR1NjA7at1dYb9XYNxTsY4tIkYgXVW/wGSENRp2oL/U\nJejPzsiA0lLr17Y0DBvnrB8pHWxPFWwrJTUJn1vbdSsnIbIWo6Uh2Dhn3bFDpMRBsJVy771WSKUS\nQUsxOCWq61q/dkvFxihTG2uRxIltpaQeQUsx3CKxFmPLw8YoUx+rSBqRROSn2xz3SsL92YmKkdj7\nm3rYGGXqYxVJI2GniU0OibQU7f1NTWyMMvWxMZJGIhH56TbHPbnY+5ua2Bhl6mMtkkYivFXl9cLW\nraYFXJePwrbMkkuq39/W7HazMcrUxs6Q2IgUFkJ+PkyfDn5//dwnrVmYNAapen+t283SFMQ7Q6K1\nSBqR3FwjpPz++gcObcssuaTq/bUBZ0sqY2MkjYwdat5SH+x7Y0llrEXSyLTmIRjicRsl2rWUqq6q\nutKa3xtL6mNjJJZGIR4ff6LjAKkYV2gpis3SOog3RmJdW3Fgh2doOPGk1iY6/TbV0nmDiu3OO83/\nZLxP9l21NAXWtVULqdiqbY7Ek1qb6PTb6PIyMoyQbSprINkBc/uuWpoKq0hqoTGyZVqDuyMeH3+i\n4wDRQ6iMH9+0QjbZ/VRSIbOrNbzLlqpYRVILyf74W1MrMp7U2kSn3wbLmzix6YVssgPm9X1XEyX8\nW9O7bInEKpJaSPbHnwqtyNZAqvRaT2Y/lfq8q4kU/vZdbr1YRRIHyfz4U0XAtXSS1SBINVdOXd/V\nRAp/+y63XqwiaWJs/4DGI9ENgpbgykmk8LfvcuvFKpIUIFWH5Whp1GQ91MeyaAmunGQkODS3e2Bp\nOHEpEhE5CihR1TIRyQNOAPJVdVcyK2exxEttiqAm66G+lkVLceVY4W9pKPF2SJwJOCJyNDAVyARe\nqu0gERkmIptEZLOI3BZj+2kislpE/CJyQdS2riLyvohsFJGPRSQrsP55EdkiImsDf33ivAZLEkiF\nDnDxdPSrqXNifTsu2nkyEkMqvEOWhhGva8tVVb+InA88oapPiMiamg4QES8wGTgTKAFWiMibqvpx\n2G5bgcuBW2IUkQ/8TVX/LSLtADds2x9V9bU4654QUi2omgqkSowgHhdTTdZDQywL25pvGKnyDiWD\n1iQz4lUkFSJyMTAaOCewLq2WY/oDm1X1CwAReRn4JRBSJKpaFNgWriQQkZ6AT1X/HdhvT5z1TAot\n+WVvCKkSI4hHEdQUC4jeBk3bA741UdM71JwFcWuTGfEqkiuA6zAWwhYR6Q78s5ZjjgCKw5ZLgJPj\nPN/PgV0i8n9Ad+AD4DZVdQLb/yYidwFzA+vLogsQkTHAGICuXbvGedrYpIrATDVSJUYQb8C4Jush\nuK21CYCmprp3qLk/h9YmM+JSJAF31O8ARORQoL2qPpDkeg0C+mLcX69gXGDTgNuB7UA6Jl5zK3BP\njDpPDWwnJyenQUMcp4rATDVSKd0zUS6m1iYAmprq3qHm/hxam8yIN2urADg3sP8q4FsRWaKqN9dw\n2FeYoHyQLoF18VACrA1zi80CTgGmqeq2wD5lIjKd2PGVhJJKAjORJMJ10NJiBK1NAKQCsd6h5v4c\nWqrMqI54XVuHqOr3InI1Ju33bhH5qJZjVgDHBNxgXwEXAZfEeb4VQAcR6aiqO4DTgZUAInK4qm4T\nEQHOA/4TZ5kNoqUJzObuOkgWrU0ApCqp/hziaYS1NJlRE/EqEp+IHA5cCPw5ngMCWV7jgPcAL/Cc\nqm4QkXuAlar6poicBLwOHAqcIyJ/VdVequqIyC3A3IDCWAX8I1D0iyLSERBgLSZ2k9KkYtCwubsO\nkklrEgCpTKo+B9sIq0q8iuQejEJYoqorRORI4LPaDlLV2cDsqHV3hf1egXF5xTr235iOj9HrT4+z\nzilBqr50iXAdpKKCtLQumuIdbGgjrCV+N/EG2/8X+N+w5S+AkcmqVEsiVVv+DXUdpKqCbApaomBo\nCup6H5vqHWxII6ylfjfxBtu7AE8AAwKrFgE3qWpJsiqWitRHYKRy0LAhroNUVZCNSWEh5OfD9Ong\n97cswdDY1EfANtU72JBGWEv9buJ1bU3HDIny68DyZ
YF1ZyajUqlIfVsSqR40rC+prCAbg+D7sH8/\naCC5vCUJhsamPgK2Kd/B+jbCWup3E68i6aiq08OWnxeR8cmoUKpSUABlZeC65n9dBEaygoZN6VJp\nqQoyXoKCL6hERFqWYGhs6iNgm+M72BzrHA/xKpJSEbkM+Fdg+WKgNDlVSk0yMowSAfM/I6Np65MK\nvtZUzappDMIFn9cLV14Jo0a13vtRE/GmytZHwNb0DqZq7Kq+302qXg/Er0iuxMRIHgUU+BDT07zV\nUFoKHo9RIh6PWW5KWqqvtSmpy4famC3LVBYgtVGXBk8iGyap0NBKJKl+PfFmbX2J6dkeIuDampSM\nSqUieXnQpk3ifZv1FRIt1dfaVNTnQ61N8CVCAaS6AKmNpmrwNNeGVnXvTKpfT0NmSLyZVqRIEtkC\nDb4sGRkwfnz9hERL9bU2FYn8UBOZzRWrXuvXw8yZMHIkjBlTvzo2Fk3V4GmODa2aGg2pfj0NUSSS\nsFo0ExJhek+dCuPGGcEQdJW5bv2EV2uOUSSC8NZfbR9qvNZForO5ouu1axfccYfZ9v775n8qK5Om\navA0x4ZWTY2ZVL+ehiiSBo2o2xwoLC6koKiAjNIRlG7MToglcsMNppUKRtD4fMnL+GnOvvVkE6v1\nV92HWhf3UqKzuaIFyIQJkdtnzmx6RVLbe9ZUDZ7GcD0mktoaM6nccKxRkYjID8RWGAIckJQapQiF\nxYUMzR9KWVE/3Bk34XGVNul2zFsRAAAgAElEQVRSrRCJ56UsKKjM/AKjRJ580gTuE/0yN3fferKJ\n1fq7/fbY96gubq9kZHOFC5CRIystkeByU9Jc37NUrHddrI5UU4I1KhJVbd9YFUk1CooKKHfKcbcM\nAn86rkq1QiTelzIYsC8rM26tJ59MXmsy1YNzQZrqg6iLz7ku+ybbBRF8X1IlRtJc3rNoUrXe8Vgd\nqagEG+LaatHkZeWR7k2nrPsiXF85HtdLerrEFCLxvpT1FTItbWiWIE35QcR6FtXd57o+t2S7IMaM\naXoFEiQZ71ljNC6aw/dRHamoBK0iqYbczFzmjppLQVEBu059n7VLOzByeAa5udlV9q1ri7UuD72h\nQ7Pk58d/rsamqT+I8GdR231OZf90U5JoC6yxGhepHryuiVRUglaR1EBuZi7rv13P3SUX4ndPYt5T\nQwHIPnEPBUUF5GXlkZuZm9SXsqHCdsYMc9yMGalhAoeTl2fiCK5r/jflB9HUSq05k0glW1BQ/6GI\n6kpzbRykohK0iqQGpq6ayth3xuJu7Q8z/o3fSef6BS7ey3+Bc8Ri0r3pzB01N6RMkvFAG9L6aA7C\nUSTyf3Uk292Riq281kiqDUWUqqSaErSKpBoKiwu5YfYNuOpCUR446aA+HL8f9/NT0c4LKCvqx4T7\nyphwefLiHQ1pfaS6cCwoMKnQquZ/dYquMdwdjdnKS7WMm1Qi1YYissSHVSTVUFBUgOM6ZiGrALzl\n4Adw0QN2QHEu7oz3+UAPYNE/4xNuDYl31EfgpKIJHE5Q0ZWVGYukutZnY1lWjdHKS8WMm1QiWUMR\nWZKLp6krkKrkZeXh9XjNQuZSGHYTeFxQD7z7GKz7LTjpuI5JC86f9SUTF02ksLiw2jJjCcT6UFgI\nEyea/7WRm1t9/4imJjcXJk2qjJOMHx/7moIKx+ttHOFSl/tbVxL1DrRUgo2fe++1ShaS+y4mEmuR\nVENuZi6Tz5rM9e9cj6MO7DvMKBH1gV9gz8/AW464gi8Nnts1Gv+8RXg8HiafNZkxJ1bNz4zX1VST\n66OltWhLS2sfJqahllVdXEnJvr/VvQPW3VVJoi3D5npvm9O3bhVJDWT/NBufx4fjOMa95fGD4wU8\n8NlZ+EbczNXH3sr2jq8w68cFALiuy7jZ48j+aTa5mZFPvTaBGM9gf80hgF4X4nVv1Ve41PVjTPb9\nra7/SnMRGM2N5nxvm9O3bl1bNVBQVIDfDQyMlbkU+k4HXEDA9TKiy+WMGvc1s/ffGXGcow4FRQUx\nywy6miDSZA2+8FOmGKFaneujsd08ySZe91aQupr6dXUlNcb9jXY3WndX8mjO97Y5fetJtUhEZBjw\nGOAFnlXV+6O2n4YZiv4E4CJVfS1sW1fgWSATM97XWapaJCLdgZeBDGAV8FtVLU9G/UO92/1luLjQ\nOx/WjgYnDbwVLOCv/HfuHiqcisprQmjjbUNeVl5o0Mdgf5PwQSDHX5Id0UoKvvC1DfaX6gH0+hCP\newvq17qsa+ZaU9zf6DpmZBhl2VKeb1OS6pmLNdGcvnVRTc4gviLiBT4FzgRKgBXAxar6cdg+WcDB\nwC3Am1GKpAD4m6r+W0TaAa6q7hWRV4H/U9WXReQZYJ2qPl1TXXJycnTlypX1uo7C4kLGvzue5V8v\nNyuKTzHpwAfshO39zLre+ZC5FA8ezj3uXIYfPZw129Ywfe10KpwKRIQBXQewrGQZftePLL4Dd95f\ncR3B6zWBxby8SiHZkqdujeWvjldBTJwId95ZOQT/GWeY0XDjiXuk+seYiDlqLLFpDs8/VRGRVaqa\nU+t+SVQkucAEVf2fwPLtAKo6Mca+zwNvBxWJiPQEpqrqwKj9BNgBdFJVf/Q5qqMhigTg/FfOZ9Yn\nsypXrLwa3nnKBN4BvGVw+RDIXIrP40MQKtyK2IUBFOfi/WcBOOkRwqKlv/A1KYx4rj14fLDns8dj\nUkVbkrANV5bBRkbQFWqxNDbxKpJkxkiOAIrDlksC6+Lh58AuEfk/EVkjIg8FLJwMYJeq+msrU0TG\niMhKEVm5Y8eOel4CTJ21nree7WUsETD/Z08OKBExf06asVIAv+uvXokUnwKLbgMUz+gzueYPX0YI\nwVRO1U0ENfmr47n2oKl/xhmVndaam9+7NpqTX9xiCZKqwXYfMAjj8joJOBK4vC4FqOpUVc1R1ZyO\nHTvWqxKFhTDuouNw5k6AGXOhOJeee64H9WKUiJo/b4XJ6qqJ4lNMGfPuhRlzcdSh64iXamx9N4f8\n8bqQCCGZm2vcWW3atExha/tRWBpKU8iOZAbbv8IEyoN0CayLhxJgrap+ASAis4BTgOeADiLiC1gl\ndSmzzhQUgOP3gQo4im/rGdx0fR/Gv+llf5mLqgPHvgUDHjJZXVF4xYtHPMZCCRtmBUehaDAZB8bO\nda3RBRQVwK8rDT2+ISQqeFjfcpqL6zDVxlGKh+Zyb1s6TZXunExFsgI4JpBl9RVwEXBJHY7tICId\nVXUHcDqwUlVVROYDF2Ayt0YDbyS+6oa8PGiTLpSVK14fPHn9rxlzXja8tJ4bnvpf/F3/jWQu48wj\nz+TAtPN4Y9MbaNiEkhkHZHB538v5fv/3TPvqQyoWlBsl4q3A7TaPcbNXAVC6tzRCsFeXPx6ctbHc\nKY8YMDJeGnp8IkiUkKxrOanWn6CugjeVBXWq3dt4SOX72RCaqu9J0hRJIBg+DngPk/77nKpuEJF7\nMErhTRE5
CXgdOBQ4R0T+qqq9VNURkVuAuYEA+yrgH4GibwVeFpH7gDXAtGRdQ2XLV8jLSyM3N5vC\n4kJm/jAB/wAz36kC73/xPqd1O61SiQQyu77NKuDBvQ/ypwF/4qpze7L950+ybMkBbMt4CTKXUuHC\n2HfGIkiEYK8uZTE4a6OjDuVOOQVFBXVSBA09vjkTzwfWWMKlroI31QV1c+o4B8m/n02ppJoq3Tmp\n/UhUdTYwO2rdXWG/V2DcU7GO/Temf0n0+i+A/omtaXyE5nH3l1XZtmbbGvMjGAtx0s1Aj6OH8tCS\nh/CIB6/HS0WfCgizWlw1Y2bv29KH8X/5hknXVe+6CfZrCVoUeVl5dap/rOMbU3g2ZQuwtg+sMYV1\nXQVvqgvq5tZXI5n3s6mVflP1PbFDpNRA9Esx+pHPzDzuxf0r+5LsOwyyCvghGCOpEgvJQzOX4qiD\n67gRrq8QxafAjA9Y7qQz5FWHxx/zUloKGT3WU+B/G4or3V6je48GYFTvUVCSy8QX6jAkfdisj3lZ\neVCS2ygvfVN/XFD7B9aYwrqugjfVBXVN97apGxCxSOb9TAWl3xQxNqtIaiD6paBoMN5dA3FmzAZ/\nOuAFcU0/ktFDTcA9q8BYIk7VbK6YSgQilE9ZmcO4ceC4ius5Cs/od2iTdS+Thk1i/LvjQ9ZEX//1\njL+kHkPSZ+aGlNLEFxrnpU/Ux9VQoVTTB9aYwrqurcZktDITLeBj3dtENyDi7WtU2z7JbLU39D1K\nRcUbD1aR1ED0SzHqvG4wawZT3LZoMHNaveCkIUWn4+22En/mUqNUghZLoH9Jz37f88nOT0KuLA8e\nM+wKhA0IKYCL3/GgroCbhrtlEOWZS5n58cyI+MbMOaUNFs6NJTwTcZ5ooXTjjbB2LYwcCWPGNLyO\nje0SCAreYKpmbees6/410VgWYiJb5/HUuS7XlaxWe0Peo1Sw3OuLVSQ1sH49ZGWZca9uuin4ULsx\n44nI3tVen4e+B40kr8tJTPrqN5QH3VxhsZLDut5DG+8Wyp1yRMQolFgGiriopxzwgqcCshbg8/jo\nc3gf5hfNR1HSvemMHJ7Bon9CWbni8fnJ6PEJkF2n62ss4ZmI84QLpf374cEHzfr3Tc5DwpRJY364\nyQ66h7duofJ3Y7lfEtlQiafOqeBWgvq/R6lS//pgFUk1TJ0K115buTx2rPk/ZkylUMzIgDlz4K23\nvKx8ux/r/92PJ15ayRrfUyzceCofh8VKFi308sfbbqRDmw5kHJjBjXNupMKpwCMenKI8cH2AF9SF\nPs/CIVuNpZK5lHJHeLTwURzXwePxMGnYJMacWJmG7HSbx/gNq8k+sWo6b039RhrTjG6okA4XSqqV\ng1sCzJyZGEXS2CQz6B6udLxe0xgKTk0waVLjWKKJbKjEo5SSZWGHj4NWWpq876Wu9U8lN5hVJNUw\nc2bksuvCuHGQnR3pZrjhBvNhg7FSSjdm8/TtT1OYAYNeL8epqABvBZo1j0cLV7HgcjNviaqiqJk0\n64CdJtaC38RVeuebAgNuMc1cGhp2RVQo3Wsmsi7NeBsd+P9w1aHc8Uak8xYWF5K/Lp/pa6fjd/1V\n+o1MnbXe9Nr3+2iTLilvRocLpV27Ki0SMO6thtBUH2Qyg+7hSscNeFBVzbrS0sZz4yWy31BtdU5W\nLKmxxnerS/1TzQ1mFUk1jBxZ6TYJ4jhm4qlwF0HwIwXT8gt+3Lm58NQrmxg7+WXcbvMgcymOeigo\nKmDr7q2V43EVn2Km7nU9ZirfYTeZ9VEpxMGe8x7xhNJ+w4e5B1j+9XKmrpoaGnm43CkPBfjLnXLy\n1+WbYewPzOCGp0rwl98FajpcFhRIwl/EZAZ0jzrKKPuGxkiaIiAcpDbBEV1WXQRNuNKJtkjCy2tO\nxFPnRF9XUCEHv/PapjpoKPHWP9XcYFaRVENQOE2aBJs2md8+X+XshV4vnHVW5YRMXi88+WTkwxxz\nXjZkFnL9Mz7cRbfjOXIxW3dvZfue7WaH4lOg4G6jMPABfpNOHCOFOKhIMg7IYPy74+l8cGf+dOqf\nuPHkG3loyUO46jLrk1mRoxRHMW3NNFx1ERGcbv3Bexs4ptd+Xl5aQu9fsltMY8Ykxp3V2AHhaKoT\nHNWVFb1/dYorWukErzUV3CDNiaBCDrdIUiEFO9VSwq0iqYGgsAp+rFu3mtiJ6xrBMysgs0XM35w5\nsGZN1Dwiq8bA81ejjuJfUMYUPZO0bivxlAzAnfF+WBqxHzzllenC1aQQb/9xO9t/3A5fwxufvIGI\nVJ9WHMAjHlx1jRsN8KgHX9cV+EefiXyZx+8vOYnc3PPqfH9qir+kWoupOho7IJzIsmpTXNFKJxXv\nf6oTrpCTHSOpb71SoT5WkcRB8IMMKpFoVI2VElQs06fD/Pkm62vsWHBdATzgb4NuOQ1/5lJyym9h\nhdsGxQv44cgPIO+vlYM/BlOIsxbEHBASTL+UeOaTCaYcQ2AGR18bftX+IV4q+ho3az6PljzE929f\nxajeoyJmcgy60ILusPAxwWobtyvVWkzV0dgB4USUFd6waQ7KurmTqm7AmurV2HE/q0jqQGlp5TwY\n4YhEZhGVlZlZ7latCu4bHHLeAwfsxCterjr/KNa9CmVlJhgfVCKC4BEPAwam85MzPuGtT5fjKJUz\nMwYyuWokfBbHQM97yVxGmjeNs44+C0py+dcfr0YrvOD9MxWjhzLFncKMdTOYNGwSN059iYrPT0W6\n/xFv1+U4roOLGxoTbP7o+RHjdu337yd/XX6EImmKFlN9RzZOdEA4P79ux8X66GO5piZOjJxB0es1\n7lZIbWWdaqRStlMyaIpAvFUkdSAvz2RshE+H27evcWm98UakMlm+PPxIxSgTP7Lvpzx51pOMOTGb\nNX/PZ8rMTWjWvIhgukc8LNm6BMC4o6LH7xp2U0hBVFEqwX1DLjMHvOX0vvUWso7dxpzNcygv6GmU\niPrAL7BuFJq5lHKnnEn/u4zy52aDk456y3HDAv2KUuaUkb8un1G9R+H1eHEcB0WZvnZ6yKIJEi6g\nEzl8fayyUmFk4yAzZph3ZMaMhvUNCc8ODO4jUjm/PcA110DXro0jFFuCAE61bKdk0BRuZatI6kCs\nVnZhoWkh1o4DvnI83ReyZtvxFBYXMmrEMcz473Xs9+8PRTlcdXE1akyu8OC7n8AMjZ4qGV0R++ID\nNBSwX7v0ENalPWPKzZoHnjvB8QIeWHMF0ucFpOtyPln5s2oD/dEcfejRfLzzY6AyKyyW8K5OyEe4\n0Epy40t7rKasmkY2bsw5WJLRNyR8H4+nMgsrPT0qHpdEGiqAG2vY/NqOay6xu3Dqei+awq1sFUkd\nCT7I/PxKF0awk1w4Ho/5UwXHUfA4MOwmnC6LmbJqCTPWzWDuqLnMHTU3or+H1+M1c747FZFDqASD\n7xDovOiJLeiD+/ohFMQPBOxDyilzKfSdDivHmH1cLz1+uI5NugzNm
l8l0O/BE+qNn+ZNo+/hfcmb\nkUe5Ux46raL8Y/U/6Ht4X8acGJlOFUvIAyGF4P1qIJI/F3+F13SYe2k9a3xPAVSxcgqKCigr6oe7\nZRBl3ReFFEZeVh5ejxfXcfF6vKH4TmNYKuGKKi8vt/rYRgyFVp+OdpMm1R70TbTyrE0A19bxtTGG\nzY/nuOYSuwtS30zAxnYrW0VSRwoLzcMpD8hQr9f893iMvzro7lqzBlavhpUrATxmlsV9ZspfRUMt\n+K6HdGVU71GM6j0qIsA9oWACH2z5wATKM5fiufwXuGsvhdVXBuaLDyinsIwuwCiJ8LG+qnOB9c6H\ntaPNfPPeCooOnWHcaMHj140K7aooAzMHst+/n7bbh/DYwwdQflC/yDKLT8EpyuPardN5ceCL9Dys\nZ0gJZByYEcocExEyDsyIUC7O6t/AfgHMkC/XT34FZ+AzAExfO535o+eHhFNG6QjcGTeBPx3XV07G\n2Z+HqiCYMlSV/HVGyyd7DpZYimru3KrWVXUKLRkd7ZKhPGsM/tdyvmRYafU9LtWynWqjvveisRME\nrCKpIwUFUFFRuRzs1e7zwRNPVKYL/+53JugORsmkpXsYPqwDc/a3CVke0b3Obx90e6jcCXkTWLR1\nUejjnHTdKGZO/TkfrEnHRQDHWBWx3E6ZS2sPyIcrnKwC9naK2n/taOPiWjsaHT2UhSwMxF/uCsRq\nRla61aJiOAsZysLMZ5i+djqPD3+c8e+Ox+/6TU9+12H8u+OZNGySGR5m60mw5gqCCQnicXC6zQtV\no8wpY0LBBEa2f5jSjdls3ZqNx1VcFTyul9KN2XAe5K/LD3XArPjyRJ5ZcCjPHXU7T4y5JK45WOrb\ngo+lqG4flFvVPVWDQov+6KfOWs/MOaWMHJ5h+iLF2Aeqd3kkQ3nWJIDDz1fmN89rQt6EWq2u6uqf\n0WM9Ht9xKD58aS5bO7xIYfExtV5DvNZGlb44CbLeCgsrvRR9+1ZajVB/xdVcLCirSOpIXh6kpVVa\nJEFUzYsD5mUKKhGAnBy46iovpaV/YvjxwynNeJutu7fyj9X/wNl6EvuLTif/J5+ROzYsUB01d0hu\nZi7ZlxMaqNGVssqhVMIQaulXEp39FUvhVNchMt7160ZBUR5lWQuY+fFMyor6oVsGGfdaIKi/Ztsa\nY20V5QVcdUY5dhzwNtszCyPq+/7b5/H+mmPwoPi8QppP8APp6UJGj/WMffsppq2ZZq47TKmVLyhn\nTZ/XmDRsEjM/nsnIniNZv6od4y6qiBgahi71b8FXN9lYtHCKd1KyqbPWc+2FR4G/B+9PL4dX14eU\nSTjRY2ldeWVlvKShE6BVR3Wt3PARFlxcPtjyAYu2LqrR6qrOZVNYXMj4DUNxftsPKcrD7b6If+xY\nwoz82p9LfayNulpv1cX1INJTAZVeivBRBeoaW2ouFpRVJHUkN9c81Px82L7dZGwFLZTly80HEk3n\nzpUpm+np2dx414Gs3rgedgnMfhh10pm+ROh7+HpKM94mo3QEpRuzycvLJc+XS8ELQB7QpZDRj3wG\nRYPZ3vEVZn0isOg2JGshZw5ux8ieI1mzbQ0Lv1wYCoJHsPLqmgP1YATx7q5mWHs3ECc5YCcsus38\nj9VRMnysMI9jLAzXZ/ZtPxv3+bMiMs6cfR159/si3PZulflbth/1QGRdZswFfxvAg4vgYDKVOPhL\nPj7oacZ+9FBEP5lopfbxyo5M2/lLHHUo+LIAZ8GfcMrvjhgahoGRLfjwoWSi+85UabmW5DL6+42Q\ntYBRI44BYOzbY5m2ZlrI2gy65sIVWnXCauacUvD3CCRWKDPnlJJ9YtXzhrs8HEeZMgVmzAgoRiLr\nVF0CRE2t8Lq00oONnnB3bG1WV7TLJn/WlxT4X2Lr7q1m8rguS5AuH+JiXJXxWlbx9vwP1aMOSRrh\nSic6rjd6dKSnAkxmXXBdcJyz+gT36+qmaszkkiBWkdSD8Ac7dSpcf31lT/c5c8yQ88GhU9LSoFOn\nsCHQy5QH/5wJ2g04OxDvECoqlBue+l/cbvNwZ9yExzWt72BrxpfmoKNuxzliMekHp3NjxkuQ/wH4\n01FfOSNHfE72T/cw/t3xMacCpvgUo0TcNEAQx8txX9/P5m5nRo77FXRRefxw4rPQabUZCywq9bjL\nCZ/T/qjdfLp2IE74WGHHzIZN54YE+b/fOjhmxlnRgnIYvcWct88M8793fjUZaF5AEVHS04WD+8/i\nkeILQj31w6/R+/2RiA8cx8GXJizx/D+cwPWVO+XQbS54bwdHUY+f5WlPMPzAjFALPuhyDCY7CILX\n4+Xm3Jt5YtkToX2u7HNlYHKxbMrLu5GePoq+h69n/IaTA1l4xioMpksDoYnJFm1dRPZPjZURLagO\nPGY5ePsbxeqroM8puxgy46JQizmolPLyzDthXKseVIWycpf8fE8g/djUaVQfKCyJsgZqaYXH20qP\nFljR7thYllDQ/bN9e2UfGF+aw7Pf/RZn3mK8Hi8+jw9cQoknQYVcnbVXXZ2CM4CWlRnrYPJkM+hq\n+L2IOf10NQOehisd9/MBUC6oW2mFRHsqoi0SX5pTxU0Xb0ZWrE7C1V1/U6TBW0XSQEpLIzsolpXB\nww9Xjr81fjx8/715oczw52omwwoIxyDicYwS2TLIBJJVqAgbtdVV4PMBaOcFlDvlrF3aAY97QESs\noCBjomnN4eLBQ07nHDq370zR+sNZW3CuEfaBWIQqbJ47iN//+lXe2nsHG3dujGzNu2qGst93WKTb\nat9hMOh+SgDZKXiK7kCCPfS1Atp9E2FhaI//hS8HmmVRYw2FucA8665AnTTUU9VVJ1kL8aS5uH4H\nnw/OvnAH9M7nkeI7KvvXBN10ADPm4jjpiMfhhLNW06bvK6zwLol8YJmFodiQZhUw68elvPWOlz+c\n+ge+3/89q7etZuW2laGMOUXxu34e/vBhwKRnO47DM6uewbP4J2hZL9T1UF5urInyI8pjuhajW775\n6/KZsW5GZQwsbAZM3xVv8/Pvr+HnOdtYKu9Q5piGQVApBd1FV/z9RZ557seABejFlQo+3rGF8vJe\nlS39/Mp+LUHXSoG/5s6k8cRYqhNY0e7YiGOiElXS0uCci7ez6Yg/s/GARQD4XT8jjh1B/yP6xxSa\nNQnK6G2jv99IWVm3UL+bsWMrOw9XjuAbNf00JpswvDGwf0tfRv1hI78a3qOywXHUEnSxi79C8aXB\nqFFeRo2qjJEc3O1z1m4pZuTwDLJ/lk3+rC95btdo/rFjcchNZ9ystY/AXVhcyJAZQ8y74fHhEU/M\nEb3jfXbJwCqSBhIrZhJULI5jlAqY1okIuCoE4wFgBLsInHPhDt7LWs3+b3qh4uIRxecLt0hAj1qC\nI17Sven06Z7JfI+AQpt0MX7aLpGtq0nDJkFJLoMvr4AywSivyvNWVPh55KVVuAM/MZUMdzP5/MZl\ntXk4oBFpxEEUxc2ajy/t
bly/F1+ah+G/+YG3+vwPzpaBlXGYn/2nMovs3ccqXWOA6/cZxappVVKZ\n07NW8fgrm1hTeDDbO77CnP13Uf5jeZVYCN5yY9UEFJ66yrp9b4L3sSrPSxA0KjbkqMNDSx7C5/FF\n9OIPVwiqis/jCw3/D5hRnT1/xiNtSU/3mMnGNlTGCoLWDEBGmNWT7k0HiAhQ37vg3pDw8hyxhM8y\nl7LxRz/6Y6RSWr1tNYXFheRm5jJqxDE8u/M0/L3zQwp1ifjwpRWgePD4/GzfU0p5eacIFxKDtppE\nB43dmTRmKz18kqwuhUwomECZU4arboQyCp/KOZroRJUKv8tb2yfjHj09Yr9O7TpFJJ7EKyijt23v\n+ArIHwi+764bvJdCWVmlmyli+ulFEyNGzab4FHTGv9nsT+fBmS6X3jaHXsM+NHMKcQZ8fir+7otY\nn/5bxpw4JtLiO6KcRRvSmXviXLqOKMCZvziiIfHsUx1rHIE7aIW8+/m7ocZEhVsRejdjNQKSFR+r\nDatIGkgwZjJ+fHRvdkP4XBDmhyAeJSPzv/y35DCjCNrAn244nOHfLGPcvcfhx4fHIzzxRLgp7oUu\nEykoKmDXkgt59K6jQqMQT5oUbMlUbRFOfAEcfzCY7YcjVsL2PuB6wVuB020uIcsokMnl+XIobttv\nYc4T4LQJXgmc/FiVmIpkLuXCB6eyY0OvQJbRnxj79haeWXV/5U4Bwd2nUx/W/uwMKBpslMw3x0fO\nwxKVynxFnyvI7r3HuIt+3E+1nTSD/WuqGehSEHweH66aPiaqWunOCz4WKtcJQuf2nel4UEfWf7M+\ndNzvc3/P0uKlLNy6MOJ+adEQbry0P2POOw8yTRykz+F9+HTnp7y56U2mrJqC1+NlxDEj6NSuE30P\n78uabWuMYnIUF5eSH0oq6ysSynKLZuW2lQzNH8qkYZMo3VvKb3r9hhfdF0PPxUXodP1lbF13JP7u\nBcz2pOFLmwt48aU5PLdrNM7qxaHrVJQKpyIi0yoYz5m2ehqdD+7M+lXt+N3FDuXlgjfNj/72T7hd\nloTqF1RGfQ/vGxFTiqay0RW4Lk85TrcPCLfMveLl4LYHM3HRxJjl1CQow7d5PV7m7L8LPetzeOcJ\n875TaZGLR8nL84SODQrtjCg350//+1tK/OmAsdL/9cAAFo8YTMHeifg7L0I7L8ABxs1eBsCabWtY\nvW11SMkGlV10vQHz7QVG4BavsrXDyyG3V2FxYZW+WqH3I/DcYjUCarMKk4XEM+hfvQsXGQY8hmkK\nP6uq90dtPw2YBJwAXMpip5UAACAASURBVKSqr4Vtc4D1gcWtqnpuYP3zwGBgd2Db5aq6tqZ65OTk\n6ErToSNpFBbCwIGxB3WEynGRKiqMZeLxVPZUnjzZpA1PnAh33mnWe71w771w++1Vz3PaacZKAXP8\nffdV3S98/6FDTaaXeCsY8Oe7KCwpxP/FQLxHLsLbdXkoHfmso8+iU7tOAEyZdCg6917MowPTb6UC\nrsgz7qEoBKGtry1zR80FYOBzAys7VAboeVhPNu7cGGlR+NNNbOWsGyDn2dC+aZ40zj7mbD4t/TR2\n4kDIIknDm+Yy4M93GwEfVFJRCq/HYT0Y3G1wSIhv37Odol1FrPtmXewst5DbbAGezGW4uHjFa6yw\nYHA/zLWW1m0Vv8/9vZnJUh18Hh8VTkWVstO96SG/v9fjpXO7zhTtLorY57xjz+OtT9+KiAGF+sgY\nWwOvx1utsglHEE5yfke/st9D1gL+seNKHHVCZYRbYD6Pj6v6XkXfw/tyw5QX8H8xwIzTVjQEnXeP\nUdpSAaffBYPur3KeoLIOulxijVYwddZ6rv3bEkCrxMQ84glZfB7x4PP4uLLPlRzc9mAKthTQNq0t\nPQ/rWUVhTV01NUJ5byrdxD7/Pr7c9WXlu7ZulHEBOj7wuPQY/RTT/noy679dz7TV01izfU1oBtLf\n9PoNO37cwcieI6E4l2tHHhuKLSJ+rvvjV4wa9zWnPX8aftcfqr9XvBHPzCMe2njbhNxPU1dNZdrq\nabRNawuKeV8D75C3+2LILAx5Eqatnsbyr6u2TD14OO6w40LfkQcPZxx5BiN7jqw9MaQeiMgqVc2p\ndb9kKRIR8QKfAmcCJcAK4GJV/ThsnyzgYOAW4M0oRbJHVdvFKPd54O3wfWujMRQJwK23Rs7cF8Tj\ngaefNr/HjTNKIHjbwxVGdErkpEmmYyNUpnZOnAh/+UulwkpLgwULwvpDxOojUVg12FpT4K6wuJC8\n+26n/Nn3wE0PXIX5iHpfMpPy3L9Suq+Ub3/8NuI6BeGkzifRuX1n3tj0RhUhF+wh76hjssDm3RsS\nTt6hf+Wcqzbw333/ZcfeHXxa+qnpYxKeqhxN4CP0dF9EWreVVDjGojjsoMOq1A2Mcgr6l0UEVQ19\n+IKQ5k2jz8/6sHyZp9qJxSLOHbHPGXi6LovMIItBuEIILmtUi/yps5/i+neujxBKl2Zfyv9t/D/K\nnXJzD12nViUSJCiUzzr6LN757B0q3Aq8YuJCBVsKIgSWIHhKBuA8/15kgsW7j4U6rwbvR5VU8zDl\n279zf9Y99Egoqyno/x/79lieWfVMrfelJtI8aaGZRh9c8iCzNlU/B0/w+l11TdbixpHQYyaek54L\nvQuxCLolJ581mRenH8jCpy8MZTv2v/0OJl3zGyavmMyL61+s9rxHH3o0v+r5Kzq06cCGHRt4af1L\ntU/5EFDw0RYzmHfDI54q24LPIai4bjz5Rh758JGQUg/v0FtX4lUkyXRt9Qc2q+oXgQq9DPwSCCkS\nVS0KbKv562smPPCAmblv2jSjAGJZHK4bOZyKiBnRFSJzxjMy4MYbK2MvwaHp8/KMKyyYiRI+mVa8\nkyFF+7GjX7LczFyeGP4E10/34gSfjDj40l02HDgZ/86NMa9f0ZitqPDt5/78XNPaDovHeNJcBgwy\nH/Syr5YZH3XxybUL84DLzAXKAjLXK15O7XJqSGCGE7Ec9T0rSteDu9LW1xaKTq19vLGiIVX2cWNY\natEEBVSwLtGC5aLjL2La6mlVMtLap7cPDaezfc923vz0zVqnEAgKZ3drf8qL8phVVACZ5ryOOjy2\n9DGGHz28yn1wtgysmmAR1nk1NIhn8SmVFiBEPK/lfWZAmYIaazh/1lYK/C9VsS4FQURC1khwnLma\n+kNVuBVc/871rP92fdXMvRgMzBzI4g8d3GD24Zen4f7sP7jVddotPgUtysOfVcC42eN48oonWVrx\nCyo+PxXNms8K7zJOe35ylfsfXefPv/ucB5fEaFlWgyB4PFWVm1e8XNPvGgCmrJpS7Tldddnn38dD\nSx6qkjWYbBdXMhXJEUBx2HIJcHIdjm8rIisxSaP3q2p4s+NvInIXMBe4TVWr5LuKyBhgDEDXrl3r\nWvd6Ez0ZVngHrK1bK1Meg9kjrmviK9mBPmfBY6IDk8Ec9Ntvr6GHcUHiBqQr3ZiNu
pWjFnuOms+I\na9fwxo+LI/aTklPRLYOR7gvQLh/WWKaiDD9mOJ3adWKKTkFD2VMLWagfwqawnaNjILGEeZAwF5On\n2yrmbJ4Ts0VXG5u/28zm7zZDVnm18RYwH++g01wWLgjfZ35c5zj32HPp1K5TzFY5UG0Ld/ue7aGU\n1Ihx2KgUxgA+j49TjjiF/f795HXP4++vFuKf8W5MhVzulNOpXaeQ7z5EVN+eCIuwKK9yvxkfxEx2\niI5ZuVLBP/57Ge68JVUsGA1kz9F1OaoaSrX+fv/3VaaLDmftN2vjnlqhsKQQ3XJLRP2kaAiSuTzS\n/RruAgv0g6oYPZSHljzE+F//ioItc1n+9XIUYloyQVdT1w5dK91qNeAVL1p8CrrlNLxHLubqc3sZ\nt+LsG0LlC8I1/a7h6RFPU1hcyNTVUyMUmIhJuIlIDInTUk0kqRxs76aqX4nIkcA8EVmvqp8DtwPb\ngXRgKnArcE/0wao6NbCdnJycRr+zEUOoR/VCPucc+PRT2LjRKJPg/CVr1lT2gL3ppkjLxeer7EFb\nXaerjIy6DadQkx81Lw+8Pj+uI+Bx0R6v0ek4D2lr/3975x5eRXXu/8+ayd4Bj1UwakEJBJEq2lQC\nFomUkIpFsag5pb961NOgUmnwSmvLkd4OWg+0tFbqpTZ4vECrrZ5S8Qbe0ACSILeAUdACEgIKFoOo\nFUiy96zfH2vWzJrZs5NAQETm+zzzJHv2zJp12+ud9X7fS8JbdBLvlMCf5pNqsbBfSeF89xycHkFz\n24BuX1g07mqkqHuRmuzujsJrprkw6MUshTIb7vx+dCNCKqYTb/geDUc9mnHZcUccx/Zd21vvEA2X\nRBf15yB6L8CRArFoEhQsQOQvITcnl8vPP4nqLed7PEKbIWlQKpaRfUdSeHwh/1v7v1nVKmEkrARP\nr3s66/UXn3IxE4dM9HYrz6x7hpSTYuW2lTgb/yurQBYItn2yjQmDJ/Db6t/6ajm3/QU7r6S+y0OR\nYXBaExzYLYr/MKzJ0j1aVw06rn9SqmABHxV9RPkZ5Wx7szfv1n2JLqeuYmXOPby/6/2s95sC0hxr\nzUfJzttBOAjhkJMQOL0X4YTVcobzKwivv9bn/4ppi6dxcteT2xwrB4eGnQ2tLubdjuzG4B6DGZl7\nKzdMPY2mZnAWtnBU8bOMG6UylV439zrSMk2unUv5GSruXXF+MZd++dLAy8a5vc9l/sb5WXdmAkFR\n96I2691RHEhB8g6Qb3zu4Z5rF6SU77h/3xZCVAFFwAYp5Vb3kiYhxIMofuUzDXOnICU89ZQfowvU\nrsS0+NKmiWbCrKIilXExiv8wna5++EPlt9IW2nJcKi6GH0xucJ0nLeSzd1B01QaqxpR7DnbsupkZ\nKRvpCFLNAjaWYOcvofCLhdS9V4dEmcyaTmV5R+Qxe83szAoZC4PIaUGWn6N089oT/9nfKzPisNVY\n/deRxoK2aXUBDA1xM8Li6NyjfUFiCCyR/2okl3PRud3oduQOjtr+I26vuIB0i9KPW1ecx/WXnM3s\nNbNJn/gKnLio7c524UiHa565hqsHXM0Pi38YXLwjIBBcfOrF7Ni9g4WbFma9rtuR3aj7Zx33PfG6\na3a9DfKXKMHTaz7YP/EW+Msv6sEnx5fx5FtP4uAw5805nlopgPwlbO65DCHd5Ta8Q/zXFz2LO5GT\nYuiFm1jsmn2LgoUM/VqSRZsWKVPrKJiqwRTwzB8BCXYz94nzuP/Jm2l5cJ4rKEZgXfEiVo8d/g6i\nlR3rlUVXes6jQgjSmwap+eNYYKcp/t6jLD6+JthmXZ727xISkZNSuyUX6z9Yn3UMTITnk0AwtOdQ\n9qT2sGJpkvc2ljC3z2K6dTmK5mYBjuJwpj38Ks8338LgEwdz9wV3U7u0k1IdbulFDcpJ8rE3HguU\n25oQ0Zjw7AQKjy88oOqtAylIlgF9hRC9UQLkP4DL2nOjEKIrsEtK2SSEOBYYAkxzv+supdwq1F6+\nDHj9gNR+P6K01N8pCBEUItnQqVNmoqylS9X9nTr5/EdVlRIi2unq9tvV7iWVaj2xUnscl7rIPlhC\n4ji+02Nxmc+pzEjV4YjdIBKeeseRDm/88w2klFiWxV0j74LNxcye10j/wTuZ8Oxl0Z73xsIgU9JX\noZgOjPWlWPlLvZD2tmXz5d49WGWYEMuClwkTII50/EUg9CY79Oe3Ui1u9972LWHxo7N/5C9ErxyH\nk7pQ+bqkFX9we/VvMnPGtBNpmaZyRSWdcjpx6Zcv5S+v/0UZxFlWBoEukcxbN4+Tup6UtTyB4OPm\nj7nmj38KEuT6DT0UnHOl/SFf+PgLAZWOI51IYZKWaf+8qe6y0rDuAi+awY9vfZdf3/wrZqyYwXVz\nryPlpFjcYHFM52No3N0YXXGdrkCnO5CqNaQE6VWXkT56c0BQOBuHYvWo8SyjRMFCsNPItPCjYG8e\njKj/Oku2n0bhx4/Qqc8Savgdsr4EnaNHOi0senMt1vHh+gTbV1BaxZCLNvDwB23vNE2EyXKdqE5H\nzk4/9AsvDtySKx4C+0rVRleFuGrbKlZtW0XinRKsP71EqsXmwTtVVIvmExYEniWR0ULEyJAqdx8b\nSLdwoHDABImUMiWEuA54DiXmH5BSviGEuBVYLqV8UgjxVeBxoCtwoRDiFinl6UA/oNIl4S0UR6JZ\nuoeFEMehlPergIoD1Yb9hdZI9CjYNpx2GrzySqY5sVaFaf6jtDSY/tdx/Pwoe/YoT9uMqLGba2j4\nsMELRZHNcam0VDk7KlWZyFCVNeY9DedvhLXfgn6zlSWP6+jm4CCkoHZpJ2beVEhzM8x/MIVzxu+Q\nZ8zEyl/KKbuvQNSX8tYXZpAuWBDNSxjnrN6LuHfUvRQeX6hs/htHccOU05QXvuWoHYyhtoEIfXHo\nTXbPhsHIPr4F1bgB4+iS28UTslavlxA5P0W2OF692kPwtgaJpCnVpN4uXSFyzwX3ADD+mfGBXUpT\nuoldLbtaLevhuodh483ZOSXDAXNtFg1hNuLeka5zpiGQxIcFyJXfA2wEaT76IIepi6bS8GGDJwzT\nMp1diGj0nwlbi+CdM/ESsWEpjmLk9aH5sMAzzX3sjcdoQbqhItQ4896X4dnfI1NJFs7XmUFHwJgF\nGZyPLHgpw1Q3LHDr85dwRM5prZL+UbjolIuYt36eZw5d3KOYhQ0LlRHKonMCY7Rq42YoPyeS52l5\n+2xEM0gH0lLChrMhJEgiEZEhNZxu4UDggHIkUsq5wNzQuV8Y/y9DqbzC91UDmSFP1Xfn7Odqfiow\neY3CwmDQR73wa/+SCy9UqixtnRUWJratBJLO4X322UrogB+KpaVF/X3wwWAWvUDgOUtZg4STR5l1\nbi3yaF7jKHi2j2cJY3Vby48uGeK9zSftJNuqR7Bnj7u7Stuw/GpYVY71zR/x9nN30dJi4YjRMOZc\n7CvOc8n3l8ktWM31Z13PU8dez+71Z9F/
8E5Gnnwvjc8XQilMGlrM1KmQ0py6bFHWRS70rsWD8ZZm\nJ9I4KUEyaTH23/tQ94bvKKb10Z5TWq9lyDEjcN4+u91cSFuLj+cIqB0gpaB2ay09j+7JpV++NMNM\ndNOHm9p8JtkEcTuRtb4uIa7VgHav5QxouZalq5pAJpCWItLlS9WK+FX+flnbnEFoWymwU5AWeNyE\nY6vcPd7CvsA1rYZH33iUtJOG1d9V5shY6vrasS6/4aqmTIE69FcZVmfNmyKI+lDEgzXb1wTrHtE3\nZhm5di7djuzm+fc40uHdj9/1r4/i/bJF4C6oUmGDZEL5b2ljDveZomAhX+jzOh81h/TYnorOz5Bq\npls4UPgsk+2fW4SJeL1Tqa1VpsNPPAFz56r8Jo2N8MYb8Je/KIFiWfCd7yhyPixkLAsuuED9r3PI\nt7So8kH9beiyznvbxoGeR/dUDkxZgse1Fnm0cW0hllS5QYQjGHfMw/z63F6UnVLm7Riuv7WboaJT\nYVqEk8uAD/6HFS02ThqlGqsfBiW/4eqLv0zPo79JacFvYEsxXY6G0pvU3WHTZq0ybGqWOJbasVhu\nGBMtRLRfhPzTCzgtbkTiCybw/VNvprysF8XFhRQOnO95NWvjA+0d3PBhA5XpSjjR5yi0lZQZLgVQ\nQSM3ncuQkhYWOdMiHcaqXmni+fnN0Hm7l3RM5i/1ogXr8mxhc8qxp7B2+9p2vRH3/+puXhMjPAsg\nu2etZxK9L7CwkFtUeBCtLjv1pus5pf8OnnjrThjzaiaRLl1foYiFt0/XPpz0yeU8P3NiiNAGceJK\nhn65DzUvd6WlRako7d6LyOlVS0v+UiVbGs6C+mHIgoVqGtVe6ZYhFVeztcj/DJkhffKX+HxYK0R9\nNvQ9pi/rdqzzPostZ3t9YyXSXPQ/v2fiJUOp+2edUgciSdpJzmIC6xdt8QVWO3g/Xd8Mk2uj3tJu\n5qOoepvCChthOeQmrQOexyQWJAcZ5kI9frxv8tvcrBwVhw2Dxx4Lhlp59FGfuDfhOEqA2Lb/nePA\nzp3+IpyTuBy7/AE48RVPpTVjhnKUTKfVLqg9qVxBCT9bx/vKtSkv66Xa5PqpjP+vTbSktHbSzyaZ\nTNqMvbwrdctcIUAa8WEvxJazYQCByK1acIwZk2na7JtCC/L6baAx75s0fFjIfSvvCyziJ+2aQWUq\n4fMcn3SBob+iuPher75AICTF5YWX8+dv/ZkZK2ZkLIoSSY7Ioah7kQrwKB3YPBhr1stIJ5eaRWms\n7y5G5leTa+d6oUdmzKnj+V/0CagdsJtxxgzP8GmQSEp6lrCucV3AlFmgfC4uPOVCvpT3Jao2VlG7\nrZa69+rI6WVz1cWFlJ/xa+r+WefxFZawMoSSdsCMChdjCTVebCwNqGL+seIE3uz8YMDiTl/v7f4E\n9Mvrl+Evsv6D9WxY1BwktJHq/61fZdkOwQ9u2cDt8x8i3Ws+Ob1WcuPgG7mj5g5aNp3pmRpLu5mT\nz6lmvZHDhu618O6Z/udTn0CcuJyTB77DuiP8fv3KF7/C6vdWZ6g3rU3DuejcbsqooWEhYUgk63as\nU17lx53KjWfdSO1j51HpdEJKC5HOYVDLROpW1HHNPQ2kO41B7D6eoYVF/PXWi6BFxT0795Zf8fzu\npgDvJ+q/jui51IvzptVtCStBuufS4LxoxcDAE3SBDKmN9DtyKDde0p/i4kgFz35DLEg+w9iyBR4O\nuRVI6YdHiYLKER88Z1qNgc3VXWbS8+uPeAv2tdf6Ze7Zo4SK47SeiKemRu2KtNPl9Onq/NSpfmC/\nB3ZOQlpzQSbIzbW48b/rvYio48oKXRWf4P4HkqRWXk1qVTkz5Ahmrh7OmI/W0tzcyxMcEG3a7Avi\nQqCQms01gai6k0snw8m9uG96M2mD54D+gfboDIsaD9c9zIlHnUiX3C6Rb9gtTgsfN39MwkqohW7B\nfyNTSaRUwQHFxqHYPZcw/fzpnqC6//ENkO6HqXbI5iNjC5ui7kWIVcL7fNPZN9Elt0vAXHvqoqms\n2LpCBWBMS1ZuXemVYRoElPQsYcOODTSnm7GExcDuAyntXcrvl/iBLTUx7EhHCYaCl8D+qacucwpe\nCvSDQPDjIT+mT9c+XPPMNR5pb765m5CaYHcJ7eP7vMv29b2RjgpauGrjZhg6FWSalrTF39f8XS2s\n9cMCC+iGHevJSZaQakmB1QxF98N7X/HVekN+g8xfwjrw/Dr6d+vPyJNHct3c62gJcSY/uuxMyoZ8\nk6r6KgbnD6ZqYxXNTrMXZ81z+MNhXaPbtoIFJJOXk2pR8zEvD8Zf8iWc5smAjRRpnn/Z8YSGk5Ls\nfLM/FEwLPPvHlw+iy8m3eRylNuHudmQ31ry/JmitV1ClLBpTmerLQJ8bQn4N93HD67kUDtx37/b2\nIBYknyGUlytOw8yuuC8I71RWrPAdIZNJXJWOCs419c9B9Zi2KtOkfTanRi2cHEfdU1trJu+CMbev\nU+axrj/GyPOP5q6myV5E1MKB8ykuVrGYnLSFdACZwNk4lOb8Je4Ptdwrr7wcis7zU9CG37B81VxE\n0Lp8+MOjb3HNPY+R7jWf3IJays/4XZv9+Pc1f2fWv8+iU04n9mwsQtYPU7pq90f65vtvugmOXiTd\nkqOIX+Fbj0kpadzlE84nFP4jaKkUEVEZlND45pe+Se1WFf9Jo0tuFyYNnUTN5hovqGE4O+HSd5ey\n9N2lJKxEwJjiqE5HKdXZ5rNI15eyrPdCVm77nbeTsLA4t/e59E+N53d/WYnT88VAyH0KqsjpuYK0\nYwUiG3+0R+noc6wc5TwoZUaMNQ8hdc37wka+/bwad6uF405/A+tDV5BhWNpp/sflFmS3FTj9/wwb\nS/zx0BGmQzyWg0P9znoaPmzgufXPcfcFdzN7zWxeEN9AbizB6r2Ij44rZPismRmm8DovyX0r7/N2\nCiknxXVzr1NWg+UPcOEnf6HbF7ozb9E2nJZjCbwkyJQyAqEF7BZOKPwHnZtWsWfMNxCbSvnRZV/l\n11eWYZIXeiep47WZRgGJXiso/tktLF6UUAEfo8LURODTCCcfC5LPEIqLVRiUadNUkqz9BceBK6+M\n/q60NBhy5Yc/VNyMXsA1qR9Wc5kmzcmkOmeqnqgfRvKoJM09l5HsvZpup46heWWmuXEUz5G0k5SP\n6kt5/2Do8gkvDQ8KIiM5UJA/KWbS0OCPZlxZIYUD/0VV/RGUFtye8aMqP6OcGStmBBbBb532LRUJ\n93Q3KnOLjbT2eDp1iST99lBI5ag3T1Jw0otQegsi/1WSdqeANdzES4by5FsjVM6Zzu8jdh/PN4Yn\neDm1grT042HNWz+Pp956KpDkyVNDuia22lltfvl8LzvhC2+/4C0qKSfF9wd+n55H9yTviDyunXut\nil1m6NhTY4Zj91yG7aYmGP2F3zLhskKc5oux7Z9B+XDS+a94RPKNg3/AI689wpaPtyCRpDadyR8X\
ndMXu/SdkfotHMOuQJ5awcBwn0KdWz6VIl6twIECoP/bh8oDg9JBfgzXyhzhPu1F8592Fc8U5avfi\nXZOFtHahI/E27mp0E3ANpzn/VTcSb2GkKbw+iroXeX2uw+870kE6KZ7527GkUxJJV2U44O761UuC\nnwgucVI1Ey/5FRPx+bjGXWup2fzFQIw706s95aQYN2AcoCIbPPPSDha9bWOftICKi86gqPsV1G6t\n9RJw6cCrQCBE0KcRTj4WJJ8xFBfD44+rzIuzZ0P//srB8L772ud/EoVkEo46Cu64Q5XxwAPBHN+m\nZRb4Do1FRcFdRpg7Cd9nJlAqL+tFeY9gwiBT5ZTXOMoTUGGeo7TgN95OQguvqYui/V5qNtcw+aEm\nmpqH4aRF5C7K3K1MGloceKM3w2+/ctUr3Pzizbz9wdtc9pXLKDulTJm11lyGk0ogHbBEJ07Y8V3e\n67VCvZX2WYxYLGlpkThWM9bXbyOn10qu6v/9SGu4nF7LaPa8/wWLnE7cfcHdXuTWqvoqP/KvA1cP\nuJqeR/f0+tBcaJrSTVTVVzFp6CQml06malOVp57TPIcu03GcaB17z2We5d6su49iT5ODdCxsklx9\nzJ9hoIryW9S9iOvnXe+r/wziN203Y19xHlaPaiU0pHq+NsG2hc2QnkO8yL0Tnp2gcq9sPssPbdNz\nKWlJ1t2Ms7U/OpsoaQtr9RjsXsvbDIVjBjTUC2o41Hp4boYX3XEDx/km50fk+VlI64eRarGQjgBh\nqYyiRzcoayzXkELvGr556sXU/bOOxl2N5B2Rxw3zbvCed+fIO2nc1UjDhw1qnAwUdS9i3MBxjL93\nFi0PfhvSSVILmqHobxSe0ZfGXY3e/eHAq9MWT+Pdj99l7ICxh3SsrRgdgI7ZpVFU5EcO1h7v2mRY\nCF89ZVnqvGXBkCHKH6WoiAAP0tyMm+M7GNgx/Gavr02n1Y4lijsxF+ywqXBNTTG8Ugw5ruBxf7x5\njaPcFLV+WSoMfiFZrL6zpkQdPms4Tc4AHOt5LDpn+LtkRFR+pI4JbwynqX4A1qbd3HPNkYwrU88s\nzi9mwZXKVj9gJr3zOXIS85FYONYe3s17hBwhuLroasqvLIcrbGXO/a+P6DZgHOWjfhP5w62qrwq8\ncUuk95ZsJnIy22kKo6mLpgYWGlvY5B2R5wnFu0be5UUNTss0M1bOYObqmUw/fzq5ObnsKViItJvB\nwTUpDfrEPLBzjMdp5SQsVwV6r/dsHV0ZyBBKF+bezq6Tfu7lbHek45VtY3N+n/O9NhYeX8i0Rxcx\nZ+Z1nuXUudfNZf7rq5QnfmhnYQlL+ScZ576W921O+9f5bDvuUbqdupFtn2zjiTefiLSUU2//jZ5V\nnl7QzYW3rcyOVVXFlJYWUzxQnbtu7nWkTBNdHRYmYlckkcx5cw5z3pwDm4tVNIaCIshfQlO6iWue\nuUb1k2WTsBNeEisppeeVHuaJtr1xKsN3lAbUcWxR+YfUDh6e2/Aczelm6p6tO6Q922PsR4wb5ye5\nysuL3ink5SlyXjsyLlsGv/qVuifKsTH89m6S8mGCuz3cSbb4Yn4d1Y+xau3eB5eMStijs9k5PRZj\njRnBudZtTL6itNVAlrPnNSrB89DzOOkk1y2UFL6c+XzT858TX+Hq3z3M26t68qLzM5wei0k7tm86\nvUXvxrqRnF1OkQ1VEVZvYT7DfEturZ3m/TnvDqVlw9lYvRfxg0vO9tLzJu0kY84YE2iDqc7xhPio\nDdz/+AaWJn4dWPSq6qsCnNaVo0+huLg88OyEbcRZO6kaXoFUS5pk0mLi5YOgh8rZrtunkWPleIJf\nt2tQy0SedE3HaAHyYAAAIABJREFUSdnMv+vbOPLfwfqJSq6Wv9QTBrVba9l2xE6eWS1JtQgsW7L4\npaN55cVjyE1OZP584Iwanlv/XMDIwuy7GXPquPaX/0e653xkfnVGrpBsmR3DcfKuugq29dmp+KYe\n1Ygx33B9n17yrdhcK8WM3dXmwTDzRRXSx/6ppyL1CH0HLvzShTz5jye9c3rXWV42ift/n6KlJUUi\nIeh2+ps0bzdSNz+9jpk3FQc4yk8z5W4sSA4hhJ0ag2//wXzYoCywJkyAE07w0wGbRLxlBQM7hnmP\n8nJ1RAmvbHbpWo3U0JB9NzN9usuLNAHCYemOZ6nZ3LXNiR7+sQd2KQUrmVyeS3F+8J5wm0aPzOOl\nP5yD477dpVMyUpCFd0Dlo/rCKFg0ayXNaTsgAExhFbVz09eUliq+RYeL6XLy2sg34KhFraYGpt1T\nQOqxFyBtkbMYPhrwcGCxALWbUYYBJYjeC0kW1FJaUErdiiOpmlfM6JEw/bYvUjpzJS1pZQIccMJ0\nOa3yUfMz6lQ1psqLs1Z+VTl1F6wNGT8Ue3yN3pkIBFf2VwSdGdtt+umvkmMX0uxuM9Jp1xRYJhD1\n55Dbe7UnRDwO4Iq5XJyYxlNLV5FefhVIwZ49klmzBPfem10A19TAdf9xqkpra98Mrrl1WwtsWG2a\nTkNlpUTa18GYxyF/CcmCFYz8xvHMeQGVY6egCpn/Kt8f+H1AcRueqnJ1ue806aoW7Z7LvCRoAYdO\n7XjY+xV3ntUgxkxCbBiC6LOYokGXkXzWyLhYPyyao/y0Uu5KKT/3x8CBA+XnHVOmSCmEVnhFHwUF\nwWtsW8rKSnVvdbUqp7JSykGDpCwr889pVFcHrw2julrKzp1VucmklLm56v+cHCkty3/mlCnqOXZO\nWiJSkpxPZHLcMFndkKVg8xkN1XLKwineteHP2eoVaOPjr8lEbrO0bEd27txKeyLKrm6olhV/mCkr\nJtZ795ntDre1oiK6T1p7brjuFRVSJpJpCSmp4sFIadmOrJhYLzvf1lnat9iy822dZXVDtdc2YaVl\nIrdZVj7+mqx8/DVJ4hOJaJEkPpGVj7+WtW2t9aXZj9UN1RnPNssJfzdl4RRp32JLJiPtW2w5ZeEU\nWVEhvfaovymZyG2RFX+YKSuXV8rOt3WWYrKQTMa7b8SsEdL63hCJvdu9x5G5udF9qZ9bMbFeWrb7\nHNEsGX6ztG6xMuodvrfzbZ3VsxKfSCEc/7ckmiVn/kEyfJIs+82vVf/muP2b84lMXF0SKLdyeaW0\nv/e1QJ2xd0vGFsvcX+bKiS9MlIlbE9K6xZLJXyZl4uoSr7yc3CZZWSnliHEvq7oY/WeOlzkH9dxq\nz2+jLaDCWbW5xsY7ks8JSkv9XQcEIwdr1Nf7HAqot5drlHqWZFLFALv9dp/UnzcP7rzTV5tFOSma\nHvFBfxW4+mro2TN6NzNrFqTTQlk7pRO0bBjS5vY7W8TiNncyIZVb49pC7r4TajdsUqalPfoCbW/7\na2pg1qxiHnywWAXFvMvnisxYamZbwe8T06m0NZWeGe3Aj8umQ4hIII2d42QaNGwpZva9kG5RMZqc\nlEXjWkUSk+qn9Ospyex5jYwrCxKzugzNY4RTDISjTI+a
8AHNX4hWnWRTz4U5rjovurk7IU99krE3\nfMy948s9taXpQJm0k4w+bTSLGiawu+ghFW4Hm1QqwsAixHElEvNploprufSs8zn960eR1ziKqj8X\nUhcxt7VqU6tNz9x+J6ufHUBLSjnQ6hAvTy1y4Ds7sGRnL8LD2K6zKM7v5dVl3MBx1B5zHpUyiUQo\nT/yihyC/hpRjs2rrKp9XctKc8tH3WOPumFNNKa651kHKYYoHHDOCZMHKjB1GdDijtn8b+wuxIPmc\nQEcCnqU0DxQVKSERtvQKcyX6+z174Le/DX7f1KTKkNL3F0kk/JArs2YpvxedQ0WrrEzVGKjrr78e\nVq2C0aPVuQcewF0/lHNaos9iSgumtpoq2Azvkk0tkS3Ui/7O9/BPI8vHkN7+CjNnZYbRDwut6ae/\nyoTLCv24YQSFQTa1I/jWbLat+lD3V5R60KyjOT7u6IHdjD1gFnf/pNhTJ4UXeh1KRz8j7708nn+w\nGVoAIenf29f/RQlnIONcVVVxIMr0U9PPx77qa4EICSaisnCGhUtVozJ2ko5KnpaTX0v5KJWx0VQt\n2pbNVf2vChgeXFP/J9KryiGdwLItGhpsamoyBYHmuIZ861Ve+evZSMfm73cMo6TXMCZMwBvPcFTt\nsNp0+i+aYIuyLly6ZidPPHIcUtqkW1p46q2nSCSuIoVNMulHeDBRXtaLmXfpuSeRA/5KWptdnzaa\nRQ2LvP7+0plbWTPbdVoU0nvhsuiseMDy3MgxKi4u3udEdh1FLEg+R4iKi1VRkbkziYJWeIVhLmT6\nTXraNHjuOTIW1cZG/61o504YOxbeessv27Jg0SIV7kSVKxBC8tVRa5j+s6kZYVE0v9BaeBcT0QS/\nL1TMHZMjgQ1DkCcsiBRK5kLUVD+A3zzxbzQ1+e0VIrswyGbNltevjtqttVA/zLWIyrzXrKOb9NCF\noKDvLo7v9xalxcNpXNuHmi9mGkpoIXLuuTB5slsXCtlw6wZ++7MCpExw1619KBvm9onRTt0PDR82\nKPNc16qsqr6K0tLiQJRp6VhcZURIaM+bbwbHVQqdcpVXu50jufua/0dxvm9Bl43zqF3aCWdjifLR\n2DYQZ/X3uO++oBViQBC98zUWPzbYq3tTk4ppt3u3X7dwVG3z+X4MNpg0qZiamm48838ttDSrSAlO\nt+WcccJABnQfEAiQGp4T/o7Bhh5Tg21zUy2MHplH4cB/MXfdBSq1b+ftKiZXOkEiYTF6YClVf4aG\nLrMC4zbr6XVU7SxuM6zRAUN79F+H+nE4cCTZUF0t5Wmntc6d7O0xaJDPA+jD1FNXVma/N8wbmFzB\nlCnqnMmlhM9VTKzPqvc1r7UsKROJkM7Y0CPndkrJ5LhhkTp+KSN05JbSkQuh7o/ikHR/m3yM5jjK\n/nNrq88z7zc5lURCPTORUH2s+92ygn0XpSPP1jdCqDqZ7dT1qlxeKZO/THq8RO4vc726VlaqepjP\nrqyUcsQI9be9CHAtbfBuUfcmkinFF9m7pf3VSonwx0aXNWWK4sI8jsQKzsFEInNuJhIRvGAWLqjy\n8ddkzjd+LsWF4ySJT9rk29rqgyh+Y8SsEdK6xZKMHSzF8J/Ish89Ezl/k+OGydxOKWlZiqPbm7Fo\nC7STIznoi/yncRzOgkRKNTGTSf8HY1mZgiB8WJa/gJWUBL+7/HI1YfVnc2GSUi0s2crNzVUTvaxM\nCaTKSuOHXxnxg2pjgQy3MxvpPWWKf41JGLdKLjdUK5LT9hcq3XfhRTyq/pWVZr9rgnWwR5Zmg7k4\n67IrKnxBYC6Iui0VFapPKyqyCzhzDuhxMBfcMCkuJgtZ8VRFRjm6rWVlwfq0ZwFrz3i2Ni6KoPf7\ntG+/jzLmZrY5pBfasrLMvgQ1z70x1UT9UxUZRgLZ5oc5z1prf0VF0OjCHNvAXA0JsYqJ9ZEvVWFB\nGSUQ9xWxIIkFSQB6Ausj6ocUJUiSSXV92NqrpET9jVpUwwuMKXDKyoILmn7T1m/gUYvh3ry16msn\nTvTf5s23tPZYnoV3FHphsm2/HyzLX+yzCa8RI8KWdCkphv+k3TuSqB1HeEdSWRkU6npnGCVczHHU\nOzbLdjzrLimj38Cz9Ul4fE8+uW0LuPBiGF54s+0A9P1l/7k18Mzjj8+sg7krHTEic+ej6x+2chTC\n7dPHX/PqkPxlUub+Mjf7zrUNwRhlWWU+V/8mspXRlmWWroM5ByyrbYHWXsSCJBYkWRFeHKMWfL1g\nCaGERrYdTEmJ/2ZrLt6WpcyNS0r8c1FCST8j48dcuXcqD90u/bacmxssUwspsy5R5s3hHYUur6Ii\nUx2i33DNxd1UpwV3JMqEt+IPM1s1x4xS70W1z9yJhMekoiL4XHMHYgo9ra5DNMucb/w80qQ6avEy\n6xi144xaTJPjhkkx/Ccy5+LxMrdTKuvCW/GHmVIM/0lg52YKl+S4YTKRTHu75XA9Jk6MFrhRY613\nBuGXpBHjXg7sQiqeqmi/WXRox5ttRxE1Nu2Z71FC3fztZWvvvqK9giQm2w9DhM1VzfAptg033aSI\nau3AuHBh9rIWLlQkd0so5JGU8M47KvTJq6+qc0IoazLTTDmRUOSwfpaU2cOxtAaTaJcyaH0mpTpv\nBsJsbs5MQ9yaY+GYMZkWb/qztsaKIvh1NkyA8nIr4C2u621aeDU0BCM1m2R+lDGFLtvEtm2Z42Ea\nQ+jsnE89k1ZpXO0WnF4vUVXf2SfEtxQrUtdwLNV9Bn4dhYCuXWH7dvW5qcnvV922pTs+oPmBuSpO\nlN3MqAnPMeiYCyJNyR/84eXIJgn2T7GvusCLFWZaYF14w7Pseu0CjjgCnnrKv7+kBH79aygrUybY\ny5Zlj8Sg+7K8PGh9aNtwxK5Tsd/xLdK0tVhNDX4IEoLWgVEhhsLe5RQswLbLMywpUyk1NpMm0SZa\nix5x993tyyN0IBALksMUUal/wQ/k+NFHKh6XlG2XFV60NNJpZR2jF3f9g7nrLnX+hBNg4kR17bRp\n8OSTvjAxE3y15W8R5cPSXphlmF7w4ZAwoBYZs3zL8hOB1daqc4WF0QtWtmeb4Te0abBtKx8cPRat\nmTSXlwcDegoB3boFhTUEhZI2R5YI+MJWKHyE3IKVKitlRL200LDt4IKr6zhrFvzxj9nbhjhfmbK6\nicW6Wad7Y6b7CNTnVIuNSnoouKrLTM8fw7TAmjfrPFItqg6W5bf91VfVc0GZmuu5m5OTPRKDKVCm\nTVOC6clHupFIzufq3z1MUfciz9dE+weFzbj1i05GiKGQd3n5qL7wWmZftVa/1hB+XnuF0QFBe7Yt\nh/oRq7b2HmGdfFte81FH2DomkVDqpbB3d5gINg+TOGzN0sVUq4XVT/qZZlvCqh6tdjPVWWGVTlgt\nN2hQZl10ORMntm3NFLak0mWHSfRs3vCtqTV0hIL+/X2jhvAz3b2bBEdOnLo+sl7ayi5M+IcNGJLJ\noMowbEGnoxj
YOWlP/ZSNBwqrFk3DiDDHMmhQZr9ls1Bra76HeQbTutBUYUaNVVT9oww6ws+Jql97\nOcE2+Zl2ltMaiDmSWJB0FGGd/MSJQa5E8yfZBInJtegfnbkQa1KwtfAugwb5dTF/NOai5hHHli+8\nwqaQUfxCeFHV/EyU4NKfTcGo+YDWOANo3VRY6+hNowO9iIaJ2dYWrcCiW52dJ9AmvOE6jhgRrJdp\n5WT2YVZSOKKvogR9mFcyBZXJMUQJ+agXiI5a+mmE52AiEZxjpsVea6Fu2rN4m2bUuZ1SAd5sb+ue\n7Xn70gdRaK8giVVbMbIiSi1TVhZUg4FSYZjOXRpSKtWDVgWE88w7juJoCgszVTEaffuqxFqmrn7P\nHqXjj1JD2bZyhAR1TW0tnsdzlIrJVFdJqcrWKpcodZLJk7S0BDkDx4lWBc6ZA3PnBnPAmH0Eqg06\nHI2pqjO/N9VTrak1pk71Pdx1nTW/MXOmnx7ZbEv//sEEZtOn+2kLrr1WXTNuXFQYDgWTJ5g6VY3r\nmDFqDLp1U6pS7RWv+12IoLrMTCkwdarfPhUsUdV9+nRVLvh9GY4kUFWVyVW1hby84Nj94Adqrs+c\nGexL21YqLp2zJzyerakyNXQk71lzNvHAzjHcZ0RXMCMImA6S2ZDteeH50Z4I2x3BARUkQojzgd8D\nNvC/Uspfhb4vAaYDXwH+Q0r5N+O7NFDnfmyQUl7knu8N/BXIA1YA35VSRixBMQ4EoiauSdzPm6e4\nDjM/ytixmTlRQC0kjY3B8C7btsEbb8A6NwX1ww+rMnJyfH24lJlxwMz4VkVFZowqtQhdfLHiY8Jh\nVwYPDhoTSKm88sOhw3UU5DCJf//9fviYqPhmGlE5YGbNUsJIStWu2loVmwyCfI1ZB13/cFRjU8e+\nc2dm+mQhVN9q73fbVgvlrl1KiJhZMefPV/2q+zqVUkJF8z9tcT5mxAM9ByzL/1/zSo2N6gVBczxa\n2Om5lEz6ZckIIwz9IhMmu81sn2EeJhvf1NjoC1ch1DX/+Aecdx68+y4sX+4LwjvuyKxDNqj4bOr/\noqJQpIXUI6RffoX0sivZvfbbTNv5ASP7Bl8A8vJaLz8bWpsfBwTt2bbsy4ESHhuAk4AksBo4LXRN\nAUqIzAK+HfruX1nKfQwldAD+CIxvqy6xauvThcklmOqYysqgCihsKtoaVwJSHndcUP1QUuL7xWgT\nXe3oGHW/6WNhqn7CnMqIEdEqr4kTW1fjhVV+/fpFczZaFWG2VZtim2qoKHVcNlNTsw/DarawSias\nsoriFNryTYh69pQpbTu6hj2vTTVPWGWkx7S1KNJmfTL9doL+Nm3xTVG+JVrN1VYdso1FeD4Hxreh\nWuZcPF56EYFx5BmDdkpEOqPP2zPu+reQTTW7L+BgcySocKrPGZ8nAZOyXPtQewQJKgTq+0BO1DOy\nHbEg+XTRli49POE12hMKv7WjLSdL8PXx2fxnshH3lhUkdls7NJEfJRS1kAkLuyjuSMrsC21OTpBE\n130bFVXAdELs1y9aYIW93qurfU4sfG22BTksfLKNUZjnMT3OTV4nvIhG8SHhcqKeqTm0bHyTRmVl\ndBlauLZm2BDlw5FtPpvPHlSyQ4ZD6WPvlohmmdspFWnM0ZaxivnC9HkQJN9GqbP05+8Cd2e5NkqQ\npIDlwBKgzD13LLDeuCYfeD1LmePc+5f37Nlz33syxj5hXyZxWzuS/XHYtu+0pn/k2vu9rMz32A8b\nCoQXe/Po0SNo0aMFZUfqefnlmREITGsh89ChVKLeqG07aLEWbk9ZmRqnsrLgvf36Ze5eop4Rtjoy\nd51RoXi0FV3YSEHHLzOvbW9OnPCOSkdl0M8zdxH6mmzxyKL611yYtaOrbft9km3n1taORPdX8Hkp\nyZl/kGL4T2TFH2ZGts8sq6IicyemxyQm2xV6SSnfEUKcBLwkhKgDPmzvzVLKGcAMgDPPPFMeoDrG\nyIL2kI5R92iuZM0aWLx47/1CTNg2XHih0nWvWaPOpdNKx/2d78Ajj6hz+if4zDNBnxgZmjXmZ53X\nJZmEn/88yNFo3woTxx3nO+21Bw8/nMm5aK6oqSl47ezZSvcejk6seSFQPNXatZntmTNHcVo5OerQ\n7Tev1VxW2ABAl3H//er/oiLF85hcQ1QagzlzlL/GTTcF9fjdugWNAJ54QkWZNrNMRnEGYT7ATD0d\n5tBsW/Fie/ZAXZ1fbmlpJuEO6vo771Rzc/x4v+81p9XYmMmb5eX5XMxdd/k+RkcdFUylMH684q38\nNksQDqL7Kjqd9ScvS2VeXrBPdR0dR/V9KhWsdzKpytX80qFOtr+D2jFo9HDPtQtSynfcv28LIaqA\nImA20EUIkSOlTO1tmTE++wh77k6eDM8/73+vPe9XrVIk8ZIl2T3vHQcGDVKLaUmJT/SnUkqImD8+\nbR3UHmgB1a1b0HJIGwvMmqUW1dxcN/x9DuzYsdddEahfTg7cc496zs03B9u8Z48i2E0IodquSeh/\n/CP7cxxH9UleXrSwkxKWLoWRI/0FW98npRI+2snOXOha608t0E1vbFDGCCbBblqbmblWcnN9o4Xo\npE5BaMuunTuV4yGoNiUSvvFB//6ZwltKJagLC6PbUVqqxkb3iZTKIMCygpZo4BtwVFX5/RaEAJlD\nzvN3c/3ZE6n6cx/PEVJfGxZ0ZhlCwNChcMwxSlDra/fV4XGv0J5ty74cKCH1NtAbn2w/Pcu1D2Go\ntoCuQK77/7HAOlyiHvg/gmT7NW3VJeZIDl1k82kwoaPlTpwYrSvW15h+DPuqcjLjaYX9G8LBKMvK\n1NGv374/Tz8zHAm4PVyNSaa3RYK359DcjzZqMOOZ7csRCGdv8CAmB5NMRqctEMIPyGjOlbaMElqL\nTG2WHf5sxk4zHS+rq6ONO8LGJu0dM31oHmtvxy3KKbesbN9/fxxsjkTVgQuAf6Cst37qnrsVuMj9\n/6vAFuAToBF4wz1/Nsr0d7X7d6xR5knAUmC9K1Ry26pHLEgObewN39IamW8uVlFcjCbUs5HqQqjv\noiy69nahMJ/ZnuvCud+jnArD5WqCWJPUemEK69P3duHXTpS2LWWXLpnX6MV/bwwTopwK9bOy9ZHJ\ndWhBrvku0yjBNC4IW95FLbyDBmU63+rz2lCiNV4qLBDCLzhRY9W//96NQ7ZnRp1vj2d/NrRXkBxQ\njkRKOReYGzr3C+P/ZSj1VPi+aiByMymlfBsYtH9rGuOzjL3hW1q7Niq+2LZtyifFTBcMShVgOkhq\ndcrYsUq3Hla/QHanymzIyYFRoxQXoN6RopFIqL9mLLGBA5VqxoRWy2gu5cEHlepD+1R06RLOA9/6\nc8OQUqnJTJ7AVKmZKr+PP1a+F2b5OnPjEUf4bU6llOrIdDadNQvuvdeNvZXyOYgePVQgUF2m7vuq\nKtUXuk2pVNBfSaOpSfVBZaV6Zv/+yrFwwYIgJzRggLou3Ddmf69eDVde
mekzE0Y6rVJYh2FZkJ8P\nmzer+19/PXsZYQwdqgJSak7MfH5YNZdItO3rsj/wWSbbY8Q4YAhzMWH9+lVX+UErw6lrtRAyPbLL\ny32SXQunlhZfp6/JeSnVIq8dDEERyk1NahEwSdW+feGUU3zCXAdbTCZ9gWY632lBoR39ZswIOtEt\nWKDqX1vb/oCcYaxenf27IUMyDRZAtVdKVe/Jk/0267aMHq36XztAPvig4pgaGoJBGd97T11v9msy\nqdpsRgHOBiH8/hk92ifgw/1QVKTG2LajBRKofl+ypH19qMdTQwv6LVt8IZntOWEkEooDOeUUxROG\nERYqY8d+OpGAhdyX2XSI4cwzz5TLly8/2NWIcQghHKI7KpR9a1F59Xd64YoKPR5VTrZrop7X1vNN\nAwPLgttuU+FHTA/wcGh8DS3U2muAoEnrqOsHDVJe9GY9w3UfP94XbtoTH4Jv3ELA97+vvP/Nfq2q\ngp/9zG+Lea+JkhL1Jq/D6pghW8x2/8//qH6aMcMPE7O/lkkdNRl8Qd8eWBZcdFG0oM6G3Fx4+eWO\nCRIhxAop5ZltXtge/dehfsQcSYx9wf5w6DqYyOYoJ2V2T3CtszfJ/XBgTjMNc1mZykc/6KIVfqKs\n0LE3KXjb4owmTmz93pyczORWOtpzVKDPqLZHEfjaCCCckdG8T/d1WxyH5u9M3qqte3QQyb3htEpK\nOj6H+CyQ7Z+VIxYkMQ5XtCYMw2RxOC2xeZ0m2MPZ/HQWQOt7QySJTyTCCSyuUQt/a3XNJtz0YUYp\nbq2d2cLLhCMHh50g26pvpgNhMKyM+cxsKaejLNXaQ9pnS5GtoyWcdlrmPR19CWqvIIlVWzFiHMaI\nUsG1lUTM/H7qoqn8/OWfk5ZprC1DONe6jdEDS/c5U5+pdoNM1U9lpYqeu6+IaseMGYp8Hz26fWXP\nmKEcAWtrW8/iGQ5iKYRSAUY5B+p67dyp+Kx0WvEo4QRadXVwzTV+BOeLLvKDkdbUKCJeqxdNdea+\nor2qrViQxIgRY59Rs7mG4bOGe1kA55fPV6l6O1KmsdjX1SlLOiHgxhs7JkT2N1rjqMLXtCWos5UL\nmc9o7bma10mng06b+4pYkBiIBUmMGAcONZtrqKqvorSgtMNCJEbH0R4B117EgsRALEhixIgRY+/R\nXkFifRqViREjRowYn1/EgiRGjBgxYnQIsSCJESNGjBgdQixIYsSIESNGhxALkhgxYsSI0SHEgiRG\njBgxYnQIh4X5rxBiO7DpYNfjIOFY4P2DXYmDiLj9cfvj9u87ekkpj2vrosNCkBzOEEIsb48d+OcV\ncfvj9sftP/Dtj1VbMWLEiBGjQ4gFSYwYMWLE6BBiQfL5x4yDXYGDjLj9hzfi9n8KiDmSGDFixIjR\nIcQ7khgxYsSI0SHEgiRGjBgxYnQIsSA5hCGEyBdCvCyEWCOEeEMIcaN7/hghxAtCiHXu367ueSGE\nuFMIsV4I8ZoQYsDBbcH+gRDCFkLUCiGedj/3FkK86rbzUSFE0j2f635e735fcDDrvT8ghOgihPib\nEOJNIcRaIUTxYTj+P3Dn/+tCiL8IITp9nueAEOIBIcQ/hRCvG+f2esyFEGPc69cJIcZ0pE6xIDm0\nkQJuklKeBgwGrhVCnAbcDMyXUvYF5rufAUYCfd1jHHDvp1/lA4IbgbXG518Dd0gpTwY+AMa658cC\nH7jn73CvO9Txe+BZKeWpwBmofjhsxl8IcSJwA3CmlPLLgA38B5/vOfAQcH7o3F6NuRDiGOC/gbOA\nQcB/a+GzT2hPYvf4ODQO4AngG8BbQHf3XHfgLff/SuBS43rvukP1AHq4P5xzgKcBgfLkzXG/Lwae\nc/9/Dih2/89xrxMHuw0daPvRwMZwGw6z8T8R2Awc447p08B5n/c5ABQAr+/rmAOXApXG+cB1e3vE\nO5LPCdwtehHwKvBFKeVW96ttwBfd//WPTmOLe+5QxnRgIuC4n/OAnVLKlPvZbKPXfvf7D93rD1X0\nBrYDD7qqvf8VQvwbh9H4SynfAX4LNABbUWO6gsNnDmjs7Zjv17kQC5LPAYQQRwKzgQlSyo/M76R6\n3fhc2ngLIUYB/5RSrjjYdTlIyAEGAPdKKYuAT/BVGsDne/wBXHXMxSihegLwb2SqfQ4rHIwxjwXJ\nIQ4hRAIlRB6WUv7dPf2eEKK7+3134J/u+XeAfOP2Hu65QxVDgIuEEPXAX1Hqrd8DXYQQOe41Zhu9\n9rvfHw00fpoV3s/YAmyRUr7qfv4bSrAcLuMPcC6wUUq5XUrZAvwdNS8Olzmgsbdjvl/nQixIDmEI\nIQRwP7BWSvk746snAW2FMQbFnejz5a4lx2DgQ2M7fMhBSjlJStlDSlmAIlhfklJeDrwMfNu9LNx+\n3S/fdq+6BakOAAADBklEQVQ/ZN/WpZTbgM1CiFPcU8OBNRwm4++iARgshDjC/T3oPjgs5oCBvR3z\n54ARQoiu7q5uhHtu33CwSaP46BDh9jXUFvY1YJV7XIDS+c4H1gEvAse41wvgHmADUIeydDno7dhP\nfVEKPO3+fxKwFFgP/B+Q657v5H5e735/0sGu935od39guTsH5gBdD7fxB24B3gReB/4E5H6e5wDw\nFxQf1ILalY7dlzEHrnL7YT1wZUfqFIdIiREjRowYHUKs2ooRI0aMGB1CLEhixIgRI0aHEAuSGDFi\nxIjRIcSCJEaMGDFidAixIIkRI0aMGB1CLEhixNhHCCHSQohVxnFz23e1u+wCM7prjBifZeS0fUmM\nGDGyYLeUsv/BrkSMGAcb8Y4kRoz9DCFEvRBimhCiTgixVAhxsnu+QAjxkpsXYr4Qoqd7/otCiMeF\nEKvd42y3KFsIcZ+ba+N5IURn9/obhMpB85oQ4q8HqZkxYniIBUmMGPuOziHV1iXGdx9KKQuBu1ER\nigHuAmZKKb8CPAzc6Z6/E1ggpTwDFSvrDfd8X+AeKeXpwE5gtHv+ZqDILafiQDUuRoz2IvZsjxFj\nHyGE+JeU8siI8/XAOVLKt92gmtuklHlCiPdROSNa3PNbpZTHCiG2Az2klE1GGQXAC1IlKkII8V9A\nQkp5mxDiWeBfqJAoc6SU/zrATY0Ro1XEO5IYMQ4MZJb/9wZNxv9pfE7zm6j4SQOAZUaU2xgxDgpi\nQRIjxoHBJcbfGvf/alSUYoDLgUXu//OB8eDlnz86W6FCCAvIl1K+DPwXKgx6xq4oRoxPE/GbTIwY\n+47OQohVxudnpZTaBLirEOI11K7iUvfc9ahshj9GZTa80j1/IzBDCDEWtfMYj4ruGgUb+LMrbARw\np5Ry535rUYwY+4CYI4kRYz/D5UjOlFK+f7DrEiPGp4FYtRUjRowYMTqEeEcSI0aMGDE6hHhHEiNG\njBgxOoRYkMSIESNGjA4hFiQxYsSIEaNDiAVJjBgxYsToEGJBEiNGjBgxOoT/D+Vislm1Q+UtAAAA\nAElFTkSuQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEWCAYAAABMoxE0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXt8VNW5//959s4FLyg1akER8VKR\nIEogYvOjQFCPpSoVxfZ4O6igMSi0HI9F7NE2ag8oYqEixwZFSqrW+i0VRUVUdARNjtzCpaAoYgQU\nFKNBFAiZmef3x5o1s/aevWf2XDMT1pvXvMjs2Ze1b+tZz3URM0Oj0Wg0mmQx2rsBGo1Go8lvtCDR\naDQaTUpoQaLRaDSalNCCRKPRaDQpoQWJRqPRaFJCCxKNRqPRpIQWJJp2h4hMIvqOiHqkc932hIhO\nJ6K0x9YT0YVE1KR830xEg72sm8SxniCi3ya7fYz9/oGI/pLu/Wraj4L2boAm/yCi75SvhwNoBRAI\nfb+FmZ9OZH/MHABwZLrXPRRg5l7p2A8R3QTgOmauVPZ9Uzr2ren4aEGiSRhmDnfkoRHvTcz8htv6\nRFTAzP5stE2j0WQfbdrSpJ2Q6eLvRPQ3ItoL4DoiqiCi/yOiFiLaSUSPEFFhaP0CImIi6hn6/lTo\n98VEtJeIGojolETXDf3+MyL6kIj2ENEsInqXiG5wabeXNt5CRFuI6BsiekTZ1iSiGUTUTERbAQyP\ncX3+m4ietS2bTUR/DP19ExG9Hzqfj0Pagtu+dhBRZejvw4nor6G2bQQwwLbu3US0NbTfjUT089Dy\nvgAeBTA4ZDb8Srm2Ncr21aFzbyaihUTUzcu1iQcRXR5qTwsRvUlEvZTffktEnxPRt0T0gXKuPyai\nNaHlXxDRQ16Pp8kAzKw/+pP0B0ATgAtty/4A4CCAERCDlcMAnAvgPAgt+FQAHwIYH1q/AAAD6Bn6\n/hSArwCUAygE8HcATyWx7vEA9gK4LPTb7QDaANzgci5e2vgCgKMB9ATwtTx3AOMBbATQHUAJgGXi\n9XI8zqkAvgNwhLLvLwGUh76PCK1DAM4HsB/A2aHfLgTQpOxrB4DK0N/TAfgA/ADAyQA22db9JYBu\noXtyTagNPwz9dhMAn62dTwGoCf19UaiN/QB0AvC/AN70cm0czv8PAP4S+rt3qB3nh+7RbwFsDv3d\nB8CnALqG1j0FwKmhv1cCuDr0d2cA57X3u3Aof7RGoskU7zDzImYOMvN+Zl7JzO8xs5+ZtwKYA2Bo\njO3/wcyrmLkNwNMQHVii614KYC0zvxD6bQaE0HHEYxunMvMeZm6C6LTlsX4JYAYz72DmZgAPxDjO\nVgD/ghBwAPBvAL5h5lWh3xcx81YWvAlgKQBHh7qNXwL4AzN/w8yfQmgZ6nGfY+adoXvyDMQgoNzD\nfgHgWgBPMPNaZj4AYDKAoUTUXVnH7drE4ioALzLzm6F79ACEMDoPgB9CaPUJmUc/CV07QAwIfkRE\nJcy8l5nf83gemgygBYkmU2xXvxDRmUT0MhHtIqJvAdwH4NgY2+9S/t6H2A52t3VPUNvBzAwxgnfE\nYxs9HQtiJB2LZwBcHfr7mtB32Y5Lieg9IvqaiFogtIFY10rSLVYbiOgGIloXMiG1ADjT434BcX7h\n/THztwC+AXCisk4i98xtv0GIe3QiM28G8F8Q9+HLkKm0a2jVGwGUAthMRCuI6GKP56HJAFqQaDKF\nPfS1FmIUfjozHwXgdxCmm0yyE8LUBAAgIoK147OTSht3AjhJ+R4vPPk5ABcS0YkQmskzoTYeBuAf\nAKZCmJ26AHjNYzt2ubWBiE4F8BiAcQBKQvv9QNlvvFDlzyHMZXJ/nSFMaJ95aFci+zUg7tlnAMDM\nTzHzIAizlglxXcDMm5n5Kgjz5cMAFhBRpxTbokkSLUg02aIzgD0Aviei3gBuycIxXwLQn4hGEFEB\ngF8DOC5DbXwOwEQiOpGISgDcGWtlZt4F4B0AfwGwmZk/Cv1UDKAIwG4AASK6FMAFCbTht0TUhUSe\nzXjltyMhhMVuCJl6M4RGIvkCQHcZXODA3wCMJaKziagYokNfzsyuGl4Cbf45EVWGjv0bCL/We0TU\nm4iGhY63P/QJQpzAfxDRsSENZk/o3IIptkWTJFqQaLLFfwG4HqKTqIVwimcUZv4CwL8D+COAZgCn\nAWiEyHtJdxsfg/BlbIBwBP/DwzbPQDjPw2YtZm4B8J8AnodwWF8JIRC98HsIzagJwGIAdcp+1wOY\nBWBFaJ1eAFS/wusAPgLwBRGpJiq5/asQJqbnQ9v3gPCbpAQzb4S45o9BCLnhAH4e8pcUA5gG4dfa\nBaEB/Xdo04sBvE8iKnA6gH9n5oOptkeTHCTMxhpNx4eITAhTypXMvLy926PRdBS0RqLp0BDR8JCp\npxjAPRDRPivauVkaTYdCCxJNR+cnALZCmE1+CuByZnYzbWk0miTIqCAJjQY3h7JdJzv8PiSUneon\noiuV5cOIaK3yOUBEI0O//YWIPlF+8xKrrjlEYea7mfkYZj6KmSuYeWV7t0mj6WhkzEcSskd/CJFs\ntQORTNRNyjo9ARwF4A6IpKQoByURHQNgC4DuzLyPRNXQl5zW1Wg0Gk32yWTRxoEAtshMVBK1hS6D\nKNsAAAhlwIKIYoXtXQlgMTPvS7Yhxx57LPfs2TPZzTUajeaQZPXq1V8xc6yQeQCZFSQnwppluwOi\n7EGiXAURvqnyP0T0O4hwy8lONm8iqgJQBQA9evTAqlWrkji0RqPRHLoQUbwKDQBy3Nkeqi7aF8AS\nZfFdEIlU5wI4Bi6JX8w8h5nLmbn8uOPiClSNRqPRJEkmBclnsJZrCJc9SIBfAng+lJwEAAgVneOQ\nFjIPwoSm0Wg0mnYik4JkJUR1zlOIqAihKp8J7uNqiNIMYZQ5EAjASIjaSBqNRqNpJzLmI2FmPxGN\nhzBLmQCeZOaNRHQfgFXM/CIRnQtRcuEHAEYQ0b3M3AcIR3SdBOBt266fJqLjIIrNrQVQnUz72tra\nsGPHDhw4cCCZzTVZplOnTujevTsKC91KQWk0mvbikCiRUl5eznZn+yeffILOnTujpKQEQrnR5CrM\njObmZuzduxennHJK/A00Gk1aIKLVzBx3zpqcdrZnkgMHDmghkicQEUpKSrT2qNHkKIesIAGghUge\noe+VdxoagKlTxf8aTTbIZB6JRqPJMg0NwAUXAAcPAkVFwNKlQEVFe7dK09E5pDWS9qS5uRn9+vVD\nv3790LVrV5x44onh7wcPeptW4cYbb8TmzZtjrjN79mw8/fTT6WgyfvKTn2Dt2rVp2ZcmM/h8QogE\nAuJ/n6+9W6Q5FNAaSTtRUlIS7pRrampw5JFH4o477rCsw8xgZhiGs7yfN29e3OPcdtttqTdWkzdU\nVgpNRGoklZXt3SJNe9HQIAYSlZWZ1
0q1RpIADdsbMHX5VDRsz5zxecuWLSgtLcW1116LPn36YOfO\nnaiqqkJ5eTn69OmD++67L7yu1BD8fj+6dOmCyZMn45xzzkFFRQW+/PJLAMDdd9+NmTNnhtefPHky\nBg4ciF69eqG+vh4A8P3332PUqFEoLS3FlVdeifLy8riax1NPPYW+ffvirLPOwm9/+1sAgN/vx3/8\nx3+Elz/yyCMAgBkzZqC0tBRnn302rrvuurRfM02Eigphzrr/fm3WOpSRJs577hH/Z9pfpjUSjzRs\nb8AFdRfgYOAgiswiLB29FBUnZeYt/eCDD1BXV4fychF198ADD+CYY46B3+/HsGHDcOWVV6K0tNSy\nzZ49ezB06FA88MADuP322/Hkk09i8uSoyv1gZqxYsQIvvvgi7rvvPrz66quYNWsWunbtigULFmDd\nunXo379/zPbt2LEDd999N1atWoWjjz4aF154IV566SUcd9xx+Oqrr7BhwwYAQEtLCwBg2rRp+PTT\nT1FUVBRepskcFRVagBzqOJk4M/lMaI3EI74mHw4GDiLAARwMHISvyZexY5122mlhIQIAf/vb39C/\nf3/0798f77//PjZt2hS1zWGHHYaf/exnAIABAwagqanJcd9XXHFF1DrvvPMOrrrqKgDAOeecgz59\n+sRs33vvvYfzzz8fxx57LAoLC3HNNddg2bJlOP3007F582b86le/wpIlS3D00UcDAPr06YPrrrsO\nTz/9tE4o1GiygDRxmmZ2TJxakHiksmcliswimGSiyCxCZc/KjB3riCOOCP/90Ucf4U9/+hPefPNN\nrF+/HsOHD3fMpygqKgr/bZom/H6/476Li4vjrpMsJSUlWL9+PQYPHozZs2fjlltuAQAsWbIE1dXV\nWLlyJQYOHIhAIJDW42o0GivZNnFqQeKRipMqsHT0Utw/7P6MmrXsfPvtt+jcuTOOOuoo7Ny5E0uW\nLIm/UYIMGjQIzz33HABgw4YNjhqPynnnnYe33noLzc3N8Pv9ePbZZzF06FDs3r0bzIxf/OIXuO++\n+7BmzRoEAgHs2LED559/PqZNm4avvvoK+/YlPbWMRqPxSEUFcNdd2TFzah9JAlScVJE1ASLp378/\nSktLceaZZ+Lkk0/GoEGD0n6MCRMmYPTo0SgtLQ1/pFnKie7du+P+++9HZWUlmBkjRozAJZdcgjVr\n1mDs2LFgZhARHnzwQfj9flxzzTXYu3cvgsEg7rjjDnTu3Dnt56DRaNqPQ7bW1vvvv4/evXu3U4ty\nC7/fD7/fj06dOuGjjz7CRRddhI8++ggFBbk1ztD3TKPJLl5rbeVWT6FpF7777jtccMEF8Pv9YGbU\n1tbmnBDRaDS5i+4tNOjSpQtWr17d3s3QaDR5ina2azQajSYltCDRaDQaTUpoQaLRaDSalNCCRKPR\naDQpoQVJOzFs2LCo5MKZM2di3LhxMbc78sgjAQCff/45rrzySsd1KisrYQ93tjNz5kxLYuDFF1+c\nljpYNTU1mD59esr70Wg0+YMWJO3E1VdfjWeffday7Nlnn8XVV1/tafsTTjgB//jHP5I+vl2QvPLK\nK+jSpUvS+9NoNIcuWpAkQDqnML3yyivx8ssvhyexampqwueff47BgweH8zr69++Pvn374oUXXoja\nvqmpCWeddRYAYP/+/bjqqqvQu3dvXH755di/f394vXHjxoVL0P/+978HADzyyCP4/PPPMWzYMAwb\nNgwA0LNnT3z11VcAgD/+8Y8466yzcNZZZ4VL0Dc1NaF37964+eab0adPH1x00UWW4zixdu1a/PjH\nP8bZZ5+Nyy+/HN988034+LKsvCwW+fbbb4cn9iorK8PevXuTvrYajSbLyMmTOvJnwIABbGfTpk1R\ny2JRX8982GHMpin+r69PaHNHLrnkEl64cCEzM0+dOpX/67/+i5mZ29raeM+ePczMvHv3bj7ttNM4\nGAwyM/MRRxzBzMyffPIJ9+nTh5mZH374Yb7xxhuZmXndunVsmiavXLmSmZmbm5uZmdnv9/PQoUN5\n3bp1zMx88skn8+7du8Ntkd9XrVrFZ511Fn/33Xe8d+9eLi0t5TVr1vAnn3zCpmlyY2MjMzP/4he/\n4L/+9a9R5/T73/+eH3roIWZm7tu3L/t8PmZmvueee/jXv/41MzN369aNDxw4wMzM33zzDTMzX3rp\npfzOO+8wM/PevXu5ra0tat+J3jONRpMaAFaxhz5WayQeycQUpqp5SzVrMTN++9vf4uyzz8aFF16I\nzz77DF988YXrfpYtWxaeMOrss8/G2WefHf7tueeeQ//+/VFWVoaNGzfGLcj4zjvv4PLLL8cRRxyB\nI488EldccQWWL18OADjllFPQr18/ALFL1QNifpSWlhYMHToUAHD99ddj2bJl4TZee+21eOqpp8IZ\n9IMGDcLtt9+ORx55BC0tLTqzXqPJI7Qg8Ugm6vtfdtllWLp0KdasWYN9+/ZhwIABAICnn34au3fv\nxurVq7F27Vr88Ic/dCwdH49PPvkE06dPx9KlS7F+/XpccsklSe1HIkvQA6mVoX/55Zdx2223Yc2a\nNTj33HPh9/sxefJkPPHEE9i/fz8GDRqEDz74IOl2ajSa7KIFiUcyUd//yCOPxLBhwzBmzBiLk33P\nnj04/vjjUVhYiLfeeguffvppzP0MGTIEzzzzDADgX//6F9avXw9AlKA/4ogjcPTRR+OLL77A4sWL\nw9t07tzZ0Q8xePBgLFy4EPv27cP333+P559/HoMHD0743I4++mj84Ac/CGszf/3rXzF06FAEg0Fs\n374dw4YNw4MPPog9e/bgu+++w8cff4y+ffvizjvvxLnnnqsFiUaTR2j7QQJkYgrTq6++Gpdffrkl\nguvaa6/FiBEj0LdvX5SXl+PMM8+MuY9x48bhxhtvRO/evdG7d++wZnPOOeegrKwMZ555Jk466SRL\nCfqqqioMHz4cJ5xwAt56663w8v79++OGG27AwIEDAQA33XQTysrKYpqx3Jg/fz6qq6uxb98+nHrq\nqZg3bx4CgQCuu+467NmzB8yMX/3qV+jSpQvuuecevPXWWzAMA3369AnP9qjRaHIfXUZekzfoe6bR\nZBevZeS1aUuj0Wg0KaEFiUaj0WhS4pAWJIeCWa+joO+VRpO7HLKCpFOnTmhubtYdlAvffQfs3Cn+\nb2+YGc3NzejUqVN7N0Wj0ThwyEZtde/eHTt27MDu3bvbuyk5R2sr8MUXADNABPzwh4CSQtIudOrU\nCd27d2/fRmg0GkcOWUFSWFiIU045pb2bkZNMnQrcc4/I4jcM4MILgZqa9Ic+azSajsEha9rSuCOz\n+A0DCAaBN94ALrggPcUqNZpcI53FWA9VMipIiGg4EW0moi1ENNnh9yFEtIaI/ER0pbJ8GBGtVT4H\niGhk6LdTiOi90D7/TkRFmTyHQxGZxX/hhRFhkq76YhpNLtHQIAZJ99yjB0upkDFBQkQmgNkAfg
ag\nFMDVRFRqW20bgBsAPKMuZOa3mLkfM/cDcD6AfQBeC/38IIAZzHw6gG8AjM3UORzKVFQIc1ZxcXrr\ni2k0uUQmirEeimRSIxkIYAszb2XmgwCeBXCZugIzNzHzegDBGPu5EsBiZt5HRAQhWOSMTvMBjEx/\n0zVAZuqLaTS5RCaKsR6KZNLZfiKA7cr3HQDOS2I/VwH4Y+jvEgAtzCzLzu4IHScKIqoCUAUAPXr0\nSOKwGiAz9cU0mlxBDpZ8PiFE9LOeHDkdtUVE3QD0BbAk3rp2mHkOgDmAqLWV5qZpNJoOgh4spU4m\nTVufAThJ+d49tCwRfgngeWZuC31vBtCFiKQATGafGo1Go0kjmRQkKwH8KBRlVQRhonoxwX1cDeBv\n8kto6se3IPwmAHA9gOgJzTUajUaTNTImSEJ+jPEQZqn3ATzHzBuJ6D4i+jkAENG5RLQDwC8A1BLR\nRrk9EfWE0Gjetu36TgC3E9EWCJ/J3Eydg0aj0Wjic8jOR6LRaDSa2Oj5SDQajUaTFbQgOQTRJSE0\nGk06yenwX036kSUhDh4UCVg60VCj0aSK1kjSTK6P9nVJiI5Nrj9/mo6J1khi0NCQWMZrPoz2ZUkI\n2UZdEqLjkM3nL9F3Q9Ox0YLEhWReSqfRfq69ZLokRMclW89fPgyYNNlFCxIXknkp82W0r0tCdEyy\n9fzlw4BJk120IHEhmZdSHe2XlET8D/ol02SDbGmb+TJgylU6ollQCxIX5EtZV5f4doBW/TXtQza0\nTW0eTZ6OahbUgiQO8+eLmz5/PjBzJtDcHP/l0aq/pqOjzaPJ0VH7Bi1IYqDe9NZWYPx4Me1svJGE\nVv01Go0TsfqGfDZ5aUESA/WmEwmBos5f7nazteqv0WiccOsb8t3kpQVJDOzO84kTvWsZWvXXaDRO\nOPUN+W7y0oIkDupN79tXaxkdiXw2JWg6FvluDteCJAG0ltFxyHdTgqZjke/mcC1INIck+W5K0OQm\nqWi5+TxQ1YIkRebMARYsAEaNAqqq2rs1Gq/kuylBk3scylquFiQpMGcOcMst4u/XXhP/a2GSH+S7\nKUGTe6Rby80nH54WJCmwYEH0dy1I8od8NiVoco90arn5pt3o+UhSYNSo2N81Gs2hg9Ry778/9Y4/\n3+YN0hpJCkjtQ/tINBoNkD4tN998eMTM7d2GjFNeXs6rVq1KaR/5ZK/UaDKFfg+yRy5cayJazczl\n8dbTGokH8s1eqdEA6e+I9HuQXfLJh6d9JB7IN3ulRiM7/XvuEf+nYw53/R5o3NCCxAPSXmmambVX\nNjQAU6em56XXHNpkotPP1nugyT+0acsD2cg50GYDTTrJhLNW595o3NCCxCOZtlfqkh2adJKpTj+f\n7Paa7KEFSY6Qb+F+mtynPTr9XIg00mQfLUhyBG020OQ72jx76KIFSQJkerSlzQaafKa9zLNaC2p/\ntCDxiB5taTSxKSkBDANgzp55Ntn3Uguf9KLDfz2SajhlqqG9OjRYk8s0NIipqAMBIUxmzsxOB53M\ne5mJHJtDHa2ReCQVZ3iq2ozWhjS5juzQg0GACGhuzs5xk3kvdYRk+tEaiUdSqeyZqjajM4rzg3zS\nGtPd1vZKVkzmvdSJlRmAmTP2ATAcwGYAWwBMdvh9CIA1APwArrT91gPAawDeB7AJQM/Q8r8A+ATA\n2tCnX7x2DBgwgJOlvp55yhTxf6xl8fZx2GHMpin+j7Wd2/G8bq9pH5K9R4k+S+kgU89Te5xLsuRq\nW3OtXQBWsZe+3stKyXwAmAA+BnAqgCIA6wCU2tbpCeBsAHUOgsQH4N9Cfx8J4HCOCJIrE2lLsoLE\n6YXLZIcRa9/peMBy7SHtSEyZIu4bIP6fMiX+Nu01QEimrZrMk4sDRq+CJJM+koEAtjDzVgAgomcB\nXBbSLgAAzNwU+i2obkhEpQAKmPn10HrfZbCdrriZlOSyAweAujpv6rSX0N5YtttUQ4O1nyWz5JOt\nXie/5ib57LvJpI/kRADble87Qsu8cAaAFiL6JxE1EtFDRGQqv/8PEa0nohlEVOy0AyKqIqJVRLRq\n9+7dSZ2Aky21slJ8B0SY47x5+WFn1n6WzJJPtvp0zuSnSR/57LvJ1aitAgCDAZQB2Abg7wBuADAX\nwF0AdkGYy+YAuBPAffYdMPOc0O8oLy9PavYut2zzMWOA2lohSPz+9I0cMpndrkehmSdRrbE9qxnk\ne/JrR8wDyefqFpkUJJ8BOEn53j20zAs7AKxVzGILAfwYwFxm3hlap5WI5gG4I03tdcTphRs9Gpg/\n33unnMhDn6kXPJ8f0o5Mvnfo7UFHNtPm6/OQSUGyEsCPiOgUCAFyFYBrEti2CxEdx8y7AZwPYBUA\nEFE3Zt5JRARgJIB/pb/pAjcB4LVTbmgQPpR584TmUlQkErWam9unM8/UQ5qPo8NcaHMutCEfySVf\nQrbvYc4+M1488sl+AFwM4EOI6K3/Di27D8DPQ3+fC6F9fA+gGcBGZdt/A7AewAaISK2i0PI3Q8v+\nBeApAEfGa0cyUVv2CIra2uRCfolEdAzAbBjMhYXZjcrIdKRWLkaaxCMX2pyNNnTUKL1cuH/t0Y7a\nWtF/GEb2zhs5ELUFZn4FwCu2Zb9T/l4JYfJy2vZ1iNBg+/Lz09xMR9RRz/79wK23iuVeVWm5PYe8\nM0SidEQgILJ/442k0jHyyIYJIJujw3SNxnJhRJvpNsS69zk7qvVIrphpE7mHqV7zhgbgttuEZQMA\nWltzK6orV53t7Y6MzgoExHf5v9MD4/SQqM5t0xQO+rIyUY8onm8lXQIgGx1mtpz46RSKuRB4kOk2\nuN37juJfyAVfgtd7mI5r7vOJAajENHMrYEYLEhcqKkTn/+c/W5cbhvUGuj0kbqOmvn3jj0zSJQCy\n0WFma3SYTqGYCyPaTLfB7d7ngjbWUfB6D9NxzSsrgeJioYkYBvDoozl237zYv/L9k0pmu8wAVv0c\n0jZZX8980UVimZolnKptur6eubhY+FeKizteNnsybcoVu3g+ocvt5Abpuubt8S4jF3wk+U5FBTBi\nBLBwYWQZcySZ74ILxAhBVTlbWiLLDQOYPRuoqnLefyy7qfStcAIZME77ywUTgEqyan4uaBH5htO9\nz/Z19OobyHe/TSzSdc1z7V224EXa5Psn1aKNqlYiNQS1XpFdY7F/nzQpsZGhum8i5upqb+087DBx\nvIICEeGRS8hrVl2t6zwdKngdiWstKXeBR41El5H3QEFIbzMM4LzzRG5ISYkYURNZ1w0Go79Pmwbc\nfbd1Ep1YJUuSKcPi80W0I78fGD/eW+mWbJQ+VycSevJJcT0zXQYin0q6d1S8luXR5XvyH23aioPP\nFwm5CwaBZcvEp7BQOLwWLwZefDFagNixh/zGmpZUOvoTKcNSWSn2J9sRCMTfJlsRPGpHAQA33wz0\n6JE5M0Y+RCblgykn1TZ6DfZIV1BIPlzTDosXtSXfP
6matuyJhfIzcqT7b04mLnsp+lhmqGTU/UQT\nlpIxoSWCNGfV1mbXdJHrZdLzwZSTbQdxOgJUnNqbi8Em+QTS6WwnotMA7GDmViKqRGgOEWZuyaCM\nywmko6yuLqIhSOrrhTmJHRzi5eXAqlWRqUcvvBCoqRH7mzo1/rSkyTjoqqq8hRdL1FwZaUIbPTo9\nozm7VpDN0jC5kCdiRx0t50MIbrra6NVBnKoj2c08luuaaUfBq49kAYAAEZ0OUVH3JADPZKxVOUZF\nBfDYY8Bll1mXf/llRBioFBcDY8eK/00T6NQJGDVKPNwNDfHLRUv7PgDcdZf436u9v6JCbOP15R0z\nJtJ+aUJLB/YXu7nZe7tSRQrhXCmTrvqILrgg4l/L5XLh6Sppni1flVN7te8le3j1kQSZ2U9ElwOY\nxcyziKgxkw3LRSZNAhYtitj6Jb17A5deKh7UE04Q61VURLSDkhKR0a6GBEstx47sdOS6t98OzJpl\nzZBPl9YAJF7J2CvtrRXkUqikk1DN9VDmdISsZtNX5dbeXNNMOyxe7F8A3gNwNUShxFNCy/7lZdtc\n+KTiI7EzcGC0P4RI2GadfBMyaVH1oxQWuvsNpkyx+lcMw/qdKP129UzZkTuyfTqRc8sHn0gyxLsG\nueCr6sjPYDZAOudsB1AK4BEAV4e+nwLgTi/b5sInnYKktjZakNid6vKFUZ3q9nUuusj5JauvFw54\nVXAUFloFERHz6aenniuiX7LeolJLAAAgAElEQVTkSEYwdLRr7eUadFQBeijhVZB48pEw8yZm/hUz\n/42IfgCgMzM/mGblKC+oqhJO9969henJ7h9Ri6lJk4b0oxiG+BQXC5+JtOmaJrBtmzAFVFQI01dh\noVi3UyfgP/9T/C1hBrZsAW65BZgzJ7nzsNvtE7Vht2eeRrLH9rKdl3WSsb0n4rtKN5m4V16uQbp8\nVTonKA/wIm0A+AAcBeAYAJ9AmLr+6GXbXPikUyNRqa8XIbPFxULLME0REqyGHsaa00TdPlbYolsW\nPSA0m2RIxeyQzVBL+z6THeWmcwSdTyPtTLU1W9cg0eN0NM2vvUGaa20dzczfEtFNEGG/vyei9RmQ\na3mFdOiOHi0c53PnirpcL7wgNJZf/zq2w7KiIpLwGAgIB3tNTSRMWF1fOg2JIgmSgNBsnIiXnJWM\nM1zuc9u27IRaOjlrkw1L9bKd133nU92vTIUa52LVZ/V5yURgiiYGXqQNxIyE3QC8BuDc0LL1XrbN\nhU+mNBKV6mpnjWHSpMg6TqNrVaOxJy6qyHWrq8U+L7rI3UeSyMg6GYdxUVG0FpUJx6rTPnNBI0mE\nVEbI6Rhd55P25EQi7bdr7pkITDnUQJo1kvsALAHwLjOvJKJTAXyUfrHW8Zg+HRg5UvxtT9CTk1yZ\nJnDmmcD77zvPntjQIOp1LVokXpHiYjEaBITt2D4i9DKKk9pFSUlEo5ATHzmNMr2UOUk11NJ+bCet\nKdmRsJft0j3KTiX8NV2hs/mkPTlRUSHelQULhPYdr0xQURFw4EBkKJdrCZ8dtoyLF2mT759saCT1\n9SK6ykkrOfFE5tJSa/RW9+5WLcSpwrDcb1GRdX+GITQTN/9LvFGcPZpMakGxSpl4HdGrbUhmjvuO\nVOIiFS0tVR9Wvl4zO8n4SNz8ju1NPJ+pXCeX7h3SHP7bHcDzAL4MfRYA6O5l21z4ZEOQMEce4n79\nokN+3T5E0aG9as2rKVOia3kVFlrLsRuGWKa+OLEeyOrq6H2apntIsnp+snZWrIddCj8i8b+XlyIX\ncg4SRTU3pttUlkkTXj6R7HORax0ys/Vc3N7ZdN67dFyDdAuS1wHcCJEJXwDgBgCve9k2Fz6pCpL6\nbfU8ZdkUrt/m/Y7U14tcDy/CxP5RfR92jcQ0xe/qQ1dQED1LY6x2OWk48TQSdft469j9RYnMp5Iv\nHaD9OrrNZOnlZXZbJ56gciIfBXIsVO05HfPstKeAiffOpvPepet98ipIvPpIjmPmecr3vxDRxNQN\na7lPw/YGXFB3AQ4GDqLILMLS0UtRcVJ842ZFBfCb34hcj0SwF3GUkV2ynEpZWeR3afuWJVi8+Cd8\nPmuJl969gaFDI9Et8Yo+qr4Se5RZKuSCLT8R+7XPB7S1Rb57scU77T+eL0SWr5k/35ufpL1L06Qb\n6SMZP148cxMnimc0kedD9Qeq70m267Cpz3hLCzBjhliu3qd03busFwb1Im0ALAVwHQAz9LkOwFIv\n2+bCJxWNZMqyKWzeazJqwOa9Jk9ZltgwobZW+EO8aiOmmVz+gteRlpt/JFHTSazt6+ud55xPdjSY\njVFkMrb4eBpJvEg35tij0I5k1kmFdOU8JaK5ZxI3LSsZDTTeMXJNIxkDYBaAGQAYQD2EeavDU9mz\nEkVmUVgjqexZmdD2srT7kCHW/A+JOhkVILolqX14zXNIZCQtR0U1NcAbb0SixOrqvO3DbXt1xFNR\nAbz1lnV/yUYhtcfkW15GcHZN0SlfQd2nvMdsiySKpUHE+i3WPc+lgpWx8PrcpqJlqffAMESEJFH7\namv2ihfNzdHP+ejRye9fXtdsTt2Q9CgfwMRkt832J50+kmT8Jcxi1OGUne60TI1/t0dC2Uf6XiK0\n3Ozv8UbLMa9JgiMe+6iyutrbqDkTNn+na+KmRaV6HC/XOJYG4dZWr9c+U9pJqvtNRgNMVpuNFymV\nbZzOPV3Pebp9jUins91xQ2Bbsttm+5OsIKnfVs/Vi6q5elF1WIgU31/MVENcfH9xwsKkvt65erDT\nxy3EV42GmjRJOPRlBJaatOdlZkK5nhoBlikTSrKCK5kXI17H7BZmnGikmRfsA4F0dGJeO51MBDBI\n80uq4bXZDArIRVOfvU3pulfpvq7ZECTbk902259kBEn9tnouur+IUQNGDbjwvkLuOaNn+DtqwNWL\nEp+btr7eWt3X7WMP8bWH59rDd50ir0wzWsi4tSkRrSZVX0eigitZgeV0Lm4vWj5FO3ntdNJ9TvK4\n6rOXiHaZzDkcSqRD4LWXRuLVR+JoFUvVrJbL+Jp8aAtEwnLagm1o2tOU8n5ldd/x44XPhENXkQgo\nKIj4USoqgKOOEnZdZmE3HTUKWL5c2FHlqyw59VQRJbZggYimUv0ugNi3m01YnU7YjtOUuclGvsj1\n6upEewBvtupYNn+7nT2er8PN3p6sHT6ZTGV1GyAzWfpA+iO45LVVn1nTFFM0+/2JPQ+5EKWXa6TD\nt9Vu1zWWlAGwF8C3Dp+9APxeJFUufJLVSArvK7RoIOrHqDESNm1Z9u9gIpg0yd2PokZ2TJnCfO21\n1nX69bPW7IqV5OjWHqeRjH1UGytpMd6Iym7eSjU6xWl/XnJhamuda5UlOiJM1uyWim8qUdJp1nG6\n3smaRTNFLpqxkiUXzgXp0EiYuXNmxVhuQ/bJRiw/Ags3L4SvyYfKnpWecktU1MrBcvTg80VrEoAY\nXT/9tIgA
q6gANmwAnnvOus7atZG/DQMoLwfWrYuMFGUUiJdaWgcOCK1BjSpqbRUj0H79IlqROo8K\nED+6yl6vq0ePxEdMavvV/QUCYp6YTp1iR6s0NEQ0quXLrTkJiY4IvUZ6ubXZLZIrnaQzgsuuucpn\nyj5Vc3vVk0pXhF9DQ+xovGyQzWmK00Eqpq0Oja/Jh0Aw4Pp7kIOY9u40GGSg2Cz2nKhox/6iFxaK\nh8fOsmXAnXeKApC33eYcSgyIzr64WHSmGzYIU1e/fuLF3rDBWijy4ouBrl1FkuO2bZF9MAOPPy6W\nV1VZE8JmzRLfGxuFSePxx0VHcv318TvVVE0t9jLhF18shKYUTLJDbm4Wk0jJbRIxfSWCl/NxMg3K\nbWQoqhT2XsN7M4XXY9qTJFVTCuCtlHsmzi8d97ahQbRJvoPz5olQ9mwLxJqaiIk61wpPOqEFiQuV\nPSthGiYCAXdhAgiBcjBwEL4mn6Mgadje4FlrUXMTdu0S85qofpCHHgJeesldiBgG8POfi78nTwbe\nfVc8iK+9FpmdMRgUn0BAzJ0iIbIeKxAQwqNvX9Exy+327xfzrvTvH5lHRb50agepzvionl8q9lu7\nBqK2X56/2iE7jerS6Tfwcj72zq25ObrjTTTbPRPIY0rNc8QIYNIkb7lM6syPU6dGa4lqVr4c7Sfj\nV4lHOu5tMhULksFNkKr3IRiMfqZzFi/2r3z/JBv+W72o2tVHovpKDvvDYY7+kvpt9XzYHw5j817T\ndZ1YTJoU7S+JlRE/aVJ0HS17ZJd9/vd4IcjSRmvfb2FhtH0/XaGhbjhFDal+oIEDrX4Xt6ilRKPA\nspkvEavdmWTKlOhCo/Gy9Z3yMtwiu9S5ZJx+SxeJ5uQ4reOlhlqqbXR7JuyFHS+6KLnjp8u/gixE\nbXV4Rp8zGk80PgF/MKICdCnugpbWlshKBEw4bwLq1tWhbl0dyrqVoXlfMyp7VsLX5EOrvxVBBNHq\nb3XVWtx48EHx//Tp0VFaKj17Al26AM8842wWAyJzxV9xhVjPjl0jkXPMl5SIEdOYMcCf/xz53e+3\n1hHbsCFSA0zVVNI1mlOzdRsbhUlNVRYNQ/iEVq+OjIDVEapdS/JianHSDADv9vNEM4zVmlCZrJfl\ndK6VldFVFlpbo++fqoW51a6SfhRV61D9Q/I5SzXD3Ok83HxCXrU8t4oF6TTFxTLB2bWqZOrYtYt/\nxYu0SfYDYDiAzQC2AJjs8PsQAGsA+AFcafutB8SMjO8D2ASgZ2j5KRBzxm8B8HcARfHakUpme+2q\nWi68r5Cphly1Evtvxr1CS5n0+iTL8tpVyZUulSN9t/lO4n2uvVaMTiZNih51lpaKUaXcP1EkB0Wt\no2UfqRUWiu2Ki60ajFskUipagNMIrrY20t6CAuaRI51Hcl60JK8Ra9XV3keriWoicn1Zf+naa2PP\ngpkssdpVWxv9fMQ6fjzNSU2MtSfIxovai/e8yPvvtVZcuup1pau8ezLVKLySTo0W7a2REJEJYDaA\nfwOwA8BKInqRmTcpq22DqNl1h8Mu6gD8DzO/TkRHApBjpQcBzGDmZ4nozwDGAngsQ6eBqgFV6Ht8\nX9T4avDGJ28gyNFhVWxLqZF+k7U718IgA0EOwiADzfuao7b1ghrhJf0nixdbR3ex6NxZjB7vvjs6\nKuzjj8X/W7eKERKzczTRXXcJR/u4cRF/yZ/+JEat4fMO+V7ssycmMkJym6dd2ozlKPmuu6yVigFg\nyZLIem+8IaKyli4VbYmlJbmNEO2jQ8C7/TxRx696jsGgiNIzjOjIslSJ1a6qKqHt1daKe28Y1krU\nduL5JOR+7cEGsSLqYmk66npqwElra/xacemq15UOLTueby3VSLt05w95IZOmrYEAtjDzVgAgomcB\nXAahXQAAmLkp9JuleyOiUgAFzPx6aL3vQssJwPkArgmtOh9ADTIoSKSzfFTpKCzfthwHAwdBRDix\n84nY07oHLQdaHLczDRMH/AdAoX8GGSg5vCSltqgPmHRazp1r7dycmDPH3TTW1haJyJLOvYICYXZo\naxP/l4Sa3dgYETKBALBpk3VfMkENsL4gToJAPY94UVUlJZHjBoOiBLeKNKvJKVllMUkZxjx6dPSL\npR43lgnM7hh/8klrcIHbS5roy+xkWspExE68do0eHR3O60aywQYyok5FHUAQRQSqm+C3D4jiOe9T\nCfTIRMecycKa7ZGUmElBciKA7cr3HQDO87jtGQBaiOifEKasNwBMBvADAC3MLJ0WO0LHiYKIqgBU\nAUCPHj0SbjwQPRfJzOEz0byvGSWHl2DiqxOx37/fddu2QBuWbVsW/h4IBjBh8QQ07mzE6HNGJxUq\nrGLXUv7v/4D1653zUJyWFRREBIvMsDcM4MILhV12w4boOSBiUVAAXHqp0JTmzAGeeEJk8FdVARs3\nWgWBFExeo6p8PqsPZ8YM4LTTRLukgJJC7Pbbrbb4efPENbILhGHDxDoFBcAllwA//alYvnhxJKRZ\ndkj2TsyLjyTRl7miIrrigVMUWqqdg5fRcDo6oUT9ParAiVelt7JS3DfVH9jWFl/wJtt552MWfiYF\nlRO56mwvADAYQBmE+evvECawF7zugJnnAJgDAOXl5R4MQNH4mnw4GDiIAAdwMHAQzfuacdfguzB1\n+VQc8B+IfXybuYvBOBg4iNrVtXhy7ZMY029MWKAkEiJsx66lyJe3sTG2tmIYIg/j5ZcjTuvCwohz\nr64u0qHJCaxGjYp0CnIfBQWRXAGfD3jxxchocvx4kf/y9NOR46pls51i5e+6y/mlNc2IKSMQEJqH\nLMUNiHb6/cDDDwO9egHvvx9ZZg9RHTcuYpJra4uEEZtmxLS3f78QjDfdFAl6sF/vRO6NF+SUA9J8\n2bWr1dmbLgdqvHZ5bbc9r0c+B4B3c5bEPoCQ25SUiPsn2yX/HzMmYoIDIpqwqkEnQ6zyNcle7zlz\nxLt4wgnOIdWptDFXhFomBclnAE5SvncPLfPCDgBrFbPYQgA/BvAkgC5EVBDSShLZZ8K4zUVScnhJ\nlKDwiipQ5q+bjwnnTcCMhhkIcMBzYqOb4HF62NVIKxW/H9i3z6qtnHNOaP8NwoQjX1LV5zBrVuQF\nl50DEBFgqnnG73eOEFuxAvj97yNCzj7ytp9HRYXQNKZPF9+LiyN1x+x1xVSTW6Ix+IGAEKZyf19/\nDUybJv5WhYkT6Xy5VdOS7JidTH5yeXt1KGqb1JwRe3KqmzlLxcmMWFcH3Huv1WQlj1tWJqoYyOs0\nYYLQVJOdRRGIFozSvGsYEe063vb2+zFnjjW68eWXgbffjt+2eHkmuZbxnklBshLAj4joFIjO/ipE\nfBtetu1CRMcx824Iv8gqZmYiegvAlQCeBXA9EtBSEqXipAosHb00qtNu3tcMAnkSJv269sPGLzei\nLWhVDRiMVn8rptdPDzvwWwPxQ4QTmfpX2rv3O1jgTDO6M165Ehg6V
Lyk9qRH6d+wdwoNDREzUVGR\n6PDlC20Y0fsxzehEyx49gOHDI/uzv0ANDUKAAWKfM2dGRu8+X2TaUrUIpmqms79oZWWiHfZc0+Ji\n4JFHxPl9/XVk+T//GVuQpOPllue9bVtsx79MGGxpyW6H4hYyXFQkfFHSB2dPTk1EkMsBhLyecr+A\n2JcMKZb7feSRyGBG+k0SnagtVvkadSAlk3Ptnboq+Jy0MHvSbFtbfJ9XrOcp3Y7/dJExQcLMfiIa\nD2AJxPS8TzLzRiK6D0IovEhE5wJ4HsL3MYKI7mXmPswcIKI7ACwNOdhXA3g8tOs7ATxLRH8A0Ahg\nbqbOARDCxN5RV/asRKeCTmj1t4KIMKjHILy34z0cDByMEi7HH3481gfXO+6bEQ51DhPPIW83t8US\nPOooT+1sTRN49NFIZzxxotASmMWDvmKF87GDQeDVV63RWNJPAUQc6Y8+Kl6ilpbIiB6IOFHtTv+m\npohfRZqWTDMyCpQvjzqjnDw/+RKNHBmdu6AKEXtEELMwy91+O/DttxFTUt++wpyltvuKK2LekpRf\nbvtI2KkyckWFtVTNH/+YeMeZrNbk1rG55YyUlUW2TaZWlerjAiK+kl27rM9aYyPwmBJmIwUtIExJ\nwWBsIetUvkYtuaNq14FAdJCIuu3110cGZAcOiPskn1eVwsL4gjXW89QeEVme8BIjnO+fVGdIdMI+\nU2L9tnoe+beRbNQYljlMalfVcsF9BXEz5GU+SrwM+FSy5d3i06urE8tLKSwUeRtqDomaYa7O+mb/\n3am6cazjyFyQVGYEVLe3z9tdXR3JQVFzEuSkYZMmebuuqeQZeJk9sr5e5JXItssqBV6rCNvzVBLJ\nT/GSl6DmjLhNHOY1N0LNEVHzTezPqb2itdzO/ry55VFUV1vn6xk50rqtYUQ/F27XxL6tfb8DB4r/\nvVyPZPNM5DVKx5zvErR3HklHx0lTWfLxEotGYpCBvsf3xeyLZ+PWl29FgGPX7ZL+E1+TDwCc/SAu\n5jZPbXZxGI4ebQ1rjYfqoI46BxamtGnTgDPOsP5mmpFRnmFYc1bsmfWAGJHV1QnTl91h6za6dgo+\nUM1FakSQaUYHJEit6sEH4/tF1GOmEtVjH2WqDvapUyNalFp/SRbmbG4W5ycz/Z00IhnYIE1FbqYa\nr+1zGgXL667W2lJ9OV7McGodLnmvJk4UVRsA63NqmhHNR73PdnOl2zw8dj9gQQHw+efWdYJBYOBA\nUVfOrlnZr0nXrtZnmJWoO7uD3Ysp9PrrI+fsZk6zL6+sbL9ik1qQeCRWZFXD9gbU+GrQGmi1CBJ/\n0A9fkw93Db4LfY/vi7p1ddj13S58vf9rS2iwChFhxWcrcO/b98If9If9IIBVsKQaPqwi8zd8PhGq\nq0ZZecE0o01WCxda1XoiazQVs1gmS7GccQawZYs19JVImLyCQfGiz57tPYIplrlI7YBra6PPxc1c\nEOsllmYze4SRF5wEkT2vQiaLEgnfz6hR1mAHt9wPtQigen+kqQaILwATEZRu4dtOphp7lKH0f6jt\nnDHDaqKaNcsalg5YK1rbn7kbb3R+NmpqIv47ud5RR0WbdVetEqHwMujB7ZoAwjSr7tPNRxfLdGV/\ntkePjlwnaZ4OBMRAwu47yUaxSTe0IPFALAe3/E3W1FIpMArCkV5q5z/upXGugsQf9OOFzS+EBdLB\nwEHUravDk2ufRFugDYVmIXzX+wA4ayzJoo7khwwR4bWjRonvc+cCa9aIl8QwxDJVkzjjDCEg7Kgd\ngmEAH31ktXtLgRIIAB98IOzHt9wiRpqLF1u1Hr8fuPVW0eEA7h2TOsujmil+7rnRI0s5KpWjOOk7\nimVPb20V53L77WKk7KYtxHN+24WSXZNSQ6PtI90DB4Bf/coazSR9Fbt2Ra6BHCCoYdJSeBcUiE7z\n3nsjwuqOO4SvKZ6mFws3oaMKl5ISEYKtaoNutd6k41vVblTfkAwDl5rIiBGRkPbi4mgB4FRdt7hY\nPHMTJ0YE0ckni4FGrNwU+zWReUDy2DU1YvnUqc5BCk6C3y5k6urEIMEenajOGST3qU5BkW3/iRYk\nHojl4Ja/BRG0RHIRCDf2uxEAMHX5VJQcXhIu5rjr+10xj6fuo8gswq7vduFgQDwhBwMHMe3daVjy\n8RJPkVvJUFVlDXWsqrJ2fGqyYnFxRNOIxaBBwDvvRL4fc4wYYamW5YMHRUdYVSU6GTuBgAhnlsJM\nDe+1q/ZSSwLEvleujB5Zyo522jRh1hg71j3E0+ezCqZp0yLmMbkMiO54vBaDtP+mdnSyQ5Ud7TJl\nDCKPVVlpFYpz54rzKSuLOKHtZkQ1eo5ZnJNdA0hmROsUvm0v9KhGZMnjS0xTCISf/cxaKkV2jGon\nrE4/Lc1Ikya5a0+qYDUMMQFc//5igCK1IdMUUYReM/wlaiShUySXPUjBqY12IQNYBwLq9ZLJtnKf\ns2alN18lEbQg8YBbPon9N9MwQSD4g36YholNuzdh8LzBYd8IgVBoFoLtzgAXiAgzh89E485Gy/IP\nmz/0HLmVLtTOoaIi+oWRtb9UTSNyHkJwqC/D7t3Ox1m4UIQgd+rk3ha1Q5wwIWKbV1V7u61cCiqn\nkeWSJeK3DRsiGfz2l7yyMrqEidSmpNlMag8yKc5NYMQybdg7OmnGcup81cxvu2mjrc06Y+TcuUKY\nSh+J2zQ7ch8yCVVWOZATpHXpkpwfSK4vNa1Yr0AwKO7JpEnOHa7dpOTkT3DTKrdti9wv04xUjLab\nP0ePts5e6vV81fvoFMotf6usjM6tkYOOCRPEjKejRonn0UkjASLJtoA1cm7DBnHtspm4qAWJB2I5\nuO2/AUDdujrMbZwbZb5iMNoCzqnmBAIRWYpCBjmI5n3NGH3OaMxtnBvORfno64/CxSBV81k2sY86\n33rL+nJPmwYsWiQ6jOJi4Yy0d8RuLFsWfx1A7FsmKX77regIZEcoOwY5ynRKTnTKrpemBKdR5OzZ\nQHV1dGeuhjurSXE//Wmk81c75srKiCZj98nYR6TSxi4z3qUfAQAGDwYeeED8vW2bNXRVXh8ZJrt2\nrVX7iEcwCLz+OvDmmxG7/2uvifPt1ClxbcWuaanXzz74kO32+awVCeyoM37Kzl89nvo8qh2taYrC\nooB1OgJ7sVEg8Q54zpyItl5QYBVOJSWJaaKy6Kg0W6qmQPk8y32qg4zWVnFd1Km2M51npAWJR2I5\nuO2/+Zp8ljlMVBgsijjCQIFZgDH9xoTnMCk5vAS3vXKbZduSw0tQcVIFxpaNRe3qWiGMgm0wYIT3\nlwvYBcvzz0e/zPPnR4+qE8Epsks1M8kPIDqLX/86kjsjExkB90go0xS+ILXzt48if/Ob6NwYGf00\ndap1JknVdKRWB5g5M9JO+b96rWTxyVGjrB1aRYVwCMvjL1smNLhZs5xNV/K4u3ZZBYzTdVR/AyId\nuz2hNJZmF2sEXFcXua6q
8JD10WbNsj4bak02dd9qZWC5nr1NThnqdie+FBhy0OBUbNQeDKCWrQGc\nTZZqVeK2NuCyy0TkV6ygA7kv1YRlLxvk80V+U5Nt6+qiE45lcrE832w43rUgyQDxSqhw6N8vSn+B\nxy61Fi5u3NkYFhiy9Pyc1XOwZucaGGSEzWTSsX8wcBATX52Isf3Hhk1g6SgKmQ7swkVNjvT5hEkh\nzkzGAEQZ/GuvFfb+8eOd64fZzWltbZFpieVvjY3CSX3wYEQ7kh1bebkYwakvoEzAtJfp+PDDSCAA\nc7Smoc4hryKrAzz0kGif7KhVLUidx12WkAcindbatdZ9/vOfzjZ0iWGIzk9qOfaOVTq17cl3sTCM\n6EKSsSKK7KG2MvRbfrp0EevX1AgtSGqQMvHUrTKwvU2yarPaYat+MiBiDpQRdnKitHnzIsU61WWy\nGKSK9N9J7eamm8SzuWBBtEa4eLHVX+HmZFfLCzlp0E6a6oYNQgNSKS0VgSvqoCIrjncvySb5/slE\nQmIspiybYpnsyqgxeOCcgY4JiNWLqi1JhWrCYdH9RTzkySGekhnVj3mvmfQkWtlEJlANGRI7UVFN\nClSTyBL5mCZzz57uv6sTY7l9iCLJXjK5Ty6XSX5u7ZOJbXJ9IJJwN3BgZBmR9feBA63JhvbplydN\nsk5f6za5mJrEZp8UrLY2scnTTNOaKGqfDEu9TlOmiL/ltSUS19otaTHeBGNu00XL5EE5/a86gZa8\nFjK50Z40qbYv0emo7eftdK3UhMhYCbMyWXTSJPeEQ7m8vl6saz/ekCHeJw/zAnRCYvtR2bMSpmFG\nTFQE9O/WH2t2rbGYrRiMP6/+M+atnYe3rn8LgPCv/PQ0UdN88ZbFUX4WgwwQhP3BLcExwAGMf2U8\n+h7f16KZJFNlOJXKxPFwSh6UxSA3bgTee0+UJ1ETA9X5MgBvGo1cr6nJ+TciUVvL7mOwwyxGrWVl\nYgStlpbx+4W29OijwoegmhtkDS/V4Q0I88rOndZlzCKMU46m1d9aW8XovbY2YvqSUWZyOmbDEGYi\nNRjCbu+XZhLZ/ciRv5zcTCKj4woKIoUZZRulyc8tomjuXKtPQvUVuEVWuUUz2UfjUmNQfQayDbJI\npN0hrz5bixdHzGj22mBqzo4bbr4+ef2ldiu12JISawiw/ZlfscJq9uvSxbnIpbrt1KnOz+q77wL/\n+7+RMPlkStQkgxYkGaDipArMvng2xr8yPlzVd/Q5ozH6nNGY/MZkvLPtHUvOSWugFdPenYZXtrwS\nDvOVznQ7d/x/d6BLcZn5vf0AACAASURBVBeUHF6CcS+Ni8pdkQQ4YInmSqTYoySZbZIlmTyFkhKr\nTTpRpBmKWfgbpJlHdrBOBALC4f6b3wh/iv23xkYRDq2aoH79a9Gxqw5vwFmwGYYI2926NTJBl0T6\nDfr2FR1i376iM5J1t+Q6zc3u19Mpcknmk0gzjur8Ns1IwUzVibxtm/DXqOYYFbWAJrMQeD16iPar\n87kA0Z2sU76GW7isWkpeNePI/dh9K3YfnRSWatl6NcHx4ovF719/LSINjztOfF++PPqc1YoDQCSv\nx57zo/px7MEHbpn4dkpKnH1dzOIZlIMtdV6dTKIFSYaQU/TaR/Nv3/g2GrY3YOKrE7Hi80garQzp\nldiFCIFwTd9r8O2Bb/HhVx8CAHoc3QNNe5os6xkk3oxis9gSzZVIscdUtskG9s7GPhmUFAaxtAvD\nAAYMiNYGgNjOaLme6nCXBIPOZfsXLRLl9L2UoDEMofHI0FP7No2N1kilM86IFqQtLZHOecOGSG6B\nmpdhmkIQrV9vTfwkEpOGbd0qzsfvj+SkjB0bmeZZVhxQJxT79tuIlqBev2BQlPYvKxOhrWqui2HE\nL9XuVjlA1U7V+VDks6FGUMl22AkGhZapdvLqnDCLF1sFrFu+VO/eImxdLW9jDy5RQ4DVaEGV886L\nn8g6bVpk3h8VKcjksbJZIZg41hvTQSgvL+dVq1a1dzMsNGxvQOX8SrQF2mCQAWZ21S4kJplx63WN\n7DUSA08cGBZeDdsbwqVZFm9ZLI5nGJh98WxUDXDJvlPamC2NJBXsZjHVnGEv+SJHoHLkaJ9lUZoX\ngNjCJJMQCY1HdepLRo4UgsmLQ1yaaez7lufodn7XXgv8v/8XLcScEjDlsWS+y69+FanA63Resa4p\nkQhpPuYY8b1r10jGudohy3I5aqKseu+l8Fy1Kna4uVqlQU1OdDpmPNSKBoAQFKpGKa/dj34kSgGp\ngQAqpim0HaeIOLeptUtLxSRsau5JuqYYIKLVzFwedz0tSNoP2ck/vubxuAJCCpt44b4DTxiImcOF\nbi3zWWT+iUkixjHIQZiG6VmYZMpHki3mzIn4FOy+A7UjUud7t5eCsVNYKP6X2kAmXqMhQ6Jzanr3\nBjZv9paP44aXc6uocM/ncfIRqJFfqbTNjlMdN3m8xx6LCBNpJgK8HV9OIWAPO5bHZI6/H9O01o+T\n+SlqAqE0l0rTZjwMA/jDH8TzKU2A8QSbYYh75jQBWKrJiFqQKOSqIAFE+ZS737w7rjYystdIiw/F\nDQLBIAMGGfAH/TEFj0kmlt+4POkZGTsaTvNTNDYKs8ynn4pOoUcPMQJUcwns866oOE2g5ZWBAyM1\nztJJ9+7ANde4txlIvt1ek04TwU2TMQxRH0w6rL3Ssyfwy18Kp3ZLiwjHtu9fzpQZT6Co16m4WBR/\nlGY/2UbA+zUpKopMge1Wh8wJuY6qHcab3tgLWpAo5LIgkeaj/X5rVpFqxio2i8NRXZPfmIzl25an\nLRGx9NhSPPHzJ1wFRL6Yt9JFsmUl5LzcnTpZTTOjRwtTy8yZIr5fvm4yksnvF53Aj35k/R0QjuS+\nfUWHb59VMhUKC8V0rxs2iE7044+j9+2l8/KCm0aRKzidpyqg4pnHVLp2FT4rr5UZ7O247LJIqaFY\n16tfv2jtxknwJ1uFwLoPb4JEO9vbGVlipcZXg9e3vh5ORLy5/83hdcq6lcHX5EPJ4SVY+fnKsNYB\nuIcAS0wywWDHCDAA2PSVqAd2c/+bMfocMcxWtY9cdbhnCq/RY3bshS7t+5QmGHvEkt3M5lRAcuDA\naF+JYQA/+Ykwd1RWRmZ5tDuH5UyTajXlYDCSMS3t6arZRFYHViOv5HKnDi7W8ptvFqaZW2/1puGc\nfDKwfXvyWo085xNPFFpkvHa6tftPf4rfodvZtUt8kuXDD6MDFZwoKoo+F6dryxypzqCd7WkglzUS\nidvIX10OCP+GLLMCIK5m0r1zd3y29zNPGoxJZtgPo5arV9s1c/jMcBXjRATKoWIeywT2ysaqf8Bp\nXbvz2V42xKnOk326XFnoUTUZDRki8hTsuSYFBaLTUp3AqvNZLfGvCjvDAP7934HnnotkxF9/vbX+\nlbo/04yeykDtvgoLhQAePVo4u197LfKbjKpyy1ZXyVawhZsZMJ42OHIk8Mor3qIAp
fapne1pIB8E\nCeDc2U5dPhX3vHVPlOZRaBRG/CAeIr5UhvQY4sk8Vj2gGo9d+li4XSWHl2DiqxPR6m/1HPklz+tQ\nMo9lArs2k0zHEM9sZ/9dCrC2NtEhydBV1QmsRsk5Lfd6HDV50J5fYZoiyc5ecVqtg2W/LnPmiLlt\nJLW11igvezSf6mMoKIiUqVFzSdRoOVkvzEkQFBYKjUjNEzIMcfyyskh7y8qc86AGDhQJjU4CT0Z1\nAZHCqG6mQ3nd3DRlL2hBopAvgsSJO9+4E9Prp1sitgiEWwbcgtHnjIavyYcVn6/Awg9c5r51oPbS\nWnz8zcd46N2HYgqTkb1GouuRXQEgfCw1MKDQKMTbN7wNIPYkW6owNMnE/cPux12DHVJ3FbQGkxtk\nsxS5/Zh2rSpR1Gg9p85U/i7L4zuFj6vHdjNNytpxnTpZAzGkFhmrQ58zx2r2Ky4WlbTlvktKhAa3\nebOIEHOattdeWBKI+OdSvWdakCjkqyCZs3oObnkpMqwqMArAzI6zNA6bPwytgVbL5FpOGGSgqn8V\n5q+bjwP+A67rmmTCICMcOlxsFuORnz1iqU6s7kvOxzKm35ioopGJaiTq+m771GhyHa9COB3aZqbQ\ngkQhXwXJT//6U7y2NWLoHXjCQIw8c6TrvPG+Jh9aWlsw/d3pjqYuObFWvx/2w6qdq1wd8PJYKz9f\naRE01QOqUdatDLe+fCuCHEShWYgx/cZgzpo54X0RCJ0KOkXNM6/+HStCzNfkw7Y92yy5Neo+tTDR\naLKHjtrqAIwqHWURJGP7j3X1ScgO9oK6C6K0DClALj79YizeshirPl+FIILhyC+7QDFgoFNBp6j9\nzFs7D2XdysLl7APBADbt3mTZnsHheealpiK1kFjmLKmFtPpbQUThWmOy5H4yEWOqbyeZAAGNRuMN\nLUiyRDI2fyk0FmxagFGlo+I6tmWorupLkVqD9HEs+nCRECIwUN6tHI27GqMEiWmYWL5tedT+/UE/\n5q6JZMoHOBBVnViy6/td4bDhA/4DqFtXZzHF1a0Turw0WfmafGj1twpNKiS/Lut1GRZvWQx/0B81\nxbEbToEBUmgWm8WHhFaj/UuabKMFSRZIJWqpakCVp8goIHr+eClAAIQ7V3Xu+f7d+mP1ztWWfZhk\nhgWFE50KY0ymHiLAAbzy0SthzYXBmLd2XrgtQ/8yNHyM2tW1+M2g32Bkr5EwDAPBUJgKgzHwxIGY\nNGiS505Rvc5y2mJp4gtyMCfzYNLd6esIOU17oAVJFshWUp/b/PHz1s4Lj+plHkjJ4SVo3NlomXUR\niJ3gGOAA3vn0HRQYBVFTCdud/G2BNvQ+tjc2fbUJgNBmZISZKqgYjGnvilod9tL7snN18wfZTVbq\ndTbYgGmYACOskXjVajKNXWtKZ6d/qCWQJovW2tKLFiRZQNUUMt2ZyY53zuo5GP/KeEu9rVZ/a9hM\nNvHViTGjttwIIghisjjjZXKkCoOxuXmzZVlLawte/OBFx/0+9O5DuGXALXj04kfDUwY7ofpS7CYr\n+3VWhWYsH0m6OhUv+3HUmtKoLWXzWctXtNaWfrQgyQJ2TSHTD23D9gZLmK4kiCDe+OQNvNn0JgLB\nQNL1upgZ/bv1x7ov1uFg4KDrJFx2TWd6vXM0GRCZLVKGHQc5iPnr5ke95HLEbTdZ1a2rQ4+je2DC\neROwdudaV5+SvbP30qkkKiBidU5OWhOB4nb6XoVdtp+1fERrbelHC5Is4WSiyRS+Jl/Y1wAIs1Pv\n43rjg68+EB0+A0QENfTbnjeiYhcURISjOh0V1naYGQVGQUzfChAdHeZEgANhAeT0kssRt6qRmIaJ\neWvnoS3QFl62fNtyx6mG7Z29U6cir6Hs2L3ktMTqnFQh4KY1pUNISZJ51g4lU4/W2tKPFiQdkMqe\nlSguKLaYrrZ8vQWFRmHYVzLhvAmY0TAD/qA/PDdJ3+P7om5dHdbsXBPOMyEQfn7Gz6NK2E+vnx4W\nDKq5S531MRYGDPQ6thc++OoDV82IiFByeIllmRxxy8m6uh7ZFbu+34UXPnghvB8nU5GclVJWWW71\nt4Y7TimYiAgtrS1h05lhGLj0jEvDAiIQCKB2da2jpuTWOTkJgUQ1hlRG0KloU/ksXGK13avWls/n\nn220IMkTEnmo1YrCb3zyBoIcRCAYsFQUHtlrJEb2Ghm1Tydzz6RBk9D1yK6oXV0briRs7/wDHMDe\ng3s9zeIIACDg/a+c5y1VJ+Ca+OrEKM0CgCWb3m6mM2BEdeaV8yutUxkjiI27N2LhBwvR69heWL9r\nPQLBgFVABoNYtHkRCowCBAOxc1rcOicnISDzaaT2Ey/Lf9uebSgwCoAgEhpBe9Vk6tbVhQcdqlaW\nqHDJlY7Xy3nH09ryxY+SK9dcC5I8IJmHuuKkCtRU1mD5tuXh7cq6lYVzK55ofAKzL57tmCTo1inO\nXzcfrf5WVw3i/a/eh0lm3DItAOBWUYFAGNBtAFbvXO2qWdT4atAaaBUC0qF+tmEYmDl8pqUzbwtE\nm92e3vB01DJ7uxiMG/vdiF3f7cKiDxeBwVFCSr1OdlPatj3bwqZB0zBR2bPS8/20l4qRpf69duBe\nNJmG7Q14cu2TlvtVcniJq8nPrd3t1fE6nbvadnsOk1eS1QKz2bHnkrDTgiQPSPahtgsENekvGAxi\n/CvjHUf7clv7iHvm8JnhSDBAdPoy8kgS5CAu63UZFn24KKZmUmAUIBAMRDnfDTIwtv9YbHh1Q3iU\nvHH3RkxdPjUcLiuXOx0fEJrE3DVz0bizEaPPGY2Sw0uiwpy9YpCBozodhfnr5oOZLUJK1XRMMvG/\nl/xv2MFvjy6j0L8NX27Agk0LwoIw1v1UNQUOMLZ+szVqHafOBHDOG3LSZHxNPktQRoADuO2V23B7\nxe1R27o9h3bhni0Htv3c1Sg90zARCFhzmBJpjz0na9uebWjY3pAxLSYZAZRLQQNakOQBqTgH7QJB\nTfoLcCChh695X3PYrGXAwIWnXohRpaMwYfGEsNnIIANnHHsGRmAENjdvRnFBMdbvWm8RGATC2LKx\nAIBXt7yKpj1N4d9GnDECVQOq8PE3H2Pau9PAzHh6w9MgEEzDtHR6DAZxdOhxEEGs+HwFVny+AnMb\n54bP1YCB4488Hru+iz/7kJw8LMhB/LHhj5HkxqCoNCD9SfK8AxxA9UvVAEQSqT26TJqNxr08LlzJ\nWc1tcYomUzUFGXG3fNtyi4Ne7Uz2+/dj7ItjsfWbrWgLtMEwDNxecTu6FHex7Fc9TsnhJVGC2B/0\nY0bDDDx68aNRgQCqP6nk8BLHcOxsObDVc2/1t2L8K+MR5CCKzCJc/KOLw36ztmBbwp2s6oubt3Ye\nHl/zuKNvzK09sTRAu8BIRACpVSHKupUlLOwyRUYFCRENB/AnACaAJ5j5AdvvQwDMBHA2gKuY+R/K\nbwEAG0JftzHzz0PL/wJgKIA9od9uYGbb
xJMdi3SFdFacVOGY9OcVObJniCitU39wKvoe3xe+632Y\n9u40UX6Fg+EEQ0BUDX7s0sew+KPFYdNQsVlsMbMBkXIukwZNAgCs3Wm9pQyOCmcGEHceFjWSLIgg\njul0jKsgUU1yRBSZWZJhSW5845M34PvUh9N/cHpUG6WW19LaYjGTyX3LZQRCebdy9O/WHws3L8SM\nhhnheyLvdSAYsGwf5GBUhzlz+Mzw6Buw+p2CwSBmNMwIl/of99K4qOTUBZsWOJoi/UF/WKNTfTlS\nKw1wABNfnYjrz7k+LDANGLjwlAtRU1mTcWe93XdERAhwIKwRgSOTvgU5aAnaSCSUWmpsXkb99gFf\nyeElmLp8atxQc6+ahd3XJytyN+5s9CzsMkXGBAkRmQBmA/g3ADsArCSiF5l5k7LaNgA3ALjDYRf7\nmbmfy+5/owqdQ4F0hQ9XDagSnX+MF8k+6mne14yW1pZwlJdBBpjZ8uAOPHFgWFCoHAwcRPO+Zjx/\n1fOWF1gdsUvtpqayBoCYv6Rft36WgpWxUDtCOcujm4A59vBjHZcfVnAYfnr6TyPRXyHhIXM8Jpw3\nAc+sfwY79u4Id1Yya18lEAxg2rvTsHCzdX6Yk7ucjKaWpvB3gwys+2JduICmpDXQGjZLqUKbQPAH\n/VEdZvO+ZozpNwZ/Xv1nx/PyB/2Y9u40LPl4iSWK74D/AG59+dawuc8gIyysZFDB3Ma5FsGzdPTS\nsFYa7rABS+epCpFY/hS79uVV4MhnU7ZL+o7kwEQer+uRXWHACD9fzfuaw9snEkQQTzioqAM+p6oF\nbgKjsmclTMNEMBAEEWHFZysw7qVxUeY4u69P3v8eR/cIC7tWfytqfDXh+5AtMqmRDASwhZm3AgAR\nPQvgMgDht4+Zm0K/JTlDsyYZYgklpwgn+4hVdiQMDj+4o0pHWfI7JAVGQVjrsR/X3gFt+HJDuHNz\nypZ3g8EwycSgHoPw3o730BZocxxlm2Tivc/ei9p+yMlDUHpsKcq6lWHJliUWm3vjzkbs+m4XZjTM\niJsnAwiNpX5HfdRyVYiYZGJErxFYtHmRo8BraW3B/cvuRyAYgGEYmPWzWWFT2q7vdjkWslTL7tuv\njRTw6vVgsGX9XiW9sPWbrZb7bq+KYBdwpiGi6+y5MLF8Jk5+DacyMU5lcADh7LdUZAgCPY7uETVA\nAmCpPq2WDLJHqG34coNFS1eFXjzh4CRMAKDGVxM+jhwcxDJRy+fVH/SHByGPr3nc4ner7FmJQrPQ\nIsDlPtR3T5pAs6mZZFKQnAhgu/J9B4DzEti+ExGtAuAH8AAzq0O8/yGi3wFYCmAyM7faNyaiKgBV\nANCjR49E237I4hThZO+QDTJEAmIoAdBuu29pbcHD9Q+LPBSKFgiyk1A7oA1fbkD1S9XhYyWadc/M\n+Pzbz9EWbHPVRo457Bh8vf9rAOLFPe0Hp+GK0isw671ZeOfTd6J8CgCSKiXz5fdfuv5GoHD01ZIt\nS6IELzPj4fqHI508C9/Uhi83YG7j3LBGOKLXCPzs9J+FO7h418Y0TMssm3Y6F3W2CA67TyqIoEXA\nyYTWx9c8HhYIslO2V11WOzz7qHzBpgUWP4cclKj7kP6qXsf2Qmug1dJGdd/2gYrdHGz3OxUYBSg5\nvMRSBcKpUrVdi44VDSYFpfrMBFlcO/WZl5FxACy+NpUAB3Dry7daAmLG9BsTzp9SNRZ7uH+2ne+5\n7Gw/mZk/I6JTAbxJRBuY+WMAdwHYBaAIwBwAdwK4z74xM88J/Y7y8vKOP3tXmrCPeoDIaEl2LjJ5\n0f7gNu9rxl2D78LU5VMBhEa9QatD3y3K6LZXbvPcWRsw8JOTf4JjOh0Tjg4LIoiPv/k45j6OKDwC\n37Z+G87HqLu8LiqS7eH6h3Fz/5vD0VVehYhM9owb9gxGWbcyy0h34+6NWPzRYnxz4JsoTUF2Qr/z\n/S7c2QU4gBc+eAEvf/hy2FEeKyLNNExc8qNLAAAvf/SyRbOS9/SEzidg3Rfrwv6G/l3744SjTsCL\nm19EkIVA8H3ii0TMMVm0Uum3ISJLXk+Po3pg+OnDw8ezJ4Aed8Rx4Yi6IIJ4fevrePP/b+/co6uq\n7jz+/d2bmyCgPII1oSSivGmVBOjUmKosdSxgEVo6ba1rhSIt1fqcqUb9o6OjsxZja0cKRZexyOiM\n1elUiwIGqkiAhcgjJBB5BELknUh4KiIhufc3f5yz993ncW9ucnOTQH6ftbKSe+45556z78n+7d97\n34eOys1qTHY0WMYMtZBxVxnwC8V2m4aU34lAmDR0Et7a8ZYngKNkSwnys/Nxzdeu8WhPftFg6tyZ\nPTN1RJ75HBAIz69/XoeAq75ATeEmvdCK9dyE2TKVFhcWe67F9F35hft3ZMZ+KgXJYQA5xutB9raE\nYObD9u9aIioDkA9gLzPX2bs0EtEi+PtXhDZSkFOAshllHh+JX+HDWA+unwqv/sk3Htnom/xmlnRJ\nhIlDJiKzZ6bDF6GiyTJ7ZqLhbIPnmAOfW85ZZVMv22d1lARB90AJczimv8FNAAGMHzges8bO0qYn\ns1uk7zEUtdcX5BRgcfVi33wWk7JPyzyBBioaye+aRl4+MloOB5YwWrJ7CdKD6SjIKcCa/dEeMlf0\nugInz53Ekt1LEAwEMWX4FJTWlKK8rhyVn1Vqn1OAAiivK3es5gMU8PhtiJ0mxX2n9+lqAGo1blZV\ncN+7WnyottJ+E6zp0FeoIqVhDvsKGcAbLFJaU4rGsMeYgQhH8Ktlv8Ivxv5Cm+caw43aH6USc5sj\nzbqBm6k9qUWXiszTgtLOe3L4z4zbIxCmjpiK6uPVjqCJJbuXIKt3VswINdOENWPMDADo8NbUqRQk\nmwAMI6KrYAmQnwD4aSIHElE/AGeZuZGIBgAoBPBb+71sZq4jS5RPA/BJSq6+G9OSD8V0NvpFk7m3\nA3CEiCpM/0lGWoZepRbmFqJ/j/4AvCtowDKzqNWfm7RgGk6dO6VfBymIcdnjsOnIJkQ4oidk03Si\nSCSR0tz3kcJH8Oytzzq2r96/2jdjX2Xrm2GagFVqJh5BCnp6xsS7ptnjZqNoTJFevQJRbeVc8zns\nPbHXcUz9l/X6viPhCI58cUQ7bsPhsKOLpikgbx92u+4VY/oOiMhX6DU2N+oWzWoyjzfWP/rGj7B6\n32oc+uKQ573po6d7Ej9N89T58HlPKRtVIkeZ5Yb2H4qdDf6VFdT91p+pdwjjzJ6ZyOyZqdsoEJFu\n4GZqTwTCwEsHYkj/Idr3ZobI+6HaSauoxRsW3aC/NzVOanHmDrh4betrOgBBhX1f1uOyDs14T5kg\nYeZmIrofwApY4b+vMPN2InoawGZmfpeIvgXgbwD6AZhCRP/GzN8AMArAS7YTPgDLR6Kc9K8T0eWw\n1pGVAO5J1T0ITmJFvLSU0Dhn7RzHPxtg/ePMzJvpsPH6PfgqSmfN/jU6Skqt6t2RXdNGTkNWryy8\
nvOVl/RlK+1A1wCIc8fzzA1HtQlU0jjXJDes/DLUna8FgzN8wH9NGTPMkJ5oEEEBGWgbmTpyrQ6BL\ntpTg1a2vYsaYGTEz/BUjMkf4Rof5kR5M1ytRlQOhxgKwJqTDX3iNAqZfqryu3HFNaiJ1X2dW7yzH\nd6wc3acaT+G5dc95vmsgKtDCHNaC1Q8Gx9TSTI1OUeYqUqrOYfoJHHk9DG0mi4U78gsAnl79NI5+\neVSbMMMRq4FbWiANHGaHRnL4i8M4/MVhbDi0AUVjiuJG1gGWOdGsxvDC7S84NCwADh/k8+uf18e5\nAy0iESsEvyO7gqbUR8LM7wF4z7XtX42/N8EyebmP+wjANTHOeXM7X6aQIG3NpPWr2JsRzND2ZSC2\nFqS2vVL5it4WCoSQ2TMTz6x5Rme3P3K9pR2sP7geiyoXIRKO6Im1bF+ZIxQ0q1eW7/XMnTgXgKWt\nxCo+ufektaJ3OzTdDlMC4dHCRx2O+/veu8+hHdSfqff4o9wM6DUAONbiEAMAJg2dBABaY8ztk5tQ\nxWWTRLL/gxREfna+QzMtyClA1dEqPFn2pCfYwU/TmzJ8CrJ6Z8WMNvP7TAAeU+kEu6JyWjDNMY5u\nJ39rqhuMGjAKIzJHOL4fPyHMYDSFm3BD7g3okdYDedl5qKyrxPu17+v7Vc9I0ZgivFL5iiNx1/xu\nmNkhIFUUmsrPKtlSop9RM+DBrSmadKTTvSs724UuRlsz7N0hlC2VTXfjdpLOzJuJ42eP63/wAALo\nm9FX7++O/JpgV0M2a44plA/IDA+tqI/dWEuVzFcRQyqvoP5LZ5Ijg/H5uc+16WvO2jmOVbMZlqtW\n7H7RcaMHjMbHhz52TpIIAORflt/POeyXyHnb1bc5JrzWMOiyQXq1TCD0v6Q/+vboi5oTNb7ncwuW\nIAW1Caf+TL2ugLC1fqvv8aFACH+c/Ee8vu111J6sxYJNC/D2zrd1RvfdeXc7MtkJhKv7Xo1HCx91\nmLVa6uqpkmJrTtRo82QoEMLlPS/H0bP+kXgMxpoDa0AgHblYtr/MITBUVN3koZNRfbwaIwaMwKSh\nkxzmLgJh45GN2uSpQr3NUkPnms9h4ZaFUZNsC18dgRzm41QigkRImGQy7JNJqHQLMKXJ+Dn0nyp7\nymF6KLOr7cbKBVCmIL+wTcCa9MyyMMpMpQIQTN9AS/cQCoYczl336tic2AKwIpN2NOzAkH5D9OSj\nhN7i6sX43brf6f1DgRCyemd5nMM/z/95TJPK1JFTHZNvokLl4OcHHUKs4WyDb3CDH6omGQCHKTCe\nqQsA1uxfgzUHrCAB0+wVDlvBEenBdKQH03VIeu2pWjy8/GEAiBl9p8faiAIDgJfKX9L7NEWafIVI\ndu9s1J2p06+VH+j42eOOSg8Mxn3v3YdIJBqFVnuyFsXXF+v93q1+FxFEsHjXYizbvUx/rhsGexJY\n/Rg1YBRqTtSgKdKE5kgzqo5WiUYidC3aK8O+tZ/pJ8BUiY/po6cDQNyaT+q6lb/GbZ4r21fmCdsE\ngMLcQu0wdWtS5rncc7Ay/5iJde7Vb6yJDbBs383hZj157jy2E8Mzh+PZW59FSXkJfv/R77Vwu2Pk\nHSi+vhhVR6s8zuEJgyf4mo/er30foWBIO46VHb6lpEuV/Z4IsUxas8fNxpy1cxz5SmEOxwyhbo40\no3RPadzPag43Y/a42ag9WatD0r9q/sqRve+HKusyffR07X9IRKCaQsS4Yf18qEoPfqanxnA0+zyr\nd5ZDMLTYHK4FIRIKhHDTlTeh+lg1AGtc4xVnbS9EkAgXBG4BpswV58PnsfbA2rg1n0xihSZvPLzR\nd4Jcu38tNh3eVCer5wAAFIlJREFU5NBc7l16LwDLLGbWuVKoqKQHSh9wlDYxz98now9ON56Oadry\n6/ny3EfPYUi/IY7JUZk4lDB0lwUpyCnAlOFTPCVbGIzmcLPWpIjIYR6KRWtMYX77LtuzDN9/8/s4\n8dUJx/vKfFVRV4GFFQsdE2p6MB2Thk2KHyZNiOZ0GKYlPyGiTD4qfFYlQMbzVSXCI9c/op8RVQdM\n9bFxo+q1NYed5jZ3p9JQIIRh/YclFHChiqEWjSnCnyr+1ObirG1BBIlwQeJ2/AP+NZ/cuP01Kmwy\n1iTizncxzTEEwrVXXGsl8tmY4bJqxc1gBDjgMH+dajzlOMasFKBeN4ebnStQtsw0boG3ZPcSlJSX\n4MDpAwgFQ57yKcWFxVi6Z6nDR6DzG+zkweZwM7J6ZaFHWg/dRVLhrmWmTX0UwHdyvoO9J/ciwhHP\nKl3VZDMn0qZIk0eoqQlQlQIpGlPk6IBp5kTEEiZ3fvNOrbGauR5+EEWrTwNARV1FVLOMgVkN2n3e\nQZcOwm9u+g1mj5vt6SEzdcRU/xB2+/mIFYTgLiy658SeuNqKMrsWjSlC1dEqDOs/DLuO7QKADvGT\niCARLkj8/Cb52fna1NVSVVfAp24Tog5XlX1sTsru8jEMxtbPtlorz0hEl1iZv2G+nkiURpIeTMd3\nh34Xi3ct9lxPhCOYmTcTuX1ycarxFCrrKpGXnYc/fPwH7VMhEDLSMjB99HTHilsdf/979+tInikj\npmhT18PLH8bAywZiweQFqKizggjMQpyqUnMEVli0ClM2J3sV7RSkIH59/a8xf8N8qxUxBXDXtXfp\nCdTt8zD3jWWSUeMNWEmFyveU2ydXC5CS8hJdOuXS9EsdQmLQpYNw0+Cb8Jftf9G1suZOnBs3Gk71\nmDG/p3jaw425N2LiUCsBtnRPqfZpKBrONuCar1lBpuYCBxErTDrWfbujyAII6GdOlbRRwiNIQUco\nsinQCaRbOvgVDG1N3bq2IoJEuCDxS3o0TV0t2YTVP7xZtykUDDkyov0qwvpNUOFIGL8c90t9nNnC\nWH2W+vu9Pe95jjcDCNRqVpUJAeCpjnx33t3Y0bAD6w6u05qB9i0wsGz3MgzPHB4t53/E2rb6Z6sd\nYzJn7RxHGOo7u97BipoVWFm0EpOGWeVDeqb3xJLqJfqYvhl9PaXk1Vgr5/GRL45g1lhLw5g2Yhpe\n2/oadjTswLbPtjk0MQAYOWAkak/WoqS8xJGHoUKyH/j2A/o+/l77dxQXFqNHWg8tAMYPHI83PnlD\n38O55nN4a8dbmDxsskdo512Rh+0N2xHmcLTHDFs9ZlQbapXUZwqKdQfX4bqc6/DMmmdwPnweacE0\nDO03FDuP7YSZ4a7K3SjfVXowHR8f/thTkkYtVIBowq1ZyDPMYY9QcxcyLcwpxKYjm/RCSpnn3Nok\nYPmYxLQlCDHwS3p0t4aNFWFmajQqhNRdVsLtl1GT5eMfPK6d4IA1OeT2yXVk9ruPU7gjdYIUxLxJ\n8zyBAAEOOMrYKyFihve+cPsLWrN47qPndNJgmMN4e8fbjvv1a+40YfAEZAQzol0YDTPeEzc8oTWN\n0j2liIQj2kRStq9MT8Lu4oUr9lrVk6uWV+lVupk/YUIg
XJp+qaPQplno8Hz4PP687c+OYyrrKjF3\n4lyrA2Z9Bd6pdvp0GIwPPv0AaYE0BCnoWPFf1uMyh8A1x1d99yrvaOORjVoQhTmsKxAowXPjlTfi\n01Of6udHmUfNa7kh9wZPK4RRA0bhoese0oueYCCIe8bdo01Sy3YvgyrhYmokoUBIf48AsOHwBsyb\nNM/T4MyNEmhi2hKEBPDrGxGrBW288i4tUZBTgNUzV+OxDx6zJhe2yrvE+0d1azYqokfNOSoRzX0P\n7vLsbmGp3rvltVu0EAnAWsn/YPQPHA3GQoGQ5xrNDHiz74h7PzMvp+pole5DH2Zn8UK/hFUAnmrS\n6jqDgaAOZ/WL8ApQwON3ubzX5S1WZFb+h1EDRmmtgUBYd2CdPiYUDOGh6x5CZV2lwxSqFgHrD67H\n0t1Rv5I7f6hoTJG+5wOnD+DlLS97rmfzkc2ea9t7cq/DJxMJRzWfB0sf1NqJ2ToAiPqNlFmvKdyE\nhVsWYmz2WADQPU3MwI8gBXWlaQn/FQS03NXOLRjck5oqrpdIeZdEePbWZx0mrHi1ydwCLVZiZ0vC\nze8400RnmsAKcgowpN8QLNyyEAMvG4ji64vjlrJRk6L7c1UyqHLIq2KBgLOHhl+/jcyemaioq3D4\nAkKBkHZ0v7zlZR1hNn7geJw5f0ZP/IBVjqb6eLXjet/85E1fh7cbBqP6eDXSg+meqDlV+Vf5SJQp\nVN2vGgOzm2haIA2Th072OP+V0Hl166se4ZbbNxcn6k84rksJJncV4fov67U/LMxhVNRVYPa42Z7v\nzCwQqdpJL6pchHmT5vn6QkxNOZWIIBG6PIn2tHYLBnNSA9Cm8i7xSEQQ+fWwePF7L8YUGPHOGUvQ\nxOpQePzscUf9prbciykcgGizqwAC2r8SDAQ9Wp5K1jQd7QEEcPvw27U/SAn2YCCIrZ9t9UQx7Tmx\nx+MEdzinfaLCTMwgBnciqllNN9ZCQ5UpURqbqqBslvZxfy+nGk+h7NMyVNRXYFv9Nsd+KrcpPzsf\neXV52HRkkxbEKu9DYUZtmYso1XfEXYblrR1veTS/jspqB0SQCBcAbanx5eeM9+uYl2pMk4NpBmqr\nNuR3nLt0eKKCN9HPU+avhRUL9USuuiNGOOJYCbsTP02ndQQRh0NffT/KNOQ2cUU4orUXtw+CQLhj\n+B2eFsImaYE0x5iY4wQ4nwfAf6Gh8nP8+rb7acll+8owNnssyuvKPUVBVeKjaZpTPowRA0Y4qkZn\n9c5ytBVWvUumjJjiidxTzvYP932oc0fcRVFTjQgSocuTTI0v8x+preVdkqEgp8DTw6K9ImjcAkNN\nkH7tZNvyeeZEaRaAJBDys/J1fxK/e3IX6lQCwu3QN01DfiHTphPc7cspLixGcWGxLkfiNnnlZ+XH\nHKd4C41gIOjom+5nsrt36b2OazFbBqtwYlURWEWgPTXhKd1ITV8nWRUarvnaNSjdU+qoB+cJT2dg\n8a7FKN1TivmT5utwbtPUZrYMdmtOqUQEidDlSabGl/s8HV3eBbD+0VOhDcVybrvbybbl8/x6q5uT\n6ayxs1C1vCrmPbnNXBV1FTEd+n6Tul+PGz9fzvqD67Fi7wrdfEslWUYQwea6zbjltVt01QM/bcO9\n0FCal8rFWFS5CKtmrPKY7MwJXpmWzPyRKcOn4GzTWeRl5+kK0OqzAoGAo4CnqkCwasYqj4/PT9NS\ngRYvfu9Fx3azb73Zyld8JIJg01lCoD1oL0HoJpbz3V0puS2f5xZSx88e99yDmrTiBUCY200hAMBT\nhj5WyHS8c5p9RoIIYlb+LEe9rfPh86g/U6/L1riFmNs8pcxYCrf2pEx2polNmZbK9pchErbaDavE\nwrUH1jpMi24nfkYwGvFnmscye2Y6GlmZRR/jLUbUOVTduUAggAWTF+iqAalCBIkgdACpEISJON/b\nat7wE1J+k7163VJUnbl/sj4cdy8Sv/tVLaCDgSBKa0oRjoQRCAQcwQexIurMpFP3pO3OP1KRXEA0\ng5yZ0cRNMfuBmJqDOV5+WqDK9K+oq/AtGeOHMp9FEEEkEpGijYIgxMdvcm8vM2Ci52mtYGhrg7RY\nn+V3nb6OfCZH8yi/63jihidQNqPMkb8Ra3zNyDTl/wEsQWImO/ppD34LCz8tUOUJtUboThg8wWE+\nk6KNgiC0mvbSfhI9T2sFQ1uDJ2J9ljI7+V276ciPpV00NjeCiJDZM1NrO/FW/WZkml8dsVAw5Mg6\nT/S78LueRMbWrQ3GM5+lChEkgtANScQUlejxrRUMyWhN7flZBTkFjrphZtn/RFb/7pW/YmbezDb5\nJNzX8/Dyhz1BDu77jaUNxjKfpQoRJILQzWgPH0Ui5qV4JJNH056fdfzscV03zCz7n4hmpVb+Zn+Y\nZMNuzeuJFeRgEk9j6cgAFREkgtDNSMZHEet4P/NSqmjPCdLtPDc1kkTMQWb2O+D1qSRzPbGCHFra\nvzMQQSII3YxkJ5+uMnklg7vsSLwclpZoT8HWWo0rVaHlrYVU1dCLmfHjx/Pmzd5KnILQXWlPH8mF\nlt/THiVkLuT7bw1EVM7M41vaTzQSQeiGJLuKvpATRJM17bVnLbOLhUBnX4AgCEJHokxzQQq2yTQX\nqzRNoqw/uB5z1s7B+oPrW3VcV0Y0EkEQuhXJ+hWS8RFdrNqMCBJBELodyZjmkhFEyZrVuioiSARB\n6HAudGd1WwXRxRDx5ocIEkEQOpSL1byTCF0lXLe9EUEiCEKHcrGadxLlQo54i4VEbQmC0KEkGzUl\ndD1EIxEEoUO5WM073RkRJIIgdDgXo3mnOyOmLUEQBCEpUipIiGgiEVUTUQ0RPe7z/o1EtIWImono\nh673wkRUaf+8a2y/iog22Of8XyJKT+U9CIIgCPFJmSAhoiCABQAmARgN4E4iGu3a7QCAnwH4s88p\nvmLmPPvnDmP7swCeZ+ahAE4CmNXuFy8IgiAkTCo1kn8AUMPMtcx8HsCbAKaaOzDzPmbeBrh6VcaA\niAjAzQD+am96FcC09rtkQRAEobWkUpB8HcBB4/Uhe1ui9CCizUT0MREpYZEJ4BQzN7d0TiKabR+/\nuaGhobXXLgiCICRIV47aupKZDxPR1QA+JKIqAKcTPZiZSwCUAFY/khRdoyAIQrcnlYLkMIAc4/Ug\ne1tCMPNh+3ctEZUByAfwFoC+RJRmayUJnbO8vPwYEe1vxbV3RQYAONbZF9FFkLFwIuPhRMYjSrJj\ncWUiO6VSkGwCMIyIroI12f8EwE8TOZCI+gE4y8yNRDQAQCGA3zIzE9EqAD+E5XOZAeCdls7HzJe3\n8R66DES0OZFOZd0BGQsnMh5OZDyidNRYpMxHYmsM9wNYAWAngL8w83YiepqI7gAAIvoWER0C8E8A\nXiKi7fbhowBsJqKtAFYB+A9m3mG/9xiAfyGiGlg+k4WpugdBEAShZbpFz/aLAVllRZGxcCLj4UTG\nI8oFr5EI7U5
JZ19AF0LGwomMhxMZjygdMhaikQiCIAhJIRqJIAiCkBQiSARBEISkEEHSBSCiHCJa\nRUQ7iGg7ET1kb+9PRO8T0R77dz97OxHRPLtw5TYiGtu5d9D+EFGQiCqIaKn92rdYJxFl2K9r7PcH\nd+Z1pwIi6ktEfyWiXUS0k4gKuvmz8c/2/8knRPQGEfXoTs8HEb1CREeJ6BNjW6ufByKaYe+/h4hm\nJHNNIki6Bs0Afs3MowFcB+A+u8Dl4wBWMvMwACvt14BVCHOY/TMbwIsdf8kp5yFYYeOKWMU6ZwE4\naW9/3t7vYuMPAJYz80gAY2CNS7d8Nojo6wAeBDCemb8JIAgrR607PR//BWCia1urngci6g/gSQDf\nhlUX8UklfNoEM8tPF/uBlWT5jwCqAWTb27IBVNt/vwTgTmN/vd/F8AOrYsFKWAU6lwIgWNm5afb7\nBQBW2H+vAFBg/51m70edfQ/tOBZ9AHzqvqdu/GyoGn797e97KYDvdrfnA8BgAJ+09XkAcCeAl4zt\njv1a+yMaSRfDVr3zAWwAcAUz19lv1QO4wv472YKYXZ25AIoRrQodr1inHgv7/dP2/hcLVwFoALDI\nNvX9iYh6oZs+G2yVTnoOVguKOljfdzm67/OhaO3z0K7PiQiSLgQR9YZVT+xhZv7cfI+tZcNFH6tN\nRN8DcJSZyzv7WroIaQDGAniRmfMBfImo2QJA93k2AF0+aSosATsQQC94zTzdms54HkSQdBGIKARL\niLzOzG/bmz8jomz7/WwAR+3tSRXE7OIUAriDiPbBqqd2MywfQV8iUrXhzPvVY2G/3wfA8Y684BRz\nCMAhZt5gv/4rLMHSHZ8NALgVwKfM3MDMTQDehvXMdNfnQ9Ha56FdnxMRJF0AIiJYNcN2MvN/Gm+9\nC6swJeAsUPkugCI7IuM6AKcNtfaChpmfYOZBzDwYlhP1Q2a+C1bNNdWO2T0Waox+aO9/0azOmbke\nwEEiGmFvugXADnTDZ8PmAIDriKin/X+jxqNbPh8GrX0eVgC4jYj62Vrebfa2ttHZTiP5YQD4DixV\ndBuASvtnMixb7koAewB8AKC/vT/BamO8F0AVrAiWTr+PFIzLBABL7b+vBrARQA2A/wOQYW/vYb+u\nsd+/urOvOwXjkAdgs/18LAbQrzs/GwD+DcAuAJ8A+G8AGd3p+QDwBiz/UBMsjXVWW54HAHfb41ID\nYGYy1yQlUgRBEISkENOWIAiCkBQiSARBEISkEEEiCIIgJIUIEkEQBCEpRJAIgiAISSGCRBDaCBGF\niajS+Hm85aMSPvdgs7qrIHRl0lreRRCEGHzFzHmdfRGC0NmIRiII7QwR7SOi3xJRFRFtJKKh9vbB\nRPSh3RdiJRHl2tuvIKK/EdFW++d6+1RBInrZ7r3xdyK6xN7/QbJ612wjojc76TYFQSOCRBDaziUu\n09aPjfdOM/M1AP4Iq5oxAMwH8CozXwvgdQDz7O3zAKxm5jGw6mhtt7cPA7CAmb8B4BSA6fb2xwHk\n2+e5J1U3JwiJIpntgtBGiOgMM/f22b4PwM3MXGsX46xn5kwiOgarZ0STvb2OmQcQUQOAQczcaJxj\nMID32WpUBCJ6DECImf+diJYDOAOrXMpiZj6T4lsVhLiIRiIIqYFj/N0aGo2/w4j6NG+HVT9pLIBN\nRtVbQegURJAIQmr4sfF7vf33R7AqGgPAXQDW2n+vBHAvoHvV94l1UiIKAMhh5lUAHoNVFt2jFQlC\nRyIrGUFoO5cQUaXxejkzqxDgfkS0DZZWcae97QFYnQ4fhdX1cKa9/SEAJUQ0C5bmcS+s6q5+BAH8\njy1sCMA8Zj7VbnckCG1AfCSC0M7YPpLxzHyss69FEDoCMW0JgiAISSEaiSAIgpAUopEIgiAISSGC\nRBAEQUgKESSCIAhCUoggEQRBEJJCBIkgCIKQFP8PxXVdHpON2kEAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -508,9 +526,9 @@
},
"source": [
"## Further metrics\n",
- "From the plot, we can see that loss continues to reduce until around 600 epochs, at which point it is mostly stable. This means that there's no need to train our network beyond 600 epochs.\n",
+ "From the plot, we can see that loss continues to reduce until around 600 epochs, at which point it is mostly stable. This means that there's probably no need to train our network for so long.\n",
"\n",
- "However, we can also see that the lowest loss value is still around 0.155. This means that our network's predictions are off by an average of ~15%. In addition, the validation loss values jump around a lot, and is sometimes even higher.\n",
+ "However, we can also see that the lowest loss values are around 0.155. This is relatively high. In addition, the validation loss values are consistently higher.\n",
"\n",
"To gain more insight into our model's performance we can plot some more data. This time, we'll plot the _mean absolute error_, which is another way of measuring how far the network's predictions are from the actual numbers:\n"
]
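(Aside: the cell above observes that training past the loss plateau is unnecessary. This notebook picks the epoch count by eye; as a hedged sketch only, one common way to act on that observation automatically is Keras's `EarlyStopping` callback. The model and data variable names in the commented `fit` call are hypothetical, not taken from this notebook.)

```python
# Not part of this notebook: stop training automatically once validation
# loss stops improving, instead of choosing an epoch count from the plot.
import tensorflow as tf

early_stop = tf.keras.callbacks.EarlyStopping(
    monitor='val_loss',          # watch validation loss each epoch
    patience=50,                 # tolerate 50 epochs without improvement
    restore_best_weights=True)   # roll back to the best weights seen

# Hypothetical usage; model_1, x_train, etc. are illustrative names only:
# history_1 = model_1.fit(x_train, y_train, epochs=1000,
#                         validation_data=(x_validate, y_validate),
#                         callbacks=[early_stop])
```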
@@ -520,15 +538,13 @@
"metadata": {
"id": "Md9E_azmpkZU",
"colab_type": "code",
- "outputId": "39b97561-b01d-49f2-c35c-fbd8db663806",
+ "outputId": "093496ad-2ec8-4152-f360-b1c0a21f0bb9",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 295
}
},
"source": [
- "plt.clf()\n",
- "\n",
"# Draw a graph of mean absolute error, which is another way of\n",
"# measuring the amount of error in the prediction.\n",
"mae = history_1.history['mae']\n",
@@ -547,7 +563,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsnXmYFNW5/z9v98wALoiOUSIMYIiJ\noqOAhNjXJU0gRo3EBe+9GnPHuBFZvEGNXk00GRMTlBglUWPAhTC/GDAJEVfckFHEVgQBUVwQHQEV\no6OIiszSfX5/nD5d1dVVvcx0z8b5Pk8/3VV16tSp01Xve95dlFJYWFhYWFhkQ6izB2BhYWFh0fVh\nmYWFhYWFRU5YZmFhYWFhkROWWVhYWFhY5IRlFhYWFhYWOWGZhYWFhYVFTlhm0cUhImER+UxEBhWz\nbWdCRL4qIkX32RaRcSLS4Np+TUSOzqdtG651u4j8rK3n9zSIyGYRiRa5z7+KSG0x+7RoO8o6ewA9\nDSLymWtzF6AJiCe3f6yUuquQ/pRScWC3YrfdGaCU+nox+hGR84AfKqWirr7PK0bfFsWBiPwVeEMp\nVdvZY+mpsMyiyFBKpYh1cuV6nlLq8aD2IlKmlGrtiLFZWFi0H37vbKHvcXd8760aqoMhIteIyN0i\nMk9EPgV+KCIREXlWRLaKyHsi8kcRKU+2LxMRJSJDktt/TR5fJCKfikhMRPYvtG3y+PEi8rqIfCIi\nN4nIMhH5UcC48xnjj0XkDRH5WET+6Do3LCI3ikijiLwJHJdlfn4uIvM9+24RkRuSv88TkVeS97Mh\nueoP6iulGhGRXUTk/yXH9jJwuKftlSLyZrLfl0Xk+8n91cDNwNFJFd+HrrmtdZ1/QfLeG0VkoYh8\nOZ+58RnzNSIyP/l8fCYia0RkaHJ8H4jIRhEZ52rfT0TmJP+TzSLyKxEJJY8dICJLROQjEfkwef97\neObnYhFZm3wG5olIr4BxZe0riW8m/5uPReQO05eI7CMiDyWfnY9E5ClXvweLyJPJY2tF5HsB1z9P\nROpd26lnXUQmA/8N/Cw5Z/ck2wwUkXuS8/aWiEzJMu+9ReQGEdkkIu+LyJ9EpHfy2DgRaRCRn4nI\nFuA2v33Jtrmeg8ki8gbwatBYuiyUUvZTog/QAIzz7LsGaAbGo5l1H+AbwDfRkt5XgNeBqcn2ZYAC\nhiS3/wp8CIwCyoG7gb+2oe0+wKfAScljFwMtwI8C7iWfMd4L7AEMAT4y9w5MBV4GBgKVwFP60fO9\nzleAz4BdXX3/GxiV3B6fbCPAt4EvgEOTx8YBDa6+NgPR5O/rgXpgT2AwsM7T9r+ALyf/kx8kx7Bv\n8th5QL1nnH8FapO/j02OcTjQG/gT8EQ+c+Nz/9ck72lc8ty/AW8Blye3JwHrXe3vT15vF2BfYCVw\nbvLY14CxQEXy/14GXO+Zn2eB/sn/5XW0JOw3rnz6ejH5H++d7NfMz+/QDLc8ef4xyf0VyXu7LHls\nXHLev+ozx2n/Af7Peq3reAhYDfwseZ2vot/HsQH3dxNwT/L56As8BPza9Vy1Ar9N9tUnYF8+z8HD\nyWv06Wz6VDA96+wB9OQPwcziiRzn/RT4R/K330vxZ1fb7wMvtaHtOcBS1zEB3iOAWeQ5xiNcx/8F\n/DT5+ylcRAg4gQBmkTz+LPCD5O/jgdeytH0AmJL8nY1ZbHT/F8Bkd1uffl8Cvpf8nYtZzAV+6zrW\nF22nGphrbnyuew2wyLV9CvAJEEpu75nsbzdgAJqx9HK1/x/gsYC+TwOe98zP6a7tG4Cb8/z//fpy\n/8ffN/8bmqD+Cxjq6WMM8A4grn3/AK70meNCmcWRwJue610F3OZzLyFgBzDYte9okkw5+VztACpc\nx/325fMcHJPP/HbFj7VZdA42uTdE5EDg92jVyC7oB+u5LOdvcf3eTnajdlDb/dzjUEopEdkc1Eme\nY8zrWsDbWcYLejV9RvL7B8lvM44T0S/9AeiXfBfg+Rz9gZYaAscgWv12EVrqIDn2vfPoF/T9PWM2\nlFLbRORjNDE3c1LIf/a+6/cXwAdKqYRr24xvMNALeF9ETPsQepGCiPQH/ogmnLsnj33guZZ3XHv5\nDSjPvrzzu1/y97XA1cBiEYmjFzC/Sx7fqJKU1XXeAL8xFIjBwCAR2eraF0ZLl170R8/jGtc8iqfN\n+0qp5hz78nkO0t797gRrs+gceN1GZ6FXsl9VSvUFfkHmw1psvIde8QAg+i3J9pK2Z4zvAVWu7Vyu\nvX8HxonIALSa7G/JMfYB/glMR6uI+gGP5jmOLUFjEJGvALeiVTyVyX5fdfWby833XRwmg4jsjpYA\n3sljXO3BJpIEXinVL/npq5Q6NHn8OrQ3XnXyP/sRbX+u8unLO7/vgiaaSqmLlFJDgJOB/xORbyWP\nV4mLQifP85u3z9ELA4P+nuPe/2gTWjLo5/rsrpQa79P3+2jV8NddbfdQSrltMn7PgHdfPs9Bt03z\nbZlF18DuaFXD5yJyEPDjDrjmA8BIERkvImXAT4AvlWiMfwemicgAEakE/i9bY6XUFuBp4C9oVcb6\n5KFeaP3wB0A8KWWMLWAMP0sahAeh7SgGu6Ff4g/QfPN84EDX8feBgZI06PtgHnCuiByaNOpOR6v4\nAiW1YkAptQl4ErheRPqKSEh0DMsxySa7o4nsJyJShVYdthX59DXV9R9fgbaRkXzGhiaZwido1UwC\nvQpvBS4RkXIR+TZaRXm3T99rgENFpDq5aPil5/j7aFuWQQxoFpFLksbrcPLcwz3nobTL+e3ATBH5\nkmgMFJFj85wbg055DjoKlll0DVwCnIU2OM/C/2UpKpRS76M9SG4AGoGhwCr06rHYY7wVWAysRauM\n/pnHOX9D64VTKiil1Fa0qugetJH4NDTTywe/REs4DcAioM7V74toA+fyZJuvk65iewxYj1b3uNU2\n5vyHgV8lx/UeenV8Zp7jai9+COyKNth/jNb5m1X3L4HRaAJ9H7CgHdfJp695wOPABuA1tK0C9Hw+\ngTZeLwP+oJRaqpRqQjssnIR2xPgj2la13tuxUmpdsr/6ZN9PeZrcDhyW9MT6p9JuqSckx9yQ7H8W\n2o7gh0vQKrDlyXt8FK3qzBud/ByUHJKuLrTYWSEiYbQYfZpSamlnj8fCwqJrwUoWOzFE5LikWqYX\n2mjcgl5ZWVhYWKTBMoudG0cBb6J19d8FTkmqBiwsLCzSYNVQFhYWFhY5YSULCwsLC4uc6DFBeXvv\nvbcaMmRIZw/DwsLColth5cqVHyqlsrnNAz2IWQwZMoQVK1Z09jAsLCwsuhVEJFdGBcCqoSwsLCws\n8oBlFhYWFhYWOWGZhYWFhYVFTvQYm4WFhUXHoKWlhc2bN7Njx47OHopFAejduzcDBw6kvDwoxVl2\nWGZhYWFREDZv3szuu+/OkCFDSE8Ya
9FVoZSisbGRzZs3s//+++c+wQdWDWVhYVEQduzYQWVlpWUU\n3QgiQmVlZbukQcssfBCLwfTp+tvCwiITllF0P7T3P7NqKA9iMRg7FpqboaICFi+GSKSzR2VhYWHR\nubCShQf19ZpRxOP6u76+s0dkYWHhRmNjI8OHD2f48OH079+fAQMGpLabm72VT/1x9tln89prr2Vt\nc8stt3DXXXcVY8gcddRRGbaCE088kX79+qXtu/7669lll1349NNPU/sef/xx9thjj9Q9Dh8+nCVL\nlhRlXIXAShYeRKNaojCSRTTa2SOysLBwo7KyktWrVwNQW1vLbrvtxk9/ml64TymFUopQyH89PGfO\nnJzXmTJlSvsH68Luu+/Os88+yxFHHMFHH33E+++/n9Fm3rx5HH744SxcuJD/+Z//Se0fM2YMCxcu\nLOp4CoWVLDyIRLTq6de/tiooC4tiIbYpxvSl04ltKp0h8I033mDYsGGceeaZHHzwwbz33ntMnDiR\nUaNGcfDBB/OrX/0q1faoo45i9erVtLa20q9fPy6//HIOO+wwIpEI//73vwG48sormTlzZqr95Zdf\nzujRo/n617/OM888A8Dnn3/OhAkTGDZsGKeddhqjRo1KMTIvTj/9dObPnw/AP//5T0477bS046+/\n/jqtra3U1tYyb968os9Pe2GZhQ8iEbjiCssoLCyKgdimGGPrxnLVkqsYWze2pAzj1Vdf5aKLLmLd\nunUMGDCAa6+9lhUrVrBmzRoee+wx1q1bl3HOJ598wre+9S3WrFlDJBLhzjvv9O1bKcXy5cv53e9+\nl2I8N910E/3792fdunVcddVVrFq1KnBs3/nOd3jiiSdIJBLcfffd/Pd//3fa8Xnz5nH66acTjUZ5\n6aWX+PDDD1PHlixZkqaGamhoaMPstA+WWXhgPaEsLIqL+oZ6muPNxFWc5ngz9Q31JbvW0KFDGTVq\nVGp73rx5jBw5kpEjR/LKK6/4Mos+ffpw/PHHA3D44YcHEuJTTz01o83TTz/N6aefDsBhhx3GwQcf\nHDi28vJyjjjiCObPn088HmfgwIFpx+fPn8/pp59OOBzm5JNP5p//dErVjxkzhtWrV6c+nZFh29os\nXLCeUBYWxUd0SJSKcAXN8WYqwhVEh0RLdq1dd9019Xv9+vX84Q9/YPny5fTr148f/vCHvnEGFRUV\nqd/hcJjW1lbfvnv16pWzTS6cfvrp/Od//ifXXHNN2v5Vq1bx5ptvMmbMGACampr42te+xgUXXNCm\n65QCVrJwwXpCWVgUH5GqCItrFvPrMb9mcc1iIlUdswLbtm0bu+++O3379uW9997jkUceKfo1jjzy\nSP7+978DsHbtWl/JxY1oNMrll1/uq4K65ppraGhooKGhgXfffZe33nqLzZs3F33MbYWVLFywnlAW\nFqVBpCrSYUzCYOTIkQwbNowDDzyQwYMHc+SRRxb9GhdeeCE1NTUMGzYs9dljjz0C24dCIS699FKA\nlHSilOLuu+9m8eLFqXYiwsknn8zdd9/NYYcdlrJZGPzyl7/klFNOKfr9ZEOPqcE9atQoVYziR7GY\nliiiUauCsrDwwyuvvMJBBx3U2cPoEmhtbaW1tZXevXuzfv16jj32WNavX09ZWddch/v9dyKyUik1\nKuCUFLrmHXUiIhHLJCwsLPLDZ599xtixY2ltbUUpxaxZs7oso2gveuZdWVhYWHQA+vXrx8qVKzt7\nGB0Ca+C2sLCwsMgJyyySsPEVFhYWFsGwaihsfIWFhYVFLljJgrbFV1hJxMLCYmeCZRY48RXhcH7x\nFUYSueoq/e1mGJaJWFiUFmPGjMkIsJs5cyaTJk3Ket5uu+0GwLvvvpuRxM8gGo2SywV/5syZbN++\nPbV9wgknsHXr1nyGnhW1tbWICG+88UbatUQkbUyrV69GRHj44YfTzg+Hw2n5o6699tp2j8kNyywo\nPNNskCSSjYlYWFgUB2eccUYqe6vB/PnzOeOMM/I6f7/99kvLu1QovMzioYceyqhL0VZUV1en3ds/\n/vGPjHxT8+bN46ijjsrITNunT5+0/FGXX355UcZkYJlFEoVkmg2SRGy6EAsLfxRT4j7ttNN48MEH\nU4WOTHqMo48+OhX3MHLkSKqrq7n33nszzm9oaOCQQw4B4IsvvuD000/noIMO4pRTTuGLL75ItZs0\naVIqvfkvf/lLAP74xz/y7rvvMmbMmFQepyFDhqQyxN5www0ccsghHHLIIan05g0NDRx00EGcf/75\nHHzwwRx77LFp13Hj5JNPTo15w4YN7LHHHuy9996p40op/vGPf/CXv/yFxx57rF01tQuFZRYu5PtA\nB0kihaqzLCx2BhRb4t5rr70YPXo0ixYtArRU8V//9V+ICL179+aee+7hhRdeYMmSJVxyySVky1Jx\n6623sssuu/DKK69w9dVXp8VM/OY3v2HFihW8+OKLPPnkk7z44ov87//+L/vttx9LlizJqFa3cuVK\n5syZw3PPPcezzz7LbbfdlkpZvn79eqZMmcLLL79Mv379WLBgge94+vbtS1VVFS+99BLz58/PyCH1\nzDPPsP/++zN06FCi0SgPPvhg6tgXX3yRpoa6++67C5vYHLDMIolCH2g/ScQWTrKwyEQpJG63Ksqt\nglJK8bOf/YxDDz2UcePG8c477/hWpDN46qmn+OEPfwjAoYceyqGHHpo69ve//52RI0cyYsQIXn75\n5ZxJAp9++mlOOeUUdt11V3bbbTdOPfVUli5dCsD++++fyu2ULQ06OEWSFi5cmJH/ydS8MO3cqiiv\nGsrLaNoL6zqbhN8D3RZib9OFWFikoxQJOk866SQuuugiXnjhBbZv387hhx8OwF133cUHH3zAypUr\nKS8vZ8iQIW1S1bz11ltcf/31PP/88+y555786Ec/apfKx6Q3B22IDlJDga7NfemllzJq1Cj69u2b\n2h+Px1mwYAH33nsvv/nNb1BK0djYyKeffsruu+/e5rHlCytZJGFVSBYWpUEpJO7ddtuNMWPGcM45\n56QZtj/55BP22WcfysvLWbJkCW+//XbWfo455hj+9re/AfDSSy/x4osvAjq9+a677soee+zB+++/\nn1J5ga6l/emnn2b0dfTRR7Nw4UK2b9/O559/zj333MPRRx9d8L3tsssuXHfddfz85z9P27948WIO\nPfRQNm3aRENDA2+//TYTJkzgnnvuKfgabUFJJQsROQ74AxAGbldKXes5fgEwBYgDnwETlVLrXMcH\nAeuAWqXU9aUcq3mgbcZZC4vioxQS9xlnnMEpp5yS5j105plnMn78eKqrqxk1ahQHHnhg1j4mTZrE\n2WefzUEHHcRBBx2UklAOO+wwRowYwYEHHkhVVVVaevOJEydy3HHHpWwXBiNHjuRHP/oRo0ePBuC8\n885jxIgRbSqBalRNbsybNy9DLTVhwgRuvfVWampqUjYLg+OOO66o7rMlS1EuImHgdeA7wGbgee
AM\nDzPoq5Talvz9fWCyUuo41/F/Agp4LhezKFaKcgsLi+ywKcq7L9qToryUaqjRwBtKqTeVUs3AfOAk\ndwPDKJLYFc0YABCRk4G3gJdLOMYM2KA6CwsLi0yUUg01ANjk2t4MfNPbSESmABcDFcC3k/t2A/4P\nLZX8NOgCIjIRmAgwaNCgdg84FoMxYxxD3JIlVh1lYWFhAV3AwK2UukUpNRTNHK5M7q4FblRKfZbj\n3NlKqVFKqVFf+tKX2j2WujpoagKl9HddXbu7tLDokegpFTZ3JrT3PyulZPEOUOXaHpjcF4T5wK3J\n398EThORGUA/ICEiO5RSN5dkpEWGLc1q0ZPRu3dvGhsbqaysREQ6ezgWecC42fbu3bvNfZSSWTwP\nHCAi+6OZxOnAD9wNROQApdT65Ob3gPUASqmjXW1qgc86glHU1MCdd0JLC5SX6+1CYdOdW/R0DBw4\nkM2bN/PBBx909lAsCkDv3r0ZOHBgm88vGbNQSrWKyFTgEbTr7J1KqZdF5FfACqXUfcBUERkHtAAf\nA2eVajz54pxz9HdNTduIfLGC+ywsuirKy8vZf//9O3sYFh2MksZZKKUeAh7y7PuF6/dP8uijtvgj\ny4RXImiLVAFOcF9TE4hAZWVRh2lhYWHRKeh0A3dXQbHy10QiMHOmjgRPJGDaNOuGa2Fh0f1hmUUS\n0agm8CL6uz3pPhobNaNIJGyqcgsLi54ByyxcMI4d7XXwsHmmLCwsehoss0iivh5aW3WMRUsL1Na2\nXX1kU5VbWFj0NNgU5Um4DdOJBDz+OCxd2nZi702cZmMvLCwsujOsZJGEkQbGjYNQqLj2Blub28LC\norvDMgsXIhGtfurVSzOMYrm+2trcFhYW3R1WDZWEW000cyZMnaqJ+7Rp+nhjY3YVUjY1U6GVwqzK\nysLCoqvBMgsyA/LOOstxfW1q0owjkQhO35ErxUchhZVsuhALC4uuCKuGIlNNBI7rayik98fjmnH4\neUnlo2aKROCKK3ITfquysrCw6IqwzILMuIiaGr2iP/98OPFEnVTQGL0ffzzTSF3MuAobo2FhYdEV\nYdVQOCk6FiyACRP0diwGc+bo1X1ZGYwaBStWpHtJGSmhmPW7bS1wCwuLrgjLLNCMYdo0zQSWLoXq\naqcQEuggvU8/1RJGa6v/ir+YBelLUdzewsLCoj2waijysxO8+qqO7j7/fGt0trCw2PlgmQX+doKa\nGv3bQCnNTAYNsozCwsJi54NlFsDChbDXXnDkkY7UEIloCeOCC3SQXiEG51gMpk+3kdoWFhY9Bzu9\nzeL//g9mzNC/33lHMw634dqNfKrnueMkwmFdec8UUrJG654PG1Bp0VOx0zOLf/0rffuuu+C66/Tv\n2bOdSO5evfKrnue2f8TjMGuWrust4hjHrc2jZ8IGVFr0ZOz0aqhTT03ffv99/dLHYjBlivaEMpHc\n+QTIGfuHqYlhUp7bQLueDxtQadGTsdMzi+uug2OOcbaV0m6ztbX6pTfIt3qeiZP48Y8dW0d5uQ20\n2xlgAyotejJEKdXZYygKRo0apVasWNGmc712BhFHojBlVm+5BSZOLLxfo78Gq8veGWBtFhbdDSKy\nUik1Kmc7yyw0zEu+cSPcdpuWKkIhXd+itrawF98SDAsLi+6CfJnFTm/gNjDusrEYzJ3rGCkNo5g9\n20kHkk3CsEZOCwuLngjLLDwwNoe6Or29dq12rV24UG8/+qj+njjRX4LwM3JaZmFhYdHdYZlFAObO\ndepxe7Fggc4f5SdBeAsdVVbqAL32qKSsWsvCwqKzYZmFD4x04McoQKui3BKEqXNhVFYma2xlpZOg\nsK0qKavWsrCw6AqwzMIHRjowkoWIdqkFna68ulr/drd5/HGdsXbmTKcEa5DffSFSglVrWVhYdAVY\nZuEDr3SwYIFmBomEJto1NXDppbpNba1zzFuCdebMTJVUoVJCofW7LSwsLEoByywC4K4pUV2tpYYd\nO7SE8cYbOuhu1izNLJYu1cRcRDMTUyCpsTG9kFFbpARbDMnCwqIrwDKLABijcmWlJvoXXgi33w4f\nfeS0WbBAe0UF2SgMcXcT+GxSQpAh2xZDsrCw6GxYZgHENsWob6gnOiRKpCqSMir72Szc2LHDSUO+\ncaP+uG0WXgKfTUqwhmwLC4uujJ2eWcQ2xRgzdwzN8WYqwhUsOWsJ9fWRNG+ooCD3pUs10TfJAkHn\ng1qyJJjQB0kJ1pBtYWHRlbHTM4u6NXU0xXWx7aZ4EzOWzeCy6D2+3lDeb8Mk3MykqUkH9AUR+qB8\nUdaQnQ4bW2Jh0bWw0zMLLxa+tpBdK37IWb8/Fhq+xYihg1m1CrZsgf79YcQIWLUK5szR9SnCYad2\nhcFtt/kXSvJLWOiucVEKQ3Z3JLpWJWdh0fWw0zOLEV8ekbHvrrV3IfyN3n17M2Kf55g7tzpFuGpq\ntFG7psYxat9xByxf7pwfj6dLF4ZgL1/ueFQlEo5EYmplXHFFph2jPYS+uxJdq5KzsOh6KCmzEJHj\ngD8AYeB2pdS1nuMXAFOAOPAZMFEptU5ERgOzTTOgVil1TynG2Li9EUFQpBsmFIodrTu4454NNDdX\nZxCuSETnjZo6VUsHQfAayw1CIUcaSSQ00/E7rz2EvrsSXauSs7DoeigZsxCRMHAL8B1gM/C8iNyn\nlFrnavY3pdSfk+2/D9wAHAe8BIxSSrWKyJeBNSJyv1IqC1luG6JDopSHy2mON2ccUyhW9bqRsvLx\nQBgRnVCwslLHXkyZks4oQiEtLRgJBIJTh3z96/Dqq3p/KKQ9qNwohNAHSSDdlegWK7akO6rgLCy6\nKkopWYwG3lBKvQkgIvOBk4AUs1BKbXO13xX08l4ptd21v7fZXwpEqiKccMAJLHx1oe/xxMBlnHvD\nXWx5tIaFC7UqaflyXV3PzSjKy+HmmzPdZg3B/uKL9H6/9jV4661gQp4voc8mgXTngL72xpZ0VxWc\nhUVXRSmZxQBgk2t7M/BNbyMRmQJcDFQA33bt/yZwJzAY+B8/qUJEJgITAQYNGtTmgfbftX/GvrJQ\nGYlEAhFhxOgdLPDwkqVLnd/hsGYUfnUu3CnP77hDM5jycjj+eG0wB39jeL6Evr7eUXEZ20dnBvR1\nldV8d1XBBaGrzKvFzotON3ArpW4BbhGRHwBXAmcl9z8HHCwiBwFzRWSRUmqH59zZJG0bo0aNarP0\n4WfkjifiCEI8EWfaw9O4cMxYHn10qOva+lsExo/XEkUs5v8iG4LtNoq7I72NyirovGyorHRUXH62\nj45EV1rNd1cVnB+60rxa7LwIlbDvd4Aq1/bA5L4gzAdO9u5USr2CNn4fUtTRuWCM3GnXRZEggULx\nResXrB4wmcumb2D0aM0gDEIhuP9+uPJK/UKbiG43YjFd0wK0x1Njo3822jaNvVGPwYzFa/soFsw9\n+N2fQVCW3Y64thdGMvv1r7s/cS3VvFpYFIJSShbPA
weIyP5oJnE68AN3AxE5QCm1Prn5PWB9cv/+\nwKakgXswcCDQUKqBRodE6V3Wmx2tO1AoX++ox958jKVl1Xyj30aU2ju13x1f4VUDxWJa/WRiMsyq\n0J0CXSS7NJBL/RCN6qjxUq6g813ZlmI1355VdU/JqdWTpCSL7ouSMYskoZ8KPIJ2nb1TKfWyiPwK\nWKGUug+YKiLjgBbgY5IqKOAo4HIRaQESwGSl1IelGmukKsLimsXUN9RTuUsli9Yv4t7X7k1jGApF\nU2sTb25qCuwnHHYq4xlVk4mrAGdVeMUVOofU1Kma2Uybpr2rsgXxBRHKjjBi56v/z3cshejfe5rt\noS3ozo4KFj0HJbVZKKUeAh7y7PuF6/dPAs77f8D/K+XYvIhURXQSwU0xpj08zbdNggTfOuUN7npl\nQMYxEbjoIscWIZIeeCeSvipsbNTHTTpzPyJYCJEuFQGJxXSCxLLkk5JrZZtrLIVKCnZVrdFTpKS2\noCcb97vTvXW6gburob6hnqbWppQ6auieQ9nw8QYUipCEOPi4Z5g1+FvccQesXOmoocrKYNs2h7gb\nu4aIljjOOy/d6ykfItjZhNKbnuT88/09twpBoZKCd1UN7a9pbtF9UGrjfmcS6+7muFBKA3e3xNam\nrSTQ7kUKxanDTqUiXIEglIfKiQ6JMnEiPPecJp6GKRiPpIoKJzjPfEQyiWw+BtjONtK6CXs8DoMG\ntX8MhgGGw/kzwEhEq+5Av1wkobxhAAAgAElEQVRXXRXsTGDRs1BK434sBmPGwM9/rr87+nnqbo4L\nVrJwIbYpxg2xG1LbIUK8/uHrtCZ0iEdcxalbUwdotVVNDcyd66wMRiQ9cF94AZ5/3lFBtbb6r6C9\nqgW/VU5nqh9KIdm0R/9u7ReFozupOfxQSum6rk47mUDubNGlQGdrDgqFZRYu1DfUk/Dk5XAbulsT\nrcxaOYu5a+ayuGYxkUjEt0peOKzVUqbGRSiU7vFkvKTAkTi6okhaKsNqWxlgd3u5Ohtd8ZkqFD3Z\nuN/d7s0yCxeiQ6L0KutFU2sTItp9VqnMBINN8SbqG+q1UTxJ+KZPd1a9oFVUW7boGIxEQueRAu31\nFI3qtqDdapcsSV81NzVpxjNyZPttBO1FVzKs5qo02F1euo5CT5HESvUM1tTAnXfqRV15eXBwbCnR\nld6vXLDMwgW3C+3yd5cH5osShOiQaNq+aFRLFImE/jbR2vfdp9VRra3aVfbccx2JA5yX2B17kUg4\nOagMM+kuD1Sp4fdyeQ3x55yTm8nuDMzFSmLZEYnoZ6CnPwfFgmUWHkSq9BNT+2RtYJvxXxufaueG\n2wNq7VrtcuqGkTrKyx3JwrzEZtVcWwuPPZYZm2Ef5GAC7zXEz5qlbUlBapeeoJ7JB91NzdEZ6E4r\n+86GZRY+qG+oJ56I+x4LSYjjDzie2KYY9Q31RIdEiVRFqK/X0oMptWoC7twmEBHo2xdOOAFee02n\nKb/sMs1YamthwgT9bYgf2BWhQTYCb1bQJgBSqexMtqeoZ/KBJYYWxYJlFj6IDolSEa5Ipf9wI6ES\nTH5wMmWhMloTrVSEK1hcs5hoNJIS+UUyGQXofTNmONtvvqlTlZt9jz6qV8X19ekGcLCxBdkIvDuz\n75w5mllnS6Ni1TMWO4MastgQrwG3u2LUqFFqxYoVRevPRHIvf3d51nZhCXP+yPMZtMcgKhtPpPGV\n6pRnlLE/iDhqJTdEYMAA2LzZ2XfssfDII87DvHUr3HijJpK9ejkr6p3tYc9XdTR7tiPVuefLr7+d\naf4sHOwsash8ISIrlVKjcrWzkkUAIlURZh43k+jcqG8VPdCGbhFhzuo5SSnj19qltipCdbXjUrtq\nlSZiXkmjrAzeey9934QJwaVYTaJCKMyg2xMIY77693zSqJj+uuJc9IT/qqtjZ1JDFhOWWWRBpCpC\n/Vn11DfUs7VpK79b9ruM5IJKKVpUCwmVoDnenOFSa7Bliy7JajBsmK62d9ttzr5jjtEFlIwbrpe5\nhMOaiBRi0DVRqmYV1Z09q/Ih8NlUTF2dENsVb8fAqiHbBssscsCdYPD+1+7nlQ9fSTseV3HCEiYs\nYSrCFRkutQaXXQaLFjkP6O236/133ul4ST33nCYYXjdak1/q5psd4pGvQbezo1Q7GkESSEcT4rYw\nJrvi7RhYL7G2wTKLPBDbFGNs3Vh2tO7wPa6U4qjBRzFs72GBfUQiTvCd+wE95xwtGZhYjPp6nQfJ\nHRnurevtNeiaWhlddYXU0St6PwkkKA9PPuMqdPxtZUx2xdtx6KpqyK4MyyzyQH1DPc3x5gzPKIME\nCZ56+yme3vh0KhWIOc+41oLzgJrKb9EoqfxSphDS1q3OMZM8zw+mLxP8F0TIOjtK1W1/CYXgllv8\na5WXGl5CXFmZH0FvC+Fvq4RgV7wWXRmWWeQB40rb1NqUykjrh4RKsKN1BzOWzeCRDY/QHG9OudYa\nhuFHPGfOhMmTtYQwY4ben82Tx41sKySzIr7ppkzppKNQX++o0xIJmDRJ7+9ohuElxPkS9LYQ/vZI\nCPmseLu67cWiZyIrsxCRvkqpbQHHBimlNvod62lwpwHZ2rSV3z/ze+LKP2hPobj3tXsRkQyjN2QS\nz8mTdXCeuzyrnyePm0CYfnJVo+ssY6l7rNGoZn7GWJ9IaNdWv8qApYaXEOdD0NtC+EspIfRkI7hl\ngl0buSSLemAkgIgsVkqNdR1baI7tDDCG7ulLp5NQwdIFkCqc5Gf09hLPeBzWrcvsw62Scme0NTEb\nSgVLH7GYjgQ3TKkjjaV+xOyWW7RE4b7nzjbe5kvQ20r4S6UT76lG8J7MBHsKcjELcf3eK8uxnQbR\nIVHCoXCqxkUQlFJMPHwiNYfVpOWRikQ08Zw6NT2hoEE4rL9NtHco5DAXryutibtwSx/uKOZEQp/b\nkcZSP2JmbC9Tp2pVmzdle2chX4Ju2hijeGcSsZ5qBO+pTLAnIVelPBXw2297p0FYwjnbKBQvvPdC\n2r7YphjTl06n+vgYTz4Jo0ennzNwIIwfnzw/ObuJhCawvuMIO8TCrMxmzXIkilAIxo3r2FVaUCW8\niRO1629ZmR7btGmZlcmM4b+rVcAzc9sVKvQZSact1RO76vxC2yooWnQsckkW+4jIxWgpwvwmuf2l\nko6si6K+oT6nVGGw/N3lHHnnkVx65KUAXP/M9Sil6F3Wm8U1i5k5M8KYMU4cxDvvpKf+cMMvQO+i\ni9JdQJubHSYjotVUtbUdm+4im9omW3R1qdQQ3vtsy33X1TkxLV1h1dsWFVcx57cUz471BOv6yMUs\nbgN29/kNcHtJRtTFYTyjmuPNKSN2NhuGQjFj2Yy0fU2tunjSFUdHWLIkMy15Phg/Xns5Ga+qiy92\n1BMmBciIEf6qk1Lqh7MRkmwqlFKoIbz3OXOmY/vJ975jMe16bP6bsrLuueot1vy29dnJh8HY2Ieu\njazMQil1
ddAxEflG8YfT9eH2jDKG63y8pNIgpM6NRDSzeOKJYHWTF+Xl0L+/s9pNJHSywZtvdlxk\nIfilLpV+OBchybZ6LIUu3nufCxY4KjqvvSdbH8ZTTQTOPrt7ErRizW9bnh1rvO4ZKCjOQkSGAWck\nP1uBnJkKeyKMZ5R7e/rS6Wlt+u/any2fb/E9/6f/8VNfo/cFF6RLF8OH6/oXy5Y5BGvIEMdg7G4b\nj2tGYY65y7x6X2o34QiHdZGmWKz9L3A+hCRo9ehmJJWV+nvt2vbFh3gJ5PDhOg08aIaRj5Hd20dn\nlN4sBoql5mkL07HG656BnMxCRIbgMIgWYDAwSinVUMqBdTe41VMV4QquHnM1kx+cnCZp7NVnL/rv\n1h+A6Uunp0V3T5yoYw8uv1zXufjWt+Bf/3II+vjxOrfUpk1alXLWWempz93Gbsj+UkciWiVzxx06\nI+5tt2WvLJf3HGS5Zj4w13Zn3M03QNFPzeEXiGc8y0IhzYjyGVNH6NI7IsagGGqetsxHd07umA96\nwj3kg1xBeTGgLzAfmKCUWi8ib1lGkQmveqq+oT7DlvHRFx/x0Rcfse4DHVhREa7gpuNvYtV7qwCo\nOayGJ5/UT9v06TB/viNRbN+u1VTxuFY/bdkCvXs7NoubbyZ1nnlog17qWEwzHKPGAifJYK7Av2wv\nRjEIq1mFuoP4cq1Gs6k5vASyV69gZhZ0b0FEtlhEorupaQplOkHPRSH33VUJcnf779qDXJLF+8AA\nYF+099N6dmKX2VzwqqdCEspqw2iONzPpgUmpFCJzVs9hyVlLiFRFMlZjEyboRITxuCbwDz6Yn40i\nWwoLtxorkdCSRiKhpRQRJ0HhYp3qKiNxod+L0d7Vq7lvt2SRS0rJV83hp+oy+wt96d3t86kpkg09\nRU2TayHh3ZfvfXeVbATZ3qXu/t/lg1wG7pNFZA/gVKBWRA4A+onIaKVU9hJyOzkiVRH+9L0/8eMH\nfpy1nTvXlLcehns1BukpQVpaNHGfOVM/nNlsFF4YguyWLAxzMAZzcFxF6+q0msrdvtDMrfnCS9Dz\nsVkUov5yq7rcxKfQl97dPldNkVzwG//s2dogP2FC5yReLBRtIej5/m+dRZDzuaeermJzI6fNQin1\nCTAHmCMi+wL/BdyYzA1VVeoBdjfENsVSqqjqfaodN1uEb+z3jYwyrSFCKYYhIlTu4lhd3aux6dMz\nXWuXL9cFk265pTCjtSHIRlLwShlKaY+rREL3CZkxHBUV+WduLRTFUnMEwY/4FGpv8TLc9sRgeMe/\ndi38OLnGMAb5rs4wCiHobiKaz//WWVHr+TpsdIX6KR0CpVSbPsDgtp5bis/hhx+uOhvPbHxG9bmm\njwpfHVZ9rumjLrj/AhW+OqyoRYWvDqsL7r9Alf2qTFFLat/wW4entqlF9fp1L/XMxmcy+35GqbIy\nQ5bSP+Xl+vgzzyh18slKhcNKhUJK9emj1KxZSv32t85x89vgsssy+wuFlLrggvTz+vTR/VZU6GOm\nr3BYnxMO6+3uAPf99OnjzIff/OTq54ILlOrVq/19uXHssen/x7HHFt5HRyNoTtvazu88v/lszzzn\nc822jFWp7vVuACtUHjQ2l4H7vhy85vvFYlo9AabuRVzFU3W73R5SgGG0gE5pvvr91Wl9NMebqVtT\n51sL47zz4M9/zrxuPK6lBID773fUVTt26HxMQXaISARWr87sr1evTP170AqwFCu+QjPsBp2bLfjL\n737aItFEIpk1Rdq7qpwwwZEozHZXR77SXVtVSn7/TalX7+1x2OiJObxyqaEiwCZgHvAcO2nywHxh\n3Gd3tO5Aoejbuy+LaxZTt0ZT8hFfHpFWFyOomNLsF2YD0CvcK60WRk2NdnN12y5AM4PZs521qIGI\nbutOQqiUZiKmvKqXMJ18si4Bm8tAaYjyzJnFrZXhJgD5ZNgNOjcX8SiGG2lQX0EEMV8dtlE5dSeb\nBeQ3p8Ukoh1hy2jrc5LLG7E72jJyMYv+wHfQMRY/AB4E5imlXi71wLojIlURLvzmhcxYNgOlnDQf\nc9fMpTneTDgU5oSvnsC7n76bYbsAEARFSs1HU7wprRZGJAJ/+lN6um8Dv9xRRx6p63q3tuptpbRh\nXCltq6ipaRthCiLK2SQCcyyX0dpNANzIJ+K6EO+aUr6sfgSx0FXwxIndh0kUgmK4Vxt09dV7W6Sh\nrsxIcnlDxYGHgYdFpBeaadSLyNVKqZs7YoDdDavfS9fr/Gvdv1KqqXg8zr2v3Us45J+11itpKKVS\nBu+U4fz4KE8/HaGuDl54AZ5/PtPwPWSITkq4bJlmEuefrxlDXZ1T77ulRacZqa0tnDD5EWVIdyV1\nq7xMTqZ8Au38PLUgM+jQD/kQj44wPPoRxEK81Xo6iiXV5cN4uhrxzbag6epG8XwiuHsB30MziiHA\nH4F78ulcRI4D/gCEgduVUtd6jl8ATAHiwGfARKXUOhH5DnAtUAE0A5cqpZ7I8546FROGTeDRNx29\nzqnDTmXmszOJJ5fKCkU8kRl7EZYwCpUWyKdQTHt4Ghs+3sCNsRuJq3hKNXXrrRFiMf0SNDen99XQ\nkL49aJDz0BkX2ERCJy9cujT/hHrmpfMjyu6XwOt6u2BB/oF2Xk+tlhYn6DDXGPMhHh3lhukliN45\nq6xMD6C0aBuyMZ6uSHyzLWg6y0U4X+QycNcBhwAPAVcrpV7Kt2MRCQO3oNVYm4HnReQ+pZS7Ltzf\nlFJ/Trb/PnADcBzwITBeKfWuiBwCPIIODuzymHi4XqIvWLeACcMmMPHwiWzbsY0/r3Qs0yEJEZIQ\nLQld/SgsYS75j0t49I1HMwzeX7R+wfXPXJ9iIiZjrYnFqK/Xhu+ganvuBzIS0av8SZMcW4A3cjvf\noCg/oux23XVLFhMmaKaUb6BdkOE4H+RatXak6sK7qnXHjxSa/daicHRF4pttQdPV1Wq5JIsfAp8D\nPwH+VyRl3xZAKaX6Zjl3NPCGUupNABGZD5wEpMiaSq/vvSvJ6HCl1CrX/peBPiLSSynVlPOOugAm\nHj4xxTRAp/GYu2YuTa1NhEIhbjnhFqr3qU4zfF+46MKUB5UXbmkjQSIjFuP22+HoozP1/CJw4YWa\nGcyYoTPVeiGSOyrb76W74or0dt6XwJxnXojq6sIC7Tqj3kYxEbSqtSqp0sH7zHRV4hu0oPF7NruS\nGi2XzSLUjr4HoD2pDDYD3/Q2EpEpwMVoldO3ffqZALzgxyhEZCIwEWDQoEHtGGpp4c0bFamKENvk\nlCtb9MaiQEYhSKpuhsGCdQsAaNzeqPuLRPjTnzKz1iYS8PvfpzORcFh/jDvt+PGOu63xkoL0B7Sy\nUksDSuUnEbi3g45lQ0e4RJb6xcu2qg0iYl2JMHQEinm/Qc9Mdyuo5H42u5oaraAU5aWAUuoW4BYR\n+QFwJXCWOSYiBwPXAccGnDsbmA0watSoLp2zyp03KrYpRnRuNJBBG
IQIURYuY9jew3jx/RdTkd6P\nvvkoj775KCEJpWwYEydG2LBBSxCp80OZXlJum4K7nck5dccd6ZKGMU7H47rdhRem51TyQ3uq08Vi\n2uhuVFbFWnl3NCHOtqoNWkF2JcJQasyerWOA4vH83KJzIYg5d8TCoFToamq0UjKLdwB3OpCByX1B\nmA/cajZEZCDakF6jlNpQkhF2Euob6mmJt2RtEyLEqP1GsWrLqgw7hkFCJdJsGNddB0OHaoLf3KwJ\n7uuvZ6qnjPQRj8O992omYGIaWlqc337G6RtvdNKA+Ln9eZMN5ludzn1uS0v+SQTzQSkIcS7mk2tV\nm29sRk9ELAZTpjjFvvItRJUNXVXl1B50tXsqJbN4HjhARPZHM4nT0bEaKYjIAUqp9cnN76Gz2iIi\n/dAxHZcrpZaVcIydguiQKOXh8qySRXm4nJFfHukbj+FGggRbm7amto0LrMktBHDAAfDGG/5lW03i\nQHdtDKUcQm2M0yZIzkgm3mAzvzxTbmaTjQgaYu52lw2FYNy44BrihaDYhDhf5lPIqnZn8paqr0+X\nbvNxi86F7qhyyoWudk8lYxZKqVYRmYr2ZAoDdyqlXhaRX6FzkdwHTBWRceiiSh/jqKCmAl8FfiEi\nv0juO1Yp9e9SjbcjEamKUH9WfcrA3bd33zSPJ0E4e/jZ9O2dzX/Awe+f+T0nf/3klJprwYL04xs2\nwKGHwpo1/ucrpRmBG6NGORlt3cZpt5TgDjbzxkUYTyw3s/FbHbnVTu5Ehb16FYdRQPFXaKWQAorl\nLVVqdVsx+o9G9f/rrsVSrLgLv4VIVyG2bUFXUqOV1GahlHoI7Xbr3vcL1++fBJx3DXBNKcfW2fDW\nvhi651CmPjQ1FUtRc1gNtfW1efUVV3HOu+88fnLET2jc3gjDDoBHTUIhQSl48cXg80Uy7Rhr1ujs\np+ZFM+Vaq6sdIzg4hNNN6MvL0+s7GGbjl/bAWxWvrCyzNoTfC58PESg0u2m+KJT55Euw2ustVWq7\nR7b+CyHKne2RZtFG5JNtsDt8ukLWWTee2fiM+u1Tv/XNIJvvOZc9dllaRlpqUYNvHKxCtaGM/Rmf\nI3+rkFYlEldlZUqJ+GesdWeudbcR0fv8sqm6M3HOmuWfkTYfuDNzglKjR/tnFe3TR2fCLSvT15s1\nS4/NZNb1u157Mobmg3yznQZl7M3WV1vHXupMp0H9l3qu24rulPm1M0Exss5atA2xTTHG1o1NZZt1\nJwPMBiNtxDbFmPTAJOasnpPRZtMnm9IKJgXiOz+DA+9jwEf/w6ihQ1h083dpbgr72i1CITj3XF2q\ndeFCvU8ppxiSO/GgVwXT2Ni+zJzhsGOA91OT1dc7kkciAZMn6/3mnCDjaFcxGLvH4VckqZgun6U2\niAb131Xm2otSz0dnq7g6+vqWWZQA3lTl7mSAuWAYjclcC06CQSA/RmFQ9SzvD17J/SpBuOYoTvp8\nHov++eWUx5Nxra2ocKKl77033dBtvk3iQT9DbNADm4/H0DnnOPmqWlszCU00mu4CnEjklzOqlISi\nEPWGGUdQkaRsLp+gt9euzR7IWCp1mxdBTKzYc12oijGoTSnVXZ2t4uqM61tmUQKYVOVGsogOieZ9\nbn1DPU2tTWlJBQWhIlxBS7wlkFkYhhIixKH9D6UiVMF+fffj/tfu13XABzzN6DF/4bIpV6ReHnDs\nD2vX6up6bq8oN5qbddtbbw02xLrTlUN+D3NNjeNFVVaWSWgiEV0J0Pjkl5XpMebKGVVsQuEmToWs\npM04vC7F5j6DvKDM3OZKvuhHNIx9qRTwM7gWc67zIYKdlYrejc6Wpjrj+pZZlAB+Edv5IjokSigU\nIuGyOCdI8JMjfsLq91bz+FuPp7ymwhImoXRdDLfksXrLairCFZw78lweeeORNKYVqUqqPzbFqHtg\nPXP+ciYtzeEM91kv3NKFnyH2iy+0isi43Z54Yv4Ps1eS8a4aq6u1mgz09aEwg3F74SVOM2cWtpI2\n4/DLdRXkBWWcDnIlX+xsomVQrLnO535Kcc/5ptA36OwYiM64vmUWJYLX28nAXaPb73ikKsItJ9zC\nBQ9ckCZdrH5vNbXRWpZuXJqqjTFs72GBAXvN8WZWvbcqrfjS2n+vpb6hnspdKpn28DR2LLkI1aTA\nQ6yD0NKSmbbCbXMw34mETiFSVua45VZW+vWo+zPR46bi39y56YTZLb24mVU25KvPzaddsew0QeP2\nY76hkJ5byB6g2NlEq9jI535KofbyeuXliirv7BiIzri+ZRYdiHwN3xMPn8iGjzekiieBTn1uJJYZ\ny2Zw/+v3BzIKgy2fbaFuTR1zVs+hOd6MQiEI4VBSIhnyBIR/DnEBFU5JFkESRiKRSfS/+lX/jLdK\nwfHHO3mn/vd/tYTgfai9Lz6kE+Z8gvq8yFdNkW87P+JUjJV0rsR3Rq2XbbXb2USr2Mjnfop9z2Yx\nkE8Kfe84OnO+O/r6lll0IAoxfF837jqG7jk0LdW5wQPrH9B2iCwISziVoNAtoZh6GiEJQdWzcNZY\nwm+P45KxP6KfGsrWrfC73wX3+6tfwV13wV57wYMPamnDQMQJ7isrS081YlKh+/nle7PVuiULb1Bf\ntshm0+fGjfkxmELUGWclw0W9tcnbikK8oMx9+d1rsRhXUL8dfT7kdz/FlC4Nk843hX4xUGi+tK6w\nGLDMogNRqOHbm+ocNMNx2zNChKjcpZIPtn+Q2jdw94F8Za+vsPTtpb51vhWKIwcdyVNvPwVVz5Ko\neo7VA5ZTG62l7reRAHWU3vnOO/DOO8Gl2MvK4IQTYNGiTInjqaf0gw/ZjbJeghkUQe4NCnNX6itL\nPtnZXvp81Bleom5sJu1FNi+oIAN2OKy9x0aM8J+HYhD69njYdLaHUFvH4rUbFbOmfHvH1pXm1DKL\nDkR7DN8GlbtUEg6FUQlFOBRO1cYYWzeWptYmEiTY/OlmNn+6OXVOiFCaEVwQ9uq9V+q4QvHom4/y\nRMMTHLz5KeAIdMkSjeHf/ITVa3fA9n3S9nth7A7btztJ4txYtw7GjIGzz86+ovcSTD+dvvc8N/EF\nXUp20KD2u1aWyoCcr97dfX0Tp2FSzLvVJdB+otLee+0oY3tb7ExdSaVUyNhyte1IqcMyiw5GkOE7\nH8Q2xZj28DTiiXiKURjJY3HNYqY9PM038aBCcdi+h/Hi+y+i0Ezmox0fZbRrTbSyZt+LIbwE4uWI\nCJdeGqLfiX9i9cQD4dWTUz16mUYopJlFWZlWHRnDtRfNzfq7LQbKbATWeyxfdZEhErGYv3rL3W84\nrFVcsVj7X8x89e5+cRqmFom7EmIxCHV7DccdYWxvj52pWNdvrwRSyNiyte1oqcMyi24EY/NIkECU\n6DxQSUSqIoFZahUqzRjemmjVKig/VD0LPxoDDVFk/6WcPPV3rP13JeGjbiD++gmQKMePUYTDTsR3\ndbVWRZlocDdEtITx3e/q
yn2F2ACyEVivKsGstvPpO9tLZ/o1cRK33ZYegd0e5Kub94vTcMe0mD7a\nSxzbazh2j3XLFv2dK6CwUPgxRbM/yCW5WNdui9eUHwoZW7a2HSXJGVhm0Y0QZPMw7rgjvjyCsIRz\nGr9zoupZqHoWhVC3po65a+aiBjYRPmccR348i6X/GpZm1xg0CDZtSnd/feih9C6Nh1UioW0XoF80\nYwMoNNle0DHIjInIRaxyvXSRiN7X2qrbNDXpjLjZsuIWYwXqvn4kkr0mebGIYzHUMXfe6UiQkD2g\nsNDxelfalZX+jL4U6pm2ek35oZB5Dmrb0W7Tlll0IwSVZzXuuOFQmCOrjmTZpmXtZxhAWaiMLZ9v\nSaUeCVfFOK7mXvhsX556ZC+MhDFoEGzanEBCUFaugHBgwSU32qpvdxMCSCcKbsLf1KQjv4OKNRkU\n4ttvVpWPP669tLJFGLd3BepFNpVZV/GYqa9P95ADf6LaVhWKlyl6GX1dXaYUlk/fpiYLZEq7bsbv\nfgaM919lZed4kZVCesoGyyy6Gbw2D7c7bjweZ+nGpdotNgD9evVLK5YUBEGIDIxw/2v3pwzjZaEy\ntjZtZVnVyRB+DOLlhMuE2LMQb02AQOs3bqTv4FNRamhaf6FQZhp0pfTK/+GH8xenvd5BIulEwU34\ng4o1ZcxpHi9dJKLH+rvf6fog2fos5go02/2be4a2667bUwLX7/xoVKeo90oWXibcHhWKd6VtCDjo\nKpFGHQr59R2L6bGZMc+ZA0uWOPPhJ6lu3aqrRsbjutSw9zksRIJqj+2hIw3zlll0cxjVlFn9K1RW\nqSIbowgRQkRIqAQhCfH0pqfTCjJ9c8A3dZGmgQlt11hTw64fRdn25tcBHa4df3oa1z8jaYxBBL7/\nfe1Oa15qg3//W3/KyjTxzyVOu4mMuYY7Od8VV6TrzRctyszH5Idchu5YzMnV5K4kmI8UUky//SCd\nfVsIrx8hzFV0ySvV+RG5+npnlT5ihL8arlgqFMPEp0511IQGbgeAoPuvr9dOC25pyD2H3vlubNTP\n2PTpjkeaOdebJNJcIxcj6GjbQ1thmUU3h1FNmUjt1kQrIkJrwsd31QOvlHFo/0PpW9GXZZuWpXJO\nGQiSxjwAWH0W21or0OqopIeUCpGIp0s2ZWVw2WX6Y1QEXqax7766LnO2FW1sU4yN/dZTVn4mEE5J\nFiaLrju63AT2hcPajTYfQ3q2F9stLYRCmSVfvavHUvntBxHZtnhseYmUO1renZY+aH7OOiu/WBE/\ntFeF4p7vxkb/bMTjx1imPDsAACAASURBVOtnLtdq3sTlGKLvnteg+fZ6ybkli0IlqI62PbQVlln0\nABjVVM1hNSl7xtp/r2XBugUM//Jwtu3Yxh2r7qAlka5M3ta8LS39+eotwelDEiRIi+9riEK8Av0I\ntSISAlE6GE5J6sULh9MzwxpD7eWXO4ZugDPP9M+WmtIXH7SWCxddQfMLpxPa/36+f/gRXDa5P2vX\nOhlpp03T5yxYkO5qOmhQfsSovt6RBrx1MrwvdG2t3u/OEOtlMqVYHQYRWbfH1OzZcPvtOluvqcnu\nB7cEJALDhzsuz97EkaD7N/Oarwu0m6ivXav/m+HDoV8/vS9XhtygKolBiR2NI4VS8Mgjmln49WlK\n+Rrp9PzznePuew6ab+9+8Gd8+TCCYjLOUkokonJlj+smGDVqlFqxYkVnD6PLYtIDk5i1claatGDU\nTm0yhm86AuYuhng5hFs45sf/YtiuRzMiso1V761i3eJR7GjZwblnlzPx5OqM02MxzTBeeQUGDIAj\njsjMKAsOUSDUSjweh0QFAOHyOH+6uYw//MEVKS5xwmEhEQ+lrTJnzcokmn4v2OzZ8OMf+5/nNYC6\nx+bOECui+7j11sKntL0v/fTp8POfOyvs8nJ48snsfc2e7TDbXr20S7OpaeKWoCBdr9+rl9brQ/CY\n3UTdrLwNRKB37+zeasaW0NKi78Uw70mTnBoo4TD8+teOsXv5cmf85pibIZXK+cCLbE4Y2dq2hVG0\nN9ZCRFYqpUblamcli50ENYfVMHfN3FSUd0hC9Ar34rtf/S4LX00PiHBLG4FI5pWiIUpo/6Us2+dZ\nnhFBvai0CqtaJy1c81IF1YcvSTPKew2KH3wAq1dr42QopIlKWXmcw45dQ1PzCBJxQRJhUCGMB1a8\nJcykSWZlaNLmQrw1fdyhkCZGbgS9YI2NjiHefZ5fyg/3KjsUcnJiKaXvAwqLIfES7Xw9eNxEprIy\nXRXT2prbxdeocIwRvn9/TcS9Xl9nneXYA0R0FL57le03ttpaZ468UCq3t1pdnfOMGE+ntWt1rIvp\n09RAMefV1jrHQqFMlVwudWIx4GZIoZCW8IIkqPYS+460dwS7zVj0KBjbxjXfvoZZJ87imjHXsLhm\nMZf9x2X0KetDiBBhCXPM4GOyelOloepZOPpaEgO1q25ropW4iqcYjULRFG9Ky54L6UTAjZYW/YJp\nt9cEy99dTiL0BaGworxcCJfpXkFpCSJlPjFBgiH9EX3ArBpN8kGTlyrISByN6vbhsP52rwq97pl3\n3pm+gh8/3mEYLS165Tt2rHNNL4whPRbTnylT9HmJhCawtbXB55rzx46Fq65yrtPY6IwB9Pgefzxz\nHO5rGzWJcS6oqdEEa9w4h3G6VU7hsGYm2XJkmbE99lhw2vtQSH9MGhP3/2CwZUvm9pQpwUzLqNAM\nEgnNWNz3777fXr2KzyjMOAyzbW3VDDHovwx6FvOF9/8rpb3DShY7EYJSjbhjN+rW1AVHd7cR9752\nL7NXzs5IiujATVGSxnJJAAo57iLGffkMJhweZdWqEFu26NXviBGacGjVhnKd28rooz/l3DP3TKX3\nNvYEdyI+Pz2y1zDtZiJl5XESCsrKwR1HIqJTsffv7/TpVz7VDT9Dsdt7zBB5vziObJl1o1FHKjD9\neN12/Vayfvry2tr0bL81NdmDAt0wBDCo3vsZZ8DBB2faetyELhZLD+wsL9dz7J6nsrJ0puV1m/bm\nzzJ2pGLEJmRTHUWj6a7i8Xjwit/PplGIWqojYy0ss7BIYyKmUJLBkD2G8N5n79ESb0FEqN63OsMQ\nLgiD9hjE25+87du/QjH5wcksWr+I/rv1Z8R3JxO+rZp43OSYUtB7K+zY03VSGaw4HxVK8KWJ/+bC\nCzN119XVMGMGvPaasP6NBPF4gooKYea1e6ZemunToalZkYgL8bhi1izJqis3v9MMqH9bi6q5EDYc\niRq6jBHfvYmKudUpBmTcc8NhOOmk7O66XuPqjh16xdyrV3Yib87NllnXy+z8CLHfSvaKKzKJjOmr\nri59Xz7EyOs67K2P8sEHznhNRmGzPXu2NoLvsotj4xCB731P/y4vz15S16SS92bmdf8Pue6jvXER\nkUh6KWC3lOqFn6E831os7jF2hKutZRYWaag5rCbNc+qdT9/hoshF3Bi7kdZEq6/Hl
IgEMgqDuIqz\n8DVtGykP3cGX//tKNs+7QjMFBHb0dffo2CcSirv+vB/GNbe5WXH55cKwYaSkjOOPJ03qcGOrbCCR\nGIxWUQlKaQK9alWwEdrrFbVgUSPxAU+j9nuSuIRprHyAxYurUyv8225z1B+jR2sPHD9i4zWugiai\nixbBH/9IhiTkJnKxGJx3ni5fa+CXWdcdL+JXg6NQN03jglxIPiwv01q0SBfBMit9r9QU5GBgoJSu\nnWISKE6c6B9l7bUrtWXFXYy4CKMSvPnm/Nym3XOQLbNyIWMsBSyzsEjB5JiKVEVSqqiWRAv1b9Vn\nxF0YCJIee5EHWhItbP7aL+HwL8GKiUCyfmiaOslsZ6ZEf+oplXS7zTwmou0JRoX0+18MBhV2tVAo\nJcyZExwwVlmZHn09fP8qlsYr/GuZx9KLNQWt9LwShXu13drqjKO+PlPqicXgmGPSvYlCoWADupeY\njBjhHwMSRMTcq/v2RlnHYjrCOR530mMERbQvWODfl/GkMvPl5wrtR8Dbor8PYgTulB8bNwbXS2kv\nIc+HmXekUdsNyywsANJyTHnx/ufvB3pHBe0fvMdgNm/bnN0t97A6WH0WtFbgMAyAOIhyEXkT9OdS\nWwXU1TC2gro6TVQSrWFPWz3e5hbF5MmS8sQx6R0g0yuqnxqqAx8fWA8N34LNg6FKtw2yc2STKEIh\nTWzcgVzuhHjGtmJQX59ZH2TECP3tF23uJiZBHkdBxMW7ujfjNF5H+cIQ1+XLHQO5cWcFf0I4YQI8\n+mj6Pr+58huHm8iGw/q6V19dWH4obz/mf5k0ScecGAcEMya/YM+2EvKgypH52jk6ApZZWADpOaaM\nZ5Qh9G4VU1jCjP/aeN799F2ef/f5QGaxedvm/N1v62vhzbFaJSVx+MrjcNA/4aGbU3EVSAtaPSVo\nxuI1iruheODJd/lBzQ7Ky4fS3OwdRxyVgHhSNeUt+Wq8oozrY2UlsDnC3EsiNDXB7TdonbRb3751\nK1x5pSaIvXunZz91SxTe2AWTluSOO5w2psjR3Llayti4URNAt6dPNBqcXddr6A3Kj+UXC+BNK2+Y\nVEuLVo+de25woJ979W1UaV4j95FHwnHH+RPCiRO1ysqMwczVhAlabbhunVYhrl2budpvbNQSTH29\nbmtiLaAwou1n9/G6/5r/yE/C8WM2boaeT5Cht3JktjGW2qjthmUWFgAZ6c9nHjeTBesW8Nibj6UR\n/YRKMHrAaKJDoqnqfO4qfAZKKcTlxzmk3xAG7TEo09Oq6lmI1sLbR0NcQbhFb1c9C/u+BGuS7i6H\nJS2tDVHo8yGsPRPePoZMlZUex+ZX92XGVc2MPvUJXnp0NNu37orxltLf6QzmySf1CtJ413z3u3Df\nfU6iuHPOcYh5IgGTJ0MonEilGlEJx93YRH8DjPl2nKYmzeRCISEU0oxl7Vpgn7XcdsdBxFsc6ccd\ngbxjhx4TOLEcSmkj77Zt2aWHmTOdaOmbbvK3gfglZBR/gY1EQq/WlyfLpfgFOfoFKXoxbFh2QnjZ\nZTry2ox3woRMgr18uU7meNNNmWo9rzFdxD8FSjYjtpG8jP3Ay/BCoewr+iAju1/uLUhfTBipOBcj\n6Cijths2gtsiBWOzcKc/j86NpqmmeoV7seSsJanjdWvqMlKJCEJIQiildJqQJI4ZfAzLNgakT990\nhGYEQ55Eqp7NLpVsOgLmPOkqxKRA4oz+z6d4Y11fPnp5eNJwbhhDyHVyHC2Z+KmzdD8hCZNIpFPN\nY46BZ55xq4OMWiyE19YSCsHTT8OMP21h4V+/lLxeK4MPaOLt9bumzg8ddB+JV07ErYIbPRrWrHFU\nHqk5dQX9mbxHDz7oEHjTNhTShNxtR/Hz/Jo+XcdoGFuCt+/t2zWBfsrD2wG++tV0ScxIT48/7khP\nRhIyfScS6Z5s2eCWGBYscPr1juGtt/yrMZr5Ki/XRbgefNDJH/aDH8Duu+eXwtzLUE84Qe8PKtrl\nDcY78URt2Dfz8I1vwMqVetvkLJs7N53hhcNOITG/sZUitYeN4LYoGN44jEhVhPqz6qlbU8eWz7bQ\nf7f+1BxWk2oTqYpQt6YulbRQEI4edDSxzTFaE60ZBD9r/Eay4JJOQRIKNKgDmqkkkt5SSQLP9yax\nfNjtsPsR8OpizSdw2zwAErDLh7B9X1dnCRxJQ0CFSfhcdulSTWjmz3cTKDfDcU469FDt0nvf/XuT\nYiai+KJlO7Brql1i275akkomXgyFhP32g+gpG/jXv4Q3VuyfVLs5xM+46Br3XCOFpPpM6JTv7hxO\nq1ZplYkbXh2/2yZgku/FYrpmujfp44YNmii606O7o9l79dLSmEnhHaTfD0IkQirnl9uw7capp2rJ\nwlzXSBTGnnDOOU6kvTsr7F13pfeTTUWVzRXZ6zQA6V50iYRWhYVCzrVXrUo3jJvruz3jlHIWCt6x\ntSXKv5iwzMIiK7LVDI9tinHn6jtTRL0iXMGwLw1j2aZlue0VARARlNJ1wkf29y8Ty5B6KGvWDCGU\ngBOmwKjb9bGUHeSXsGEc+hF3rfy37538rUBak0Z0r+4lUxejFPz971rn7nhitabaSkgxqKqMjRsV\nq1fr9CVpEowq48NNlWl9hr+ylMTIOajYT5DGg0gkYOFCBfcNhK89CDIAVAUgiDjutRs3asIRpBRo\naEjfDlpFu11rwT9Z3pIlev/LL8Pf/uYQNLeqzaRtNzCSjCGaQfr9IJiIdq9R3+Dkk+G662DoUIeh\nhMNw8cVOgkJzLXeciBcmhbnXruCGVyWVzWnAG4xn4mXcVSLPPddxdwYtWbhVbEa686ZX986Je/47\nynZhmYVFm1HfUE88oZfZgnD28LNTOahM5b4jBhzBi++/mJYKXRDKw+Wc8NUTeL3xddZ9uC51zKio\nEomEP6OAtLxUDKnX297j0au1TaMV7Vm192vw4YEp9VTfYSvYtmc9LPs/MqUDfxfelpYES5929ksI\nFHFIhFEJ2LgpgVLiOc/pKxEX9toLPvooea/LLiIUVqiWcFoyeBIV8OpJrnM1kVq1ShP2GTP87QF+\nMOk0ID2dhNe1dtUq//PdxNJtDwiHHULmJpCJhFYdTZjQ9sjk+vp09ZIJQHRLPrGYvo7JkKuUZhRe\ne0hNTXocDDjqnvPOy7Qr5FNNERyJxxsdfsstuHKWaZSVOYzFK13lW1+9vj5TLRlUUrZUsMzCos3w\nGsWNisqdPgQgOjeaOkcQTjrwJC77j8tSdo9j/nJMXvU3QoQIhUK6bVJt5QdBUF6GAsksudqIvst3\nZrDtpa+jbRhu6cMwDj/DeQKVcKQFlTD2kHByG/CopMyITF+GUWimEE4SALdrsEE4OTZS5z75pI4P\n8curBZnGXS/KyjSBcRtUm5q0sd4QU1MlDtKz7Ho9xNzR0xdfrBmYwWOPabWdm+i5U8l7VSi5EiJe\ncomWJsx41q5NN3pnMzhHInosOtIf1q93
bAYjRmiGk49x2R3Rfscdznx5XYqN4d99rzNn5mbGuVKp\nRKP6Wua/D4V0nx0Zb2GZhUWb4VcT3Ow3v6cvnZ6SPkDHZSxav4jL/sMpNHDiASemoruDcGb1mexe\nsbsu8EQwYxGEP5/4ZwB+u/S3vF11rXPQxTy27PksDDnCUWelDOFug7UzaiQOg56Gt7+VulIQQ4EE\n9NsIWweRoQZLtff0jwAJKvZ6l+aP93W5CDvtX33VywycjVBIckobxx+vbQlugmOS+Rm4EyWadoaB\neNNSGNVNv36Z6hd3lHyQCsXYRbyrY2+cy7Zt6atv4w7sVt1ceGF2Q/A99zhGfaNGmzw5XVUETkZb\nw9AgvR/3Ct+byNBg4kTHrbqyUs+DidMIqjPiDmL0U4lFItoOY1KzmzF0ZLyFZRYW7YLXpuH1qPKW\nfQVSmWj779Y/Vd0vRCjNcyokIc445Aw++PwDJgybQPU+1dTW12YUcHIjJCF++h8/pXF7I5W7VPLe\nZ++lHZeq59hv2Cbe+fQdvcMtfbzzDXj1lFTb3b/0MZ9+0BcnpkNB9V2wKeLEfoRaCGGkAzdDEPhk\nEOEyIR5vTRJ+45GVRdUlcZo/2Vu31/64yetrI7wmjm7GFIf+a5FwC4cO/Dqrn9sjNf7+/dOztpaX\n629HKlEkEoqhB25nw6u7paX83rIlXXppanJSnV9xRabXz8UXO1KHm2HMmuWkZHEzMrcKyy+Izayi\nTQ4oI025U4+7pSiltDF9aGQtjZUPUNl4ItN+UJ2hnolG0+NV3ExSqfRtE3vj9irz1ng3aiU/GELv\nNv6DnoepUzUzKTSNR01N+ngKSe5YDJSUWYj8//bOPUyK8kz0v7e6h0FUboNyneGyAkpCYJRFRtSg\noEFQ5FlysjHuQhSdmCMJiAkb92x2PXGfwzmuBowSIt4CWY2bhCwoAl6ACUSHmwKiXARh5A46CIjI\nTHfVd/74qqqrarqnZ2CGufD9nmceuuv6VVXzvfXeZSTwBPoX/6xS6v9G1t8H3I/Wt08CxUqpLSKS\nB/wJ+Fvgt0qpSfU5TkPdEMwCbxFrwbLxy3ztY8rSKSEfxKsfvRqKeJKIU1kpxZ+3/pll4/Xr3fB5\nw32B45mjbMcO7T+mzxieXPMklXalburk2KFjxq04f9v1b9m/bX9qoWfKOl4AViU4cYgl+OLqabB4\nViDqSuCrDnDXMD/3o23Ldpwo/XuqRkXFQCktKK56Fk52DAki2n8ERy8jpMVYNvR5FbaPQf+3tF1h\nY6PrZAEhjcQd05GvoZTF5iOpaKl4jk3B4E0cenWg3lcUEyemd+Lv2HIhQQHmKMVrr8WI8uabsHy5\nfisuL09NgI4Djz+uw22PflXOyjfaucJRC7cFC/S4vAKAYjkU/Y/VPDqrJ53mdc5YATgYzptIpCZb\nkVS01WOPBSu7Ku7/9R9xui+HkkGoiq+jHKniUwi+nVeHUjqQIWiiKilJ9XgPTtBeeZRx48KJmp4g\njJ4rUxXaaCfC6DbpkvFKS3XAg2eia5JmKBGJAbOAm4B9wDoReUUptSWw2UtKqd+4248BfgmMBE4D\nPwe+7v4ZmgDBLPBKu9KvM1WUX8TMkTMZNncYCTuhczAiiXxVkvpQ/jEAP/nPW9cnrw9bP93qbx+3\n4nS6qFMqC11ZWGL5DvOYxHig6AFOnD5BjpWT0lD8jn8twErCoDk6AdATIotn6Qk3Vplyprvrju0d\nAqvHAC3wcilSmog7mbfZo4+3Y5TfVZBey+Hzv3EnVRu6rkNGPqjHu3Mk2IqcFsIDD5e5IbQ9XIFh\n4wsjAHH0chXHsW1uu+MgnbpW8vyxCayzExB70z9n68FvMPbysTzzjINtBwVH0DRmYSdtHFH+8m7d\nYN8+PYElk9p5e8cd4QnQtpXOupbWrtwJC6ZEAu67D2j9Cc+U/oGVv5/iBhroPiWjRwMXHaLTNW9A\nt96U/GdRKCzYiw7ych06ddI+DC8ayrYhlpMk2fIw6rdvuOVj8GtRHTuWMu2MH69NQeF8mei90Of1\nOjB6eSPBxL50xQ/feEMLRdtRxOJJvnPXYZAuiAWWpUDFfBNX1GRUWhrukWJZ4fMFzWqeEz/aRMwz\nF9aXwKhPzWIwsFMptQtARF4Gbgd8YaGUOhHY3n/FUUp9CfxVRC6rx/EZ6oCg2Snq8PYc3JDK2Sgp\nK+FYxTFmlM4A9CQ/sONA1h9c7xckjImeDIPHsCwLx32NVCi2fJp654hJjKdGPaW3c3M0LMtiatFU\nTpzWP7HWLVszo3SGFiRipboBlg3TgkLFESXQZp92joMOx+34QfVRV54Z64LPtOZxujWUPhgSMJK/\nBuuum7B3X5tytm+ckMpYH/kAKn81DuIfz+n5V55IrqPyG1fBhtSkz8jJcOhKfYxO78HSJyAJCodF\nJx/hnusUyfdWoZQTGttjL3Vk/+B22GoI4CUzegRMY1YSKxYDJ06LFjq3JOi8dhzFiy8FgwAC5jQV\njxwvdY7W3T/m1WP/B/vt2fiVhtGCZOFChYq3xmo5h7lH3+NHsc3A3/i5JdGKvBWVimeeTzD03j8w\n+jvD6XRxZwq/tY37f92RpNcXXpTv23j00VT+x8yZ6bQKLbQHDtvDyS8VO9f1IJjIOWiQTpR85hk9\noXs5HEVFqa6IHomEvjeObfHi0x21KdFyoOgJbiv4Bzpd3Nk3XQV9E9EIMNvWgsgr+RIs0f/kk6kQ\n6kTAKlvfTu76FBZdgb2B7/uAq6Mbicj9wFT069mNtTmBiBQDxQAF0awjQ72TzuyUzuHt4X0fPm84\ntmNjWRZP3vIk/S/tHzrOzJEzKT9VHjrGrFGzmLR4UtpkP4CPP/+YJ9c86WsMSSfJzNUzKZlQAsB1\nL1znaxlKBbSaHiV6UrcVykrQ/vJNHA0euJqoq4zrL38F65MbcbqvcNcJ5JdidXsn5ZdJE/qrUP7x\nbNzJI/+d6sOEAV77NTgxkot+yZK/uQuntZMa2+Gvw+JZOI7Fi295vpN0yYSulnPRYYaPLadXq0IO\nHYp2bnP3UZHv/ufUcS5qV8HJzy8ABMtSPL7sBWx1aTiZ0t1eKYFkDs7u66gAHp/b3Z04FYmkDZdu\n5aHi/qHeJNgWK2d/B5RFvEUlvSuW8fXcW9gcB+UoLNEO/6Cv4HSF4qFffI7ttEtzDyw2Jf6AaqGA\nfwqMD1q2FJLJVBhysG7Xe+9VfRze8VJl9pPYb09m4dtxWuaGw3WjDbmCSX2gv8+cmdIeKiu1Yx5S\nIcWewKhvJ3eDO7iVUrOAWSLyPeBfgAm12HcOMAd0uY/6GaEhE+nMTg9d91DGJL7gPg4OooTyU+UZ\no6qCFF9VTP9L+zNv0zxe2PgClXalP+Hbyuaxdx4jWrqm0q70mzkFS4wI4mtA0RDbo5dkFgwWFn07\n9GX7Z9tDzvgq5K9GCtbRL68vWz/TGkyVEidZhJCnXdnKrn7bQ1em3tRti09WXg+3vqzX7R3i+l0C\
nZVFCqMhnC07k88a8fG2Sc6JVfyGc8R4N9wXEJqeF4vT1P4HXHtMaUdzG7u6GFsWSYOt9ewzYz8Ft\n3UkkHRxJID1XYn0yHDuZOq9jC/f/+o/0v+okw4YVIZbt7i/u+GIkK5Js/e2PAEUsDsX3Cq1ba6e3\nZ8oS0aHNR/d7QQDRUGmFevvBgDKUEoJvvx2OGvN8CvPng6OCAkfcj+699O+PHqvC4vRpeOKJlG/C\nthW/eRpycx2+/f2DLHm1FUf3t/PPr5SOggsSHMegQdClS+YSJHWJlX2TM2Y/fiFnALq5yzLxMjC2\nHsdjqGM8s1NMYlXMTrXdpyi/iIeu08bY6aumU7q3atPiovwiZt86mxUTVvCDq37gT6hAlcKF1TGm\n7xhWTFjBTb1u0o51t5d4dZO3IOTGc/lm92/WKDvdVjbbPtuWdVvPsR908AvCvVfey4PXPFij68lI\n2bDIm3zUdBSYzKKmo6Bj39/Ghq5rIVYBktABAblHU/uJQ7+rD1A47SfYhb/RQvjGf8X5xxuq3ttY\nkoJxs5k8ewHWjQ8jE24iVrCWqXdcSU5OQBBZSZKfd2HKM//F5sObA5Fl3p8bMeb6buykcOiLg8x4\nIkEi6SCWzZ337aPL5Qe8EwPQNv8gd963H3Hb9+rri5HK6E/dG9sOm3tAT9THTh/FsW13DNp5H4vb\n2j8R0rpSIdmeLyQV2QYooeI0vPh0J47u95qAKf886ZzxXj2w9et14cX6FhRQv5rFOqC3iPREC4nv\nAt8LbiAivZVSO9yvo4EdGJoMNdEIarNPpmiqdMcoyi+isHMhkxZPwlY2cStOvw792Hg41ckvbsUZ\nP0AbiD1txBKLA18cYPORzTw87GFW7VkVCusFPVl3vKgjR7484h/n7oF3+8d65r1nQpqChUV+m3wq\n7AoOn0z1/qhW+wByrBweKHqAx995PHS8uBWnsHMh87dk6AYUZMA82HBXyqfhVecFUmVRvIkfqpid\nLMedgBWonMj6wD2xHJRVCSMfgMP9YfWPdUZ8RfvUca0EW772HYi5giGqEZUN09FmxMBWrJw7jL/e\n8O9wbSlKOSgV48TpEwy5bQsrP9yuj7ljFLx7D2s3VrJu4O9QyX5priU1XhEdaWdX3g3KwrGTvPTm\nh3Rq1wbo4m99bN8l/MEegRrdV5vxqgiJIFV9MI4Da1e2C2/VZwHOtY8T+3QA8tpTOLYX7hzV6lTo\nWCmzlbdtkvZdT/D5gfbpBYWluDjvBCc+bY0TifiqT+pNWCilkiIyCXgdLc6fV0p9KCK/ANYrpV4B\nJonICCABfE7ABCUiZUBroIWIjAVujkRSGRoB1dWOqu0+maKpMhE1TW06vMlfJwj3FN7j779iwgoe\nfftRFmxfwNoDa1l7YC1jLx/rl2J/a/dboY5/R786ypg+YwBCBRTTaTwKxZ7je8iJ5XD75bezYFv1\nCYaDuwzmys5XMn7AeErKSqp0Gkw4Cd8/k5X81fD9G9L7NFwTW+5bs6n4ZEB4P0lCrJIr/nEOhz9N\n0r5yIDvfuIFwrxBXoFy+kK5XHKLDFR+w6YiFWvxkKtfEn/QcKHyhev+O5x/yijx+PAJn941I0Qyk\n5Qnkos95bun3SFQCsR4wcK4WLiqufUo4ocKLmqC5BxCF3aLcjRTT0Wlq13AOiqcBpDSmxK6hcN10\nve9rswMCwwkcO2p2C/4bvH4FX3RBdXsHp9tqir81BN4fz2+edlK+i5AmR2B/FV4visu+8SnrDrQN\nnBv/PEolOfHZBf73eFzOSQOkevVZKKUWA4sjy/418HlyNfv2qL+RGRoj1UVTZaIov4iSspKQ41sQ\nWsZb+pqAt92peW0NrgAAGTxJREFUxKnQvgu2LeD1na8zc+TMkIbhhe16WeWWWMzdNNfXiKK+Ee+8\nlXYlKK0ZZJroc6wcZo6cCWjhmNcqLxWZFaC65EMgnMRYnU8jfzUVI34Iz68MRCElodcyBn9vCWtj\nT0B3OLp3CFh/iZitbIhXwND/YF/+avYBbPy1Kygib/exyrBW49Lt4m7s+2JfapzRIo+OQr09DVDY\nlmtyUZaOFPOO60WNDZin/zaN56LKy6jYNpxk0kFE+zZAUDY6Gs2x3Mtw9HUrL7kRfV2xSqyeK/US\nrwilFyZtJfX+Khg1prL8Cxy8EvYOwclfTeHg0xT/ELbsPsbK1z3tK2oCzICKs3ZJH1Ll9COajVUJ\ndiv/eL0Gb4duR4H6VS0a3MFtMHiciVkLwkImZsVCJqPpq6b7xxrXbxxv7Ar37axIVlB+qly3TU3j\nPAfd8MnTdIb1GEZuPJeKZIX2W0a0gk4XdWJq0VQefTsVb3pn/zvZUb6DLq27+GVOgua2a7tfW335\n9gC+j0MEUVWFTFryV8Po/xnKGbl+/ApOdy6FA4FtRt0fnjALXwjnnFRBAQ5cvhAZ+ngq5DjA4S8P\nh/Na8lfDFfPh428RjUZSfm14OywcolpT/mpOAld8dTd9T/6APt3yePRfO0MyB92O1wLiWrOwbFdG\nWPgTrygYORmn29upgQ56Fjp+SLej/8C+9r+DpTNg/9Wp8eUehz6vwKlLodUR/W+njVyyfyKflnXQ\n2zmW3q/zBpZceIz+l5bSfsQqeHNyRBNz75t//VENxru3wVIxpD7brUL3eOtXKxg+b2pGs21dYYSF\noVFxpmatqJBJ5/8ovqqYJTuXhMxEDg5LP17KnuN7GD9gPIWdC3nuvefYcGgDtmPj4GCJ5Ws6wXPl\ntcrjR0t+5DeHyrFyfNOS9+YvCBe3uJg1967xzzl91fSQua19y/ahNraZiEkMhcJRDo7Sb9Se8IgK\nrSpEckZK5V2cg06126QVEkEfiThaCA16NqPISjgJruhwBbmx3JQ/6asOZC7g6H4fOTkkHNKx9YLn\n2XrB8/Rr3Q/Gt9H90S/4TOee+Dksbl7Ku/cQzO/QY0ghCC17buTnP/w+9y9eT7LwOVdYuONJtILB\ns0NjscTi5rYX8+KDd6X6yO+/GvZfzYINFbz20bdIdl0Fd/03vP1T2H6b9g+JK7AcnV1PzyWw+ya8\nRMUUQZNV1G/iBQAkYMDcGpltzxYjLAzNgqiQyeT/mHbNNBZ9tChkJlr5yUpWfrKS5zY8hyW6qm3M\nilF8VTGFnQur5HwEz+X5TABfm1m7P1XWRKF4YeMLoaZRUU1oyc4lKKWIW3Guyb+GVZ+sQqEQJNTf\nY2rRVJ5c8yQVyQptglL4y2eUzqjWdHVJq0soL1iL4052iUyypSZ5JZl8JBnY+tlWYhJLmdt6/AXJ\nSaDctupaE/D8EO7bdWQyr44tn27RcZf5rj+p4we03HcLp7stSY2v03tVs/EDeJWQQRe2fEU9j7Pz\nFtg2Vo9Nib7mwPU6yuHlY5Nhwovwp9/B8V74k7ndgsSua6DrSr3Pd8elukEeL4D196K1HwW7bw5c\nuw0dtkL55a7pLBpwEPneZxGSvwaRGHmtwv1S6pr6DJ01
GBqM6kJ0Z42aRY6VQ7QeVcJJ+ALGqysV\nFRRBvOz18QPGM/vW2fq8c4exYPuCUCRU0klSUlZC6d5Spq+aDsCy8ct45IZHuHvg3SSdJA4OSin6\ndehHy3hLt2Og+AmEjuOw8eBGZo6cyYheI1IlU5SibW5bJhZODIXhXl9wvT9Bt4i1YNwV4zKayXOs\nHHJjuXjtcG/udTM5Vk7mbXtswLruUWIF60LhyxYWPdr2qHJfAT9iLSYxLui5kcunToLhP4fRPwTx\nypiA/zZ9wWcZz399wfXpL8QjfzWni/4tLMgGPQt3fRNu/Ln2m0Q0hE4XdmLepnncMPcGFm5fqDW1\nof8B8dM6TDiWqCJgvOsSBI73CCx1NYfo9l6I9oB5OgrNu1aF1tIkoX1EPVa5OwSd3kkdstx2d/iY\nX3RB7b0a27GZsnRK2gCMusJoFoZmSXX+j2AUVbB/eI6VE9IsvIq4QT+Id5x0Zq6SshISdvjt3pus\n81rlVdn+oeseonRvqd8syusJ4oUEe057QXBweGv3W6zas8p3yEcDAYLHufMbd7Jm/xpdVBGhdcvW\nxKyYXzIlOL7RvUcDOuRUoVhetjytWcsSy6+vBXDoy0Ms3LYwdSwR9hzb44856E/JsXJ4atRTbDio\nGzt8UfkFWy8I1BVd9BtSkUp2Rs3CUQ79LunHnuN7KDtelv7hZyKD1iQIz214rmp1gGxNtly6H5tA\nWdSMds2j1QYdhPxDsUptLvuqA+nLwUTW/XaFNgNiwYFBMHcZasJwKgvW1aspyggLQ6MkWur8TKjO\n/+GtGz9gfBUzUklZCXuO7/HzKWzb5ul3n/YjorwIrKiZa1iPYeTEckI+jNG9R9Ppok5sOLghY5HF\nqFDzwmm9Cru92vVi17FdvqPdc8hH748XBjyu3zjKT5X7k1/CTvi5HNFJ3BKLJTuXhJ36rnkrVBYF\nPVHPKJ2BoxxiVixU9deryeWNuW+HvpxKnGLP8T0AfsLk3E1zU2Y0Fxn0nD5KNWai4HiDAv5MiN4D\nW9mZ/UVZzHIWFnvazYP4P0ASxLJod+NzHL32n6sfRDb/UHVC6vs36IiyXSP8sGIpu4EWPTfVKILw\nTDHCwtDoiL61p6sVVVekEyieg3zuprlVwmm9ST5dmK9XLNETPoWdC5mydIrvm4hbcXBIW2QxOIbo\nsX869Kf+cYLnCmo5XiRXwk6wvGw5U4um+seAVLkThdKOcqWwLItb+9zKq9tfDYUd58ZzmTlyJkt2\nLOGV7a/4E7tXxddRji6dHiAqWLZ+tjW03nZs5m+Z75d6Ce2Lyjh5xiTmCyFBuKz9ZVWOXVN6tOnB\nyMtGsuijRalw3rNEoVDd3oYJw5GyG7h9ZFsWffW/IEu8AZA15LnadV7bYFf7uHzQYZ4z0VCG843g\nW3tFsoJJiyfhKKfarO66xnvj9ybhpJOs4vuoSZdA7zpw4N4r76WgTUFWoZfu2P0v7V9t1nswC91x\ntAbw1KinKD9VztoDa0MRYH3y+vDN7t/0NaklO5bg2A5xK87Ewon+8ilLp1RpSBWTmB8cEHwbD2kg\naWbKuBVnXL9xrNqzytcsquSXRCbIfh36MXnI5JCgbBFrUeXYNSHHyuGlcS8BsHrf6jMSFp6Q9SLk\nQgIyfzVWwTrIvw17W3otJSYxhhYMpV+HfrRu2ZrH33lcR7W5ZsZaETGR7bzwXeCeWl9TbTDCwtDo\nCL5Zi4j/NnsuwgODBE1V6SbqbGG+mXqU1+bc2c7lCdZovoWtbF8bW7t/rW8mAthevp2yY2W+UPC1\nChF/jNNXTde5JAEc5XBP4T0UtCkgr1UeP17yYyrs8DbpEIRbLruF8lPlvpZ4rOIYJbtLQqXpg8Qk\nxrNjng0JymMVx0L5K9no3qY7e4/v1VqJCJuPbA6FOgNc0eEKtpdvr6IZeWPwBGLQpFjYuZAX33+R\nlXvCuTH9L+3P4h2LM+a+2Mrmnb3vcGf/Oym+qpixfcf6v6vNRzYzc/XMKlpTuoRNn4BwTTpiQmcN\n5x/RXIaoCaYhxnMm/wnPNMkQqvfZzHl3ju+biApWb9LLjeWS1yqPYXOH+ZNjMCfDq8i76/Ndvm/D\ndmx/wslrlVelpFFMYiGB1//S/kxZOoV1B9aFzFgt4y35uyv+jt9/8Hud0R6Ls2TnEl796FXfrPjI\nykd8YRR9S/cKKUavu2R3Sei75QZziugormBeTG4sl1suu4Vn3ntGm81cM1g0AGHn0Z062Eh0VJI3\nhrF9xzJt6DTmbZrHoZOH/PHHrBijTo5i1Z5VRHn/yPshwefd7+Bkn3SSTFo8if6X9g/9roryiyg/\nVc4/Lw/7Oq4ruI51B9ZRkazw/T6ez0gQ33cTt+L1/n/DCAtDoySay3C2zu6G4kwETXUFFee8O4cf\nLNKt2d7Y9QZP3/p0SCAB/udodJZCkWPl+JON5+OIJh6W7i1lytIpflkTQYhZusFU9Fo2HNrgT4be\n2zfAxS0uZvbo2Ww4uIH3Dr7naxCVdiXzt8wPObljxBjTdwxLdi7xzX2e1hO8F9Gqwj8Z+hPa5rYN\nXXdeqzxfo4JwhNi4fuMo+aTEF56e5uDgYCmLuBX3zZ3Thuqci4I2BRz68pCvvdm27ZeBiZJOQ4pb\ncV+IedgqJZSjzcOivehPJ0+HfHbB6/R8Sp7mVN8YYWFo9Jzpm31TpbqCitFKtPO3zKf4quIqJiuP\nYHRWbiyXX93yK9+PsXDbQj96aUTPETw87GHfBOVNjhYWI3ql1gWZt2leKCqpqFsRi3cuDkWDWWKF\njuVN2svLloc6Hw7uOphpQ6dVeSkI3osYMcb2HcupxCnG9RtH8VXFofGk+42k8/1kCkAYddkov2gk\nUMUXVFviVtwPFw5WKs6xcnyhHA3kiIY3rz+4ns1LN1fx1UXHFtQK6wsjLAyGRkZ1BRWj9a3G9RuX\n8TjR6Kxg5dyH//JwSiOI5YSEQfT86QRFOk4nT4c0mSrhraLDe/tf2p9be9/Kqx+9qlvgikVeq7y0\nLwV5rfL8BETvjT/TWNKZ7rL5foJViz0zmeejCvZ99/CSEIMmv7gVZ0jXIfx1z19T2pLb6rf4qmI/\nEVPfAuGugXeFhLL3UjB/y/wqZqyor650bykPlzxMhZ0am5fLY8xQBsNZUBf5GueabAmFgO+ziL5d\npztWMMR2+qrp7Dm+x89QD05eNTl/kPEDxvP8xudJ2AlyYjlMvHIiGw9vDGkWjnJCUVMbDm7w3+ZF\nRJtdlMOUpVN8O76HZw7zWvDOHDmzWkGRrRdKJmHiVS2O5swE+7571+NFmEVNQlOWTgG0kLit721M\nu2ZaRuHraS7R5V60mKfpCBKKwvOu0TPhWWKFeq3U9+/bCAtDs6WmzZQaI9WZ3oqvKs4qJKIE70U0\n5yNYyr0m5w9uUzKhJKOZZ/yA8Ww+stlvUJUbywXw36ZFpRzA6SLdgi14cbSAjAqU6LaZeqFU91vI\nlDMT7Pvu+Wz
Smb48DcHBIUaMwV0GhwR0SVlJ2lyhbCHS3nV564LniZoOzwVGWBiaLbVtptQYqC9N\nKHgvapPzkY1sZp50E6DndE739hzEm8S9N2mv3Ek6oZ+tF0p1v4VMmpRXFibb88h07kwCKvqMs4VI\nl5SVpD3PuRQUYISFoRlzJs2UGpL61ITOJufjbIlOgJmit6Lj8Sbxh0se9jsZRif64MRbnemsugnd\n28frAV/d2DNdX7pzpxNQQI2rE6T7PaQ7z7kytRphYWi2nE2eQ0NQn5pQY7oX6d6mq9vW65Vekzf3\ndBO+d5ya9Dw50/uSTqikE1C1qU4Q3PZ08jTzNs1j9q2za2xeq2uMsDA0a5pS2G19a0J1dS/OddBA\nbd7cswmeTJNxfZgpM427ptUJhvUYpgs22rpgY7Qvyrm4hiBGWBgMjYTG9PafiYYKGqjpm3ttOBdm\nynRaVE2rExTlF3H3wLt5+t2nUSi/L0pUoJwrU6tEm883VQYNGqTWr1/f0MMwGJotXoy/5z+ISYxH\nbngko+nnTI5fW0F5tlpOQ4dWZzv/mYYE1wYReVcpNSjrdkZYGAyGbKSL8c+N5daZZtGUw5zrm/oW\naDUVFladn9lgMDQ7gjkPXox/XU7omSKHmgteQmR9tj2tb4zPwmAwZKW+Y/ybWphzbTgbrakxaVxG\nWBgMhqzUtfM9XWJaY3funylnE7HUmBJLjbAwGAw1oi5Db9O9LTelMOfacDZaU2PSuIywMBgM55TG\n9LZcV1TnhD4brakxaVxGWBgMhnNKY3pbrgtq4lc4G62psWhcRlgYDIZzSmN6W64LmqOmlA4jLAwG\nwzmnsbwt1wXNTVPKhBEWBoPBcBY0N00pE0ZYGAwGw1nSnDSlTJgMboPBYDBkxQgLg8FgMGSlXoWF\niIwUke0islNEfpZm/X0isllENorIX0WkX2DdQ+5+20XkW/U5ToPBYDiXNMVaUfXmsxCRGDALuAnY\nB6wTkVeUUlsCm72klPqNu/0Y4JfASFdofBf4GtAFeEtE+iil7Poar8FgMJwLGlO9p9pQn5rFYGCn\nUmqXUqoSeBm4PbiBUupE4OuFgFcv/XbgZaVUhVJqN7DTPZ7BYDA0aZpqhd36jIbqCuwNfN8HXB3d\nSETuB6YCLYAbA/uujuzbtX6GaTAYDOeOppqX0eChs0qpWcAsEfke8C/AhJruKyLFQDFAQUFB/QzQ\nYDAY6pCmmpdRn8JiP5Af+N7NXZaJl4HZtdlXKTUHmAO6U97ZDNZgMBjOFU0xL6M+fRbrgN4i0lNE\nWqAd1q8ENxCR3oGvo4Ed7udXgO+KSK6I9AR6A2vrcawGg8FgqIZ60yyUUkkRmQS8DsSA55VSH4rI\nL4D1SqlXgEkiMgJIAJ/jmqDc7f4AbAGSwP0mEspgMBgaDlGqeVhvBg0apNavX9/QwzAYDIYmhYi8\nq5QalG07k8FtMBgMhqwYYWEwGAyGrBhhYTAYDIasNBufhYh8CnzS0ONoIDoAnzX0IBqQ8/36wdwD\nc/1nfv3dlVKXZNuo2QiL8xkRWV8TB1Vz5Xy/fjD3wFx//V+/MUMZDAaDIStGWBgMBoMhK0ZYNA/m\nNPQAGpjz/frB3ANz/fWM8VkYDAaDIStGszAYDAZDVoywMBgMBkNWjLBoAohIvoisEJEtIvKhiEx2\nl7cXkTdFZIf7bzt3uYjIr9we5u+LyJUNewV1g4jERGSDiCxyv/cUkTXudf6XW90Yt1rxf7nL14hI\nj4Ycd10gIm1F5E8isk1EtopI0fn0/EXkAfe3/4GI/F5EWjbn5y8iz4vIERH5ILCs1s9bRCa42+8Q\nkRr3CkqHERZNgyTwoFKqHzAEuN/tU/4zYJlSqjewzP0OcAu6rHtvdHOo2VUP2SSZDGwNfP9/wAyl\n1GXoqsUT3eUTgc/d5TPc7Zo6TwBLlVKXAwPQ9+G8eP4i0hX4MTBIKfV1dBXr79K8n/9vgZGRZbV6\n3iLSHvg3dIfSwcC/eQLmjFBKmb8m9gcsBG4CtgOd3WWdge3u56eBOwLb+9s11T90A6xl6Na7iwBB\nZ6zG3fVFwOvu59eBIvdz3N1OGvoazuLa2wC7o9dwvjx/Ui2a27vPcxHwreb+/IEewAdn+ryBO4Cn\nA8tD29X2z2gWTQxXpS4E1gAdlVIH3VWHgI7u53T9z5t6D/OZwDTAcb/nAceUUkn3e/Aa/et31x93\nt2+q9AQ+BV5wzXDPisiFnCfPXym1H3gM2AMcRD/Pdzl/nr9HbZ93nf4OjLBoQojIRcB8YIpS6kRw\nndKvDs0yDlpEbgWOKKXebeixNBBx4EpgtlKqEPiSlAkCaPbPvx1wO1podgEupKqJ5ryiIZ63ERZN\nBBHJQQuKF5VSf3YXHxaRzu76zsARd3lt+583doYCY0SkDN2r/Ua0Db+tiHjdHoPX6F+/u74NUH4u\nB1zH7AP2KaXWuN//hBYe58vzHwHsVkp9qpRKAH9G/ybOl+fvUdvnXae/AyMsmgAiIsBzwFal1C8D\nq17BbUXr/rswsHy8GyUxBDgeUF+bHEqph5RS3ZRSPdCOzeVKqTuBFcC33c2i1+/dl2+72zfZt26l\n1CFgr4j0dRcNR7ccPi+eP9r8NEREWrn/F7zrPy+ef4DaPu/XgZtFpJ2rnd3sLjszGtqJY/5q5Oi6\nFq1yvg9sdP9Goe2wy4AdwFtAe3d7AWYBHwOb0VEkDX4ddXQvhgGL3M+9gLXATuCPQK67vKX7fae7\nvldDj7sOrnsgsN79DSwA2p1Pzx/438A24APgd0Buc37+wO/R/pkEWrOceCbPG7jbvQ87gbvOZkym\n3IfBYDAYsmLMUAaDwWDIihEWBoPBYMiKERYGg8FgyIoRFgaDwWDIihEWBoPBYMiKERYGQxZExBaR\njYG/n2Xfq8bH7hGsLGowNFbi2TcxGM57vlJKDWzoQRgMDYnRLAyGM0REykTkURHZLCJrReQyd3kP\nEVnu9hZYJiIF7vKOIvLfIrLJ/bvGPVRMRJ5x+zW8ISIXuNv/WHQPk/dF5OUGukyDATDCwmCoCRdE\nzFB/H1h3XCnVH3gKXRkX4ElgrlLqG8CLwK/c5b8C/qKUGoCu7fShu7w3MEsp9TXgGDDOXf4zoNA9\nzn31dXEGQ00wGdwGQxZE5KRS6qI0y8uAG5VSu9xCj4eUUnki8hm670DCXX5QKdVBRD4FuimlKgLH\n6AG8qXRDG0Tkn4AcpdS/i8hS4CS6vMcCpdTJer5UgyEjRrMwGM4OleFzbagIfLZJ+RJHo2v+XAms\nC1RYNRjOOUZYGAxnx98H/i11P7+Dro4LcCewyv28DPgh+P3E22Q6qIhYQL5SagXwT+gy21W0G4Ph\nXGHeVAyG7FwgIhsD35cqpbzw2XYi8j5aO7jDXfYjdFe7n6I73N3lLp8MzBGRiWgN4ofoyqLpiAH/\n6QoUAX6llDpWZ1dkMNQS47MwGM4Q12cxSCn1WUOPxWCob4wZymAwGAxZ
MZqFwWAwGLJiNAuDwWAw\nZMUIC4PBYDBkxQgLg8FgMGTFCAuDwWAwZMUIC4PBYDBk5f8DAkVpn8pWhMcAAAAASUVORK5CYII=\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEWCAYAAABMoxE0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXt8FNXZ+L/PTi6giLSxikIgeKug\nkWuR/ARdDFrwBhbborbgDSqCrbWtL7ZaqfKKt1q8UBpUKKkKtvKCVkVUygpKkGsABS2okSBKNRXv\nJNnZ8/vjzOzObnazm2Q3N87389lPdmfOnDmz2TnPPJfzPKKUwmAwGAyGxuJr6QEYDAaDoW1jBInB\nYDAYmoQRJAaDwWBoEkaQGAwGg6FJGEFiMBgMhiZhBInBYDAYmoQRJG0YEbFE5EsR6ZHOti2JiBwv\nImmPSReRESJS4fn8togMS6VtI871iIj8trHHtzdEZI+I+NPc52MiMj2dfRoaT1ZLD+BgQkS+9Hw8\nBKgGbOfzz5RSjzekP6WUDXRKd9uDAaXUd9PRj4hcDfxEKeX39H11Ovo2pAcReQzYpZSa3tJjaa8Y\nQdKMKKXCE7nzxHu1UurlRO1FJEspFWyOsRkMhqYT755t6H3cFu97Y9pqRYjIDBF5UkQWisgXwE9E\npEhE1orIfhH5UEQeEJFsp32WiCgRKXA+P+bsXyYiX4hImYj0amhbZ/8oEfm3iHwmIg+KyGsicnmC\ncacyxp+JyC4R+VREHvAca4nIn0SkSkTeBUbW8/38TkQWxWybLSL3Oe+vFpEdzvW842gLifoKm1tE\n5BAR+ZsztjeBgTFtbxaRd51+3xSRC53thcBDwDDHbPiJ57ud7jn+Gufaq0RkqYgcncp3E2fMM0Rk\nkfP7+FJEtojIcc74PhaR3SIywtO+i4jMd/4ne0TkNhHxOftOEJGVIvJfEfnEuf7DY76fG0Rkm/Mb\nWCgiuQnGVW9fDqc5/5tPReRRty8ROVJEnnd+O/8VkVWefk8WkVecfdtE5LwE579aRAKez+Hfuohc\nC/wY+K3znS1x2nQXkSXO9/aeiEyp53vvICL3iUiliOwTkT+LSAdn3wgRqRCR34rIR8DD8bY5bZP9\nDq4VkV3AW4nG0mpRSplXC7yACmBEzLYZQA1wAVrIdwS+B5yG1h6PBf4NTHXaZwEKKHA+PwZ8AgwC\nsoEngcca0fZI4AtgtLPvBqAWuDzBtaQyxqeBw4EC4L/utQNTgTeB7kAesEr/LOOe51jgS+BQT9//\nAQY5ny9w2ghwFvANcKqzbwRQ4elrD+B33t8LBIBvAT2B7TFtfwQc7fxPLnXGcJSz72ogEDPOx4Dp\nzvtznDH2AzoAfwb+lcp3E+f6ZzjXNMI59gngPWCa83kysNPT/p/O+Q4BjgI2Alc5+04EioEc5//9\nGnBvzPezFujq/F/+jdag440rlb62Ov/jI5x+3e/nHrQwznaOP8PZnuNc243OvhHO9358nO846n9A\n/N/6dM9+H1AO/NY5z/Ho+7E4wfU9CCxxfh+dgeeB2z2/qyBwh9NXxwTbUvkdvOCco2NLz08Nns9a\negAH64vEguRfSY77NfAP5328G+YvnrYXAm80ou2VwGrPPgE+JIEgSXGMQzz7/w/4tfN+FZ4JCjiX\nBILE2b8WuNR5Pwp4u562zwJTnPf1CZLd3v8FcK23bZx+3wDOc94nEyQLgDs8+zqj/WLdk303cc47\nA1jm+XwR8Bngcz5/y+mvE9ANLXRyPe1/CryUoO+LgfUx3884z+f7gIdS/P/H68v7P77Q/b+hJ9v/\nA46L6WM48AEgnm3/AG6O8x03VJCcDrwbc75bgIfjXIsPOAD09GwbhiOwnd/VASDHsz/etlR+B2ek\n8v22xpfxkbQ+Kr0fROQk4I9oc8sh6B/d6/Uc/5Hn/dfU72BP1PYY7ziUUkpE9iTqJMUxpnQu4P16\nxgv6KfwS5++lzl93HOejJ4QT0BPAIcD6JP2B1jYSjkG0Se+XaG0FZ+xHpNAv6Otb435QSn0uIp+i\nJ3r3O2nI/2yf5/03wMdKqZDnszu+nkAusE9E3PY+9AMMItIVeAA9qR7m7Ps45lyx4/p2vAGl2Ffs\n93uM8/5O4A/AChGx0Q839zj7dytn1vUc1y3eGBpIT6CHiOz3bLPQWmksXdHf4xbP9ygxbfYppWqS\nbEvldxB177cljI+k9REb+lqCfgI+XinVGfg9dX/I6eZD9JMSAKLvoPpu4KaM8UMg3/M5WXjy34ER\nItINbXp7whljR+ApYCba7NQFeDHFcXyUaAwiciwwB202ynP6fcvTb7JQ5b1EBBAichhac/gghXE1\nhUqcyV8p1cV5dVZKnersvwsdNVjo/M8up/G/q1T6iv1+94KeUJVSv1RKFQBjgP8RkTOd/fnimb2d\n4+J9b1+hHxpcusbsj/0fVaI1ii6e12FKqQvi9L0PbW7+rqft4Uoprw8o3m8gdlsqv4M2m4rdCJLW\nz2Fo88VXItIb+FkznPNZYICIXCAiWcAvgO9kaIx/B64XkW4ikgf8T32NlVIfAa8Cf0WbR3Y6u3LR\n9uiPAdvRToobMIbfOs7pHmi/jUsn9A3+MVqmTgRO8uzfB3QXJ7ggDguBq0TkVMfBPBNtNkyo4aUD\npVQl8Apwr4h0FhGf6DU6ZzhNDkNPwJ+JSD7aHNlYUulrqud/fBPaJ4fzGzvOERifoc09IfTTexD4\nlYhki8hZaLPnk3H63gKcKiKFzgPFrTH796F9Zy5lQI2I/MpxpFvOsQNjjkPpsPlHgFki8h3RdBeR\nc1L8blxa5HfQXBhB0vr5FTAB7fwuIf6NlFaUUvvQkS73AVXAccBm9FNnusc4B1gBbEOboZ5K4Zgn\n0HbosFlLKbUfbX5agnZYX4wWiKlwK1ozqgCWAaWefreina3rnDbfJdps9xKwE21C8pqC3ONfAG5z\nxvUh+qn6shTH1VR+AhyKDh74FO1jcJ/WbwUGoyfvZ4DFTThPKn0tBF4G3gHeRvtGQH+f/0I70l8D\n7ldKrVZKVaODJ0ajg0IeQPvGdsZ2rJTa7vQXcPpeFdPkEaCvEzH2lNKhtec6Y65w+i9B+y3i8Su0\nWW2dc40vos2nKdPCv4OMI9EmSIOhLiJioVXzi5VSq1t6PAaDoXVhNBJDXERkpGPqyUU7sGvRT2QG\ng8EQhREkhkQMBd5F+wa+D1zkmBsMBoMhCmPaMhgMBkOTMBqJwWAwGJrEQbEg8YgjjlAFBQUtPQyD\nwWBoMxxxxBEsX758uVIqYf47l4NCkBQUFLBhw4aWHobBYDC0KUQkpQwOxrRlMBgMhiZhBInBYDAY\nmoQRJAaDwWBoEgeFj8RgMGSe2tpa9uzZw4EDB1p6KIYG0qFDB7p37052dqKUcfVjBInBYEgLe/bs\n4bDDDqOgoIDopL2G1oxSiqqqKvbs2
UOvXr2SHxAHY9oyGAxp4cCBA+Tl5Rkh0sYQEfLy8pqkSWZU\nkDj5mt526hRPi7P/GqcWc7mIvCoifWL293DqLP861T7TSVkZzJyp/xoMhuQYIdI2aer/LWOmLSdj\n7GzgbHSpzfUi8oyT8tnlCaXUX5z2F6LTlnsXv9yHTuvdkD7TQlkZFBdDTQ3k5MCKFVBUlO6zGAwG\nQ9snkxrJYGCXUupdp+TkInRtgTBKqc89Hw/FUyFMRMYA7wFvNqTPdBEIaCFi2/pvIJCJsxgMhnRR\nVVVFv3796NevH127dqVbt27hzzU1sZVw43PFFVfw9ttv19tm9uzZPP744+kYMkOHDq3jlzj//PPp\n0qVL1LZ7772XQw45hC+++CK87eWXX+bwww8PX2O/fv1YuXJlWsbVUDLpbO9GdA3iPcBpsY1EZApw\nA7q63VnOtk7oSnlnE11tLaU+nT4mAZMAevRIVr21Ln6/1kSqq0EE8vIa3IXBYGhG8vLyKC8vB2D6\n9Ol06tSJX/86ulijUgqlFD5f/Gfo+fPnJz3PlClTmj5YD4cddhhr165lyJAh/Pe//2Xfvn112ixc\nuJCBAweydOlSfvrTn4a3Dx8+nKVLl6Z1PI2hxZ3tSqnZSqnj0ILjZmfzdOBPSqkvm9DvXKXUIKXU\noO98p74qsfEpKoJZs8CyIBSC6683vhKDId2UVZYxc/VMyiozd3Pt2rWLPn36cNlll3HyySfz4Ycf\nMmnSJAYNGsTJJ5/MbbfdFm47dOhQysvLCQaDdOnShWnTptG3b1+Kior4z3/+A8DNN9/MrFmzwu2n\nTZvG4MGD+e53v8uaNWsA+Oqrrxg7dix9+vTh4osvZtCgQWEhF8u4ceNYtGgRAE899RQXX3xx1P5/\n//vfBINBpk+fzsKFC9P+/aSDTAqSD4B8z+fuRBe6j2URMMZ5fxpwt4hUANej62lPbUSfTaKqSguR\nUMiYtwyGdFNWWUZxaTG3rLyF4tLijAqTt956i1/+8pds376dbt26ceedd7Jhwwa2bNnCSy+9xPbt\ndd2sn332GWeeeSZbtmyhqKiIefPmxe1bKcW6deu45557wkLpwQcfpGvXrmzfvp1bbrmFzZs3Jxzb\n2Wefzb/+9S9CoRBPPvkkP/7xj6P2L1y4kHHjxuH3+3njjTf45JNPwvtWrlwZZdqqqKhoxLfTdDIp\nSNYDJ4hILxHJAcah6zmHERFv3ePz0LWvUUoNU0oVKKUKgFnAHUqph1LpM5245i3L0n/9/kydyWA4\n+AhUBKixa7CVTY1dQ6AikLFzHXfccQwaNCj8eeHChQwYMIABAwawY8eOuIKkY8eOjBo1CoCBAwcm\nnKR/8IMf1Gnz6quvMm7cOAD69u3LySefnHBs2dnZDBkyhEWLFmHbNt27d4/av2jRIsaNG4dlWYwZ\nM4annnoqvG/48OGUl5eHXy2V5TxjPhKlVNDRIpYDFjBPKfWmiNwGbFBKPQNMFZER6DKunwITGtNn\npq6hqEhHawUCWoiYqC2DIX34C/zkWDnU2DXkWDn4C/wZO9ehhx4afr9z507uv/9+1q1bR5cuXfjJ\nT34Sdw1FTk5O+L1lWQSDwbh95+bmJm2TjHHjxvHDH/6QGTNmRG3fvHkz7777LsOHDwegurqaE088\nkWuuuaZR58kUGV3ZrpR6Hng+ZtvvPe9/kUIf05P1mUmKiowAMRgyQVF+ESvGryBQEcBf4Kcov3lu\ntM8//5zDDjuMzp078+GHH7J8+XJGjkxacqNBnH766fz9739n2LBhbNu2La7G48Xv9zNt2rS4Zq0Z\nM2bwm9/8BtBmtJ49e7Jnz560jrepmBQpBoOhxSjKL2o2AeIyYMAA+vTpw0knnUTPnj05/fTT036O\n6667jvHjx9OnT5/w6/DDD0/Y3ufzhYWFq9UopXjyySdZsWJFuJ2IMGbMGJ588kn69u0b9pG43Hrr\nrVx00UVpv55kHBQ12wcNGqRMYSuDIbPs2LGD3r17t/QwWgXBYJBgMEiHDh3YuXMn55xzDjt37iQr\nq/U+u8f7/4nIRqXUoASHhGm9V2UwGAxtlC+//JLi4mKCwSBKKUpKSlq1EGkq7ffKDAaDoYXo0qUL\nGzdubOlhNBstviDRYDAYDG0bI0gMBoPB0CSMIEkzJvW8wWA42DA+khQpK0u+MNGknjcYDAcjRiNJ\nAVdA3HKL/ptI2zCp5w2GlmP48OEsX748atusWbOYPHlyvcd16tQJgL1799ZJmOji9/tJtoRg1qxZ\nfP311+HP5557Lvv3709l6PUyffp0RIRdu3ZFnUtEosZUXl6OiPDCCy9EHW9ZVlQ+rjvvvLPJY4rF\nCJIUSCQgYs1YJjeXwdByXHLJJeEsui6LFi3ikksuSen4Y445JiqPVUOJFSTPP/98nboijaWwsDDq\n2v7xj3/Uyd+1cOFChg4dWidDcMeOHaPycU2blv7CskaQpEA8AeFqKTffDGecAXPnRnJz3X67TkEf\nCBhficFQH+n0KV588cU899xz4SJWFRUV7N27l2HDhoXXdQwYMIDCwkKefvrpOsdXVFRwyimnAPDN\nN98wbtw4evfuzUUXXcQ333wTbjd58uRwCvpbb70VgAceeIC9e/cyfPjwcF6sgoKCcKbe++67j1NO\nOYVTTjklnIK+oqKC3r17M3HiRE4++WTOOeecqPN4GTNmTHjM77zzDocffjhHHHFEeL9Sin/84x/8\n9a9/5aWXXmpS/fXGYHwkKeAKiNLSyLZAQBe9ctPMT50KhYURn4jxlRgM9ZNun+K3v/1tBg8ezLJl\nyxg9ejSLFi3iRz/6ESJChw4dWLJkCZ07d+aTTz5hyJAhXHjhhQlrlc+ZM4dDDjmEHTt2sHXrVgYM\nGBDe97//+798+9vfxrZtiouL2bp1Kz//+c+57777WLlyZdQED7Bx40bmz5/P66+/jlKK0047jTPP\nPJNvfetb7Ny5k4ULF/Lwww/zox/9iMWLF/OTn/ykzng6d+5Mfn4+b7zxBk8//TQ//vGPo4pwrVmz\nhl69enHcccfh9/t57rnnGDt2LKCFojeNyk033VQnp1dTMRpJA1iwAB5+WP/48/LAW2TNtiMmL+Mr\nMRiSk4n7xGve8pq1lFL89re/5dRTT2XEiBF88MEHcSsRuqxatSo8oZ966qmceuqp4X1///vfGTBg\nAP379+fNN99MmpDx1Vdf5aKLLuLQQw+lU6dO/OAHP2D16tUA9OrVKzzJ15eqHiIFsJYuXVonn5Zb\ns8Rt5zVvxZq20i1EwAiSlIn90VdVwezZkJ2tBUpubsQnYnwlBkNyMnGfjB49mhUrVrBp0ya+/vpr\nBg4cCMDjjz/Oxx9/zMaNGykvL+eoo45qlPnnvffe495772XFihVs3bqV8847r0lmJDcFPSRPQ3/+\n+efzt7/9jR49etC5c+fwdtu2Wbx4MbfddhsFBQVcd911vPDCC1H13TONESQpEu9HP2kSvPIKzJ
gR\nrZZ7fSXGrGUwxCcT90mnTp0YPnw4V155ZZST/bPPPuPII48kOzublStX8v7779fbzxlnnMETTzwB\nwBtvvMHWrVsBnYL+0EMP5fDDD2ffvn0sW7YsfMxhhx0Wd/IeNmwYS5cu5euvv+arr75iyZIlDBs2\nrMHXdsghh3DXXXfxu9/9Lmr7ihUrOPXUU6msrKSiooL333+fsWPHsmTJkgafo7EYH0mKJCpylahe\nialjYjAkJxP3ySWXXMJFF10UFeV02WWXccEFF1BYWMigQYM46aST6u1j8uTJXHHFFfTu3ZvevXuH\nNZu+ffvSv39/TjrpJPLz86NS0E+aNImRI0dyzDHHsHLlyvD2AQMGcPnllzN48GAArr76avr379+o\nsriu+crLwoUL65i6xo4dy5w5cxg/fnwdH8nIkSPTHgJs0sgbDIa0YNLIt22akkbemLbqwaQ7MRgM\nhuRk1LQlIiOB+9H11R9RSt0Zs/8aYApgA18Ck5RS20VkMDDXbQZMV0otcY6pAL5wjgmmIi0bg0l3\nYjAYDKmRMY1ERCxgNjAK6ANcIiJ9Ypo9oZQqVEr1A+4G7nO2vwEMcraPBEpExCv0hiul+mVKiEDy\n0ESjrRgMdTkYTOXtkab+3zKpkQwGdiml3gUQkUXAaCAcdK2U+tzT/lBAOdu/9mzv4G5vTtwoLVcj\n8YYmGm3FYKhLhw4dqKqqIi8vL+FCP0PrQylFVVUVHTp0aHQfmRQk3YBKz+c9wGmxjURkCnADkAOc\n5dl+GjAP6An8VCnlBlgr4EURUUCJUmoucRCRScAkgB49ejR48LFRWqA1EL8/vrZiBInhYKd79+7s\n2bOHjz/+uKWHYmggHTp0oHv37o0+vsXDf5VSs4HZInIpcDMwwdn+OnCyiPQGFojIMqXUAWCoUuoD\nETkSeElE3lJKrYrT71wcP8ugQYMapdG4oYmxGsisWYm1FYPhYCU7O5tevXq19DAMLUAmo7Y+API9\nn7s72xKxCBgTu1EptQPtiD/F+fyB8/c/wBK0CS2jxFvVbhYcGgwGgyaTGsl64AQR6YUWIOOAS70N\nROQEpdRO5+N5wE5ney+gUikVFJGewElAhYgcCviUUl84788BbsvgNQA6r5Yel17Z7i5INALEYDAY\nMihIHCEwFViODv+dp5R6U0RuAzYopZ4BporICKAW+BTHrAUMBaaJSC0QAq5VSn0iIscCSxxHXhY6\n6iu6ikuaKSuD667T2oi+rkyezWAwGNoeGfWRKKWeB56P2fZ7z/tfJDjub8Df4mx/F+ib5mHWSyAA\ntbWRz8Ggca4bDAaDF7OyPQl+v87w6xLPuW7WlBgMhoOZFo/aau0UFcGDD8Kjj8Ixx8CNN0ZrI2ZN\nicFgONgxgiQJZWVw/fVaUJSXQ9euersrLMyaEoPBcLBjTFtJiBUUJSVaA3HNWH4/ZGXpiK6srMRr\nSoz5y2AwtFeMRpIEN1XKgQM6Yksp/b60NKJ5uJFciSK6jPnLYDC0Z4xGkgQ3VcrPfhZxuisF8+dr\nAREIaG1FKR3dVVpatw9Tw91gMLRnjCBJgaIimDMHrrpKm7AgEgbs9+tFiqCFycMPw9yY7F+mhrvB\nYGjPGNNWAxg/XmsiNTXg88Hu3Xr7lVfCX/6i39s2TJ2q31dVRYTGhAmRPoxZy2AwtCeMIGkgrp+k\ntlZrHgsW6CSOWVlaSwH9d+pUCIW0FiKit+XkaEFiMBgM7Qlj2kqBssoyZq6eSenS98OpUkALCjeJ\n4w03RLYrpQWHbWuBY/wjBoOhPWM0kiSUVZZRXFpMjV2DtX85WdkrUMoiFNLmLdfnEQjoz6FQJBTY\ntvV7d7vxjxgMhvaIESRJCFQEqLFrsJUN3V5l4n2P02P/ePLyIj4Q1+eRmxsJ8b3uOvjTnyIRXQMH\name98Y8YDIb2hhEkSfAX+MmxcrRG4rMgvwz/+SdQlB8tEWIrKgYCWgtxX+vXw7ZtUFhohInBYGhf\nGB9JPZRVlhGoCDBr5CwmDpiIIDy86WGKS4spq6y7RL2oCG66Sf91Q37dcGGljI/EYDC0T4wgSYDr\nG7m59Dmm3LSHj97qRTAUxFY2NXYNgYqAbpcg9Yl3IWNubuprSEwqFYPB0NYwpq0EBCoCVFcMIPTX\nFwnZOTwTCGJd/hx0f40cKwd/gT9p6pNYE1ayNSStKZWKu2o/ni/IYDAYvBhBkgB/gR+p+ArsHFBZ\nhIIK3hvKoO8dwJ91E4HHiti9u/7Mv7GCIdkaktaSSdgdd3U14ei03FyTI8xgMMQno6YtERkpIm+L\nyC4RmRZn/zUisk1EykXkVRHp42wf7GwrF5EtInJRqn2mE1+v1WDVAEEQRajDf1j3usXdPzuHm29R\nzJunw3wTma3qy7FVVgaTJ+uXN5Nwa0il4o47FNKf3fUyxr9jMBjikTGNREQsYDZwNrAHWC8izyil\ntnuaPaGU+ovT/kLgPmAk8AYwyKn7fjSwRUT+CagU+kwLgYoAwW6rYeQv4PnZoHzwwv3QbwHU5hBC\nqLGhd28488z4ZitXMLgaSV6e9n/k5enw4Joa3W7+fFi5sm7kV0s9/bvj9mokZg2MwWBIRCZNW4OB\nXU6ddURkETAaCE/6SqnPPe0PRQsKlFJfe7Z3cLen0me68Bf49ZtvjtBCRGWBreDLowDLGZKwYwe8\n+y70719XAHgFQ15epECWCFEr5L1mLPcVi+uzaA4BEztu4yMxGAz1kUlB0g2o9HzeA5wW20hEpgA3\nADnAWZ7tpwHzgJ7ATx3tJKU+neMnAZMAevTo0eDBF+UXMaznMFZt+AQkBATBqnX2KrxWwerqSG6t\nnByde8s7+RYVaU3ENXP5fPrlCpNEdeBdwQHN74RPJNAMBoMhlhZ3tiulZgOzReRS4GZggrP9deBk\nEekNLBCRZQ3sdy4wF2DQoEEJSk7Vz2VdHmLVC8dByAe+EJx2P6z9JSBElCSwLMG2tSCproZrr9Xv\ns7MjmkasmWvWLNi8WR8faxabO1cLJtvWTu4JE1qHE95gMBjikUln+wdAvudzd2dbIhYBY2I3KqV2\nAF8CpzSizyZRtaMQn+oIZIES+GgAhLLQgiQER7zJ4As38+c/R9aKQCQtSk1NpNCVay66/Xb9d9Ik\nLUBilaWyMpgyRSd7DIV0NUZoHU54g8FgiEcmNZL1wAki0gs92Y8DLvU2EJETlFI7nY/nATud7b2A\nSsec1RM4CagA9ifrM534/ZCbI7r+SJZg91lC6P1h2ldi1cLoiXQYmkNh8Z2sKCzi7rvh6acT9+c1\nFyVaM+JWXHRRSvtfxo9veSe8wWAwxCNjgsQRAlOB5Wjv9Dyl1JsichuwQSn1DDBVREYAtcCnOGYt\nYCgwTURqgRBwrVLqE4B4fWbqGrxO591dnqDkP3PhyC1Q4YeCAOSvZdX7cMZfz+CG7v/g2WfHRNVt\nt6zEa0cSrRlxKy66tU18Pu1vMT4Lg8HQWhGlGuU+a
FMMGjRIbdiwoUl9lFWWMXT+UEIqFHe/79Xf\nof51Oyqkk2v5fLo876RJCfrzaCSWpassur6SWB+JWQhoMBhaAhHZqJQalKydybWVIkX5Rfz6//06\n4f5Qz3/hy6rF59OLFL1CJF7+LFfbmThRhwM//LAWLGVlOkPweefBoEHaKW+EiMFgaM20eNRWW+Ku\nEXdx3LeOY9baWez4ZEdkR+UQqDgT+5yp9Ok0jF/8uB+TxhQC9efPcn0ibjVF1zk/b15kseKWLSb1\nvMFgaN0YQdJACo8s5Ken/pT91fsp/7Ccd7cdxa4Ff9E5uawatk8oZurzOWwuK2X8mJ715s8qK4Pd\nu7UGA1rQgI7YcjHhvgaDobVjBEmKlFWWUbqllPnl8wmGguRYOVx3zBO89MTREOwA+HQ015bx1JZP\noCTUgQUPatOUd/2IG7ob6yOZOFFHZ23erAWLK0y8qVVMxJbBYGiNGEGSAm5tkgPBAyhnIWJ1xQDu\nuXUUynbUCBT4nLhdOwelfNTU6IirePmzSkv1GhGl9GvTJp1zKxjUgmXMGOjaVQsXN7VKS6eWNxgM\nhngYZ3sKuHXbXSEiCFLhR9nu4kRnpXv/+dD3b2DV4LNUWAPxVk4ErY3Mm0c4VDgUgnXr9Kp429av\nwYO1w76qKnEGYYPBYGgNGEGSAm7ddksscqwcRh96Jydmnw2+IDpVioKsGi1E8suQCWdzzIVz+P5t\nd7Nt37Y6EVuxiw69iESbwLy8hq5kAAAgAElEQVSp5S1L+1QSVU801RUNBkNLYNaRpEhZZRmlz+7k\no9fOZtlTR1MbVISohhOWQaePoG8p5K+NPqhyCCxYgYRyycmBB+63qKqKZAJ2TVsuPl8kdUps7q1H\nH9X+k1Co7roTaF3VFQ0tS3Nmija0b1JdR2J8JKmyp4gFvyryTP6Cz8qh07Fv8fng32qhsXpaeMU7\noFfA2zkoZVF9IMi1U0KgfFFJGx99NOJYd1fCx1ZZjBU6tg1/+Ys2j7kRXalUVzQTTPvHPFAYWgIj\nSFLEnajdyVxEEfId4POj/xnWPNwQYJlwNip/jRYqVo2O5gLsoF717jrh58zRfZWU6H5DoboCIPa8\nXtx1J/GyC8dLS28mmPZPaynXbDi4MD6SFIktg9t75CoYPwLyy8Kahy5+lY2qOEMflL9WV1g8ZqOn\nJ0VWVmSi799fh/uK6FdeXvzz+nz63AUF8ccXm104dvKor+yvof3QWso1Gw4ujEaSIlFVA3tv47o3\nzgFbLz+XXqtQruZh1WpNBLSm8sL9EMxFy2wBCXHFFUJRUcRsFQxqjcO2dQnezZsjJq6iIm0Gc3Nv\n7d2r65wEg3qi8CaFrC+xYzKNxdA+aC3lmg0HF0aQNIBwtcPVz2KHdNiVIIwuPpJnZSTBd0+P6yOJ\nlOa1yc5RjB+vFcFYs5Vbw6SkBBYsiGgWVVXa7BUKaWEycaKuY9KQicJMMAcPJlO0obkxgqQRuOHA\nNXYNls+ia6eunF8MS7vdGd3Q6yPx2dB/PjLgSbbte5DAzELy8rR2UF2thYSLK1ASVVeMdciniplg\nDAZDJjDhv40kNmWK5bOwQza2il4gYu0Ziv3eMChYCflrkcr/h/xtBdi55OZIuL77/v3wxz9G1pfk\n5sLKldHhvUabMBgMzYlJI59hivKL6HF4D4KhILaysUM2F5x4Ab49p+sw4MohZPuyuaD4CMZcvYPc\ngs348KEqziBUm0XIFg5U22x+531uugm6dIn0LQJXXEHYjzJzpt5+0036b7xFh7GLEVNZnGgWMBoM\nhnRgTFtNwGviyrFyGNXhNpY91oeaGpCsIEw4h3+qf2L5LM49/lz2frGXdYdUgYRABVGEeOTtuxhf\n+VP8/qI65qvYkN1Zs+Ln3Uq1nRcTDmwwGNJFRjUSERkpIm+LyC4RmRZn/zUisk1EykXkVRHp42w/\nW0Q2Ovs2ishZnmMCTp/lzuvITF5DfRTlF7Fi/ApuH347K8avoGpHIcFaCxWyUMEsgu8OxVY2NXYN\nS99eyvrXs2DZLAj5AAtCFsHn7qX02Z1xw3djQ3YXL44fwptqOy/eY6qrYfp0o5kYDIbGkTGNREQs\nYDZwNrAHWC8izyiltnuaPaGU+ovT/kLgPmAk8AlwgVJqr4icgq7R3s1z3GVKqfQ6PRpJUX4RRfnO\no7w/4hTPygZ13GvUIuFkj6riDCeKKwsdxWWBnQ0VZ1JWWUYgGMD/E3+4v1gne79+2m+iVPx8XG67\nsWNh9er6Q33dY1xH/8sv62OMZmIwGBpKJk1bg4FdSql3AURkETAaCAsSpdTnnvaHomdXlFKbPdvf\nBDqKSK5SqjqD420y0SG2Ftv2Pcj9i8rZ3unPOiTYjeIKAlggQbBq6XzSZopLLw2byGad/DpVO3RU\n14QJum83nbxt68WJsSV43XZuRFdhYf3OeXd9yj33wDvvaGFiVkIbDO2H5gzQyaQg6QZUej7vAU6L\nbSQiU4AbgBzgrNj9wFhgU4wQmS8iNrAYmKFaUeiZG2L7P3e+w7039yEU6gPWWJhQrBv0WwBfHqXf\nd9oHff9GIKjNX7ayqa4YwNTbT8Ku1ZO7z6cjuEBP9KGQdsZXVeltsb4Od4FislBfdzFkdbXWcHw+\ns1DRYGgvNLcPtMWjtpRSs5VSxwH/A9zs3SciJwN3AT/zbL5MKVUIDHNeP43Xr4hMEpENIrLh448/\nzszgEzB36Tbu/l0+IdsHytLmrC3jdT6uDRPhrYvgrdFQPgH2ncyGv5+Nb8/pWGIhW8YTrMkKrytx\nNQWIn/qisalP3ONcYTVixMFn1jJRa4b2SnOnRMqkRvIBkO/53N3ZlohFwBz3g4h0B5YA45VS77jb\nlVIfOH+/EJEn0Ca00tjOlFJzgbmg15E0/jIazuJlVY5D3Sl4JY5UsHOJrHK3IJgDz88mpHyQbXPh\nz1fw3JbvYysJ9+VqCuPH61esqtrY1CdeH4nPp/0qB5sQMVFrhvZKc6dEyqQgWQ+cICK90AJkHHCp\nt4GInKCU2ul8PA/Y6WzvAjwHTFNKveZpnwV0UUp9IiLZwPnAyxm8hkYxdlQeL85zfCG+EJw3FUss\n7I1OESxXwCCgfKCyCNUq9q4rImRbgDZfjR6ty+26xDNXub6OxYu1M9598qhvIaO77brr4E9/0k8t\n11+v/SoHy2RqsuQa2jPNnRIpY4JEKRUUkanoiCsLmKeUelNEbgM2KKWeAaaKyAigFvgUcFzGTAWO\nB34vIr93tp0DfAUsd4SIhRYiD2fqGhpL4cAvyb5iFLXvno7VazUXfPcC/vm7651QAgi/kZBOnRLS\nyR5rTlxIzvprw08RJ54Ymejd3FsQ/ePw+jpefDHiU1mxArZtiyR7dLdB5EkctGkrNiXLwYBJYmlo\n7zRnSqSMLkhUSj0PPB+z7fee979IcNwMYEaCbgembYAZIlARINT9Nei2CsTi6523EwpmoV1SNloj\ncdxT/efD
4buhIMDW/HXMeWJYOGJryhSd5Rd0YavSUi1QvOaY0tLooleuT6W0FB55JHJ8dXVEW3Gf\nxL14U9sfDJgklgZD+jAr2zNA7Ir3saPyWLnAprbG1hoIQMjSKeedOu+gFZPNWX9mzk1zmDkzerJX\nCj76KHoR4fXX65TzseV6c3KcU3gSQVpWRFDk5EQLn9iULAfL5GqSWBoM6cEIkgzgrngPVATwF/gp\nyi+ERdu4dvaT2D0d+1KFHwpeQfLX4o0E+OjLj5j87GQ+yu6Fz/o1dlBrLj5HgfH5Iinl16+PFgaj\nR8PgwRGBsWBBxJn+0EORSdPVZObPj65rYhzQBoOhMZjsv82ImzF404eb2PDhBkIqFLXf55i7Qjjb\nN0xElv0ZURZZWYJIpL67UtFCpEOH6NxbgYDOKFxeriOyJk2KMx6P9gE6TcrLL2shZVk6XYubKDJe\n+2Say8Gk3bQWzHduSCepZv81gqQFKKssw7/AT41TYdFFPOlUAF1hcct4BGFYz2GsXtIbFfKB2FiW\njviyLLjyysiK9rlztYPdFTixQibueBxNxE2X4nXYeyO9XG3FsnS/rjZjkkK2Dsx3bkg3Jo18K6Ws\nDAKPFXFuh9ujtvvEV1eILFgBGyeiysezquZ+lO8ASC1Y1ch5U5n4q/cJBGDOnIgmMmVKRIiA1lq8\njvZ4pLI40RsuW1ur+0w1KaSpEd88mO+84ZhFqemhXh+JiHSOyYfl3ddDKbU7M8Nqf5RVllH67E7m\n33AZwVoLX9YN8JMl4bK8I3qN4MV3X4wc4JbpVVm6wuLXeTrNSoUfCgKoHuvpMTyfoqKI7SkQiHaw\nu4jA7t36Zon3hJrK4sTYJI8u3mgvr1nFhNc2P+Y7bxhGg0sfyTSSgPtGRFbE7Fua9tG0U8oqyygu\nLaZk8dtUVytsG4K1glQMB7RJa/P6DuGCWEAkwaPU6uiuggCS/zoMuxNfj3XkWDn4C/y6f+epKi8P\nsrOjz+3z6dfDD+ubJl7hK3dRo2VpIXH99XWf0Nxw2REjIo7/2Giv4mK45Rb9F+qmxW/Sd2ieHJMS\nrxRBpmgP/w+jwaWPZFFb4nn/7Xr2GeohUBGgxq5BFfwLrN8hISEnB9RxawjiI1Q5mI//+qROKW/V\nwuXDsXqsp9cvr2HXpm5QEMCXv44Rx57N2D5jqfq6yokGK4p6qvJZNrbtpF9BO+dF9I3ize4LdZ/E\nqqoi0WCxixO9msb06ZEU9ZYVvT/2przppvRMZubJMXWaI6S5vfw/jAaXPpIJEpXgfbzPhgSE15X0\nWI915blc2WUB48f0hO4zmR6YzovPjnHycAnYPtgyHjt/Le91eoLsM33YIRufz8fYPmOZNFCHX5WV\nwczHtMnKncD1uhPBTcGilKBUxDnu3izxJn2/X5up3LUreXmEzxM7aXjDhx9+WIcZz5qVuZvSpDNp\nXbSX/4dZlJo+kgmSI0XkBvTM5L7H+fydjI6sHVF3XUlPdw/T/dN5+cGtxHFtYCsbN6rODtlct+w6\nCo8shD1FURFUWVkQUjZKat0DASscfTVrltY4vDdLvEnfFSK2DT//eaSmSTxNIxDQUVvu9qqqzN2U\n6X5yzESIbGsMu83UmNrTk7xZlJoekgmSh4HD4rwHeCQjI2qnRFVSjNn+68n7uHtTdcS01TeSzDjk\nETE1dg13P7mar18qinJ6T5wI29/9lNVvvov69tvw5o9BWeECWLFrSOI9icWupPdqKrGTRlmZ1oSy\nnF+Puz1TN2U6nxwzYZZpjaaeTI7JPMkbYqlXkCil/pBon4h8L/3DOTjpcvwOfFfcS+i9YVDwCmec\nnsNrlRYhFUJEIgsXK4fwz79dTyioUCEQnwJfiBWv72Pn5mOAPPjge4gQNmu5BbBiiZ30/X7tqHeT\nOfp82rwVO2lA9HqSiRMja1gySbqEVCbMMq3R1JPpMZkneYOXBq0jEZE+InK7iOzCUzvE0DT8BX5y\nCzZhnXEPHXuVc9mpl5Hl0zLeEotsXzaCYL0/glAwCxUSwEYdvQ47FGTn5qOdnnT8g4jUKYCVjKIi\nPdmMGaMFhFKR6K2ioojj3DtB2Tb06NG2JhRXw2ro99PcfTaV1jgmQ/slaa4tESkALnFetUBPYJBS\nqiKTAzuYiPWhlG4p1VFeKEIqxAUnXsDeL/bS4YxaXl1Vg6p1Ej4evRn2DkI/D7i1TrRjvTGaQlGR\nztX1z38mjt7ymrQsK3p9SjKbfHP6ERKdKxNmmab0manvxJifDM1JvSlSRKQM6IyuXrhIKbVTRN5T\nSvVqrgGmg9aWIqU+5m6cy7XPXYuttMMi26cXhtSGHEd65ZDwokRAr363s9FFsnTYr89SzLhdwhFa\n++Udyt+rZOyoPCaNKaz3/Ils67EpUs49F5Yti6RJmTVLazDufm/alvr6jT13a/WDZIK2Mk5DemmN\ngRmJSDVFSjKNZB/QDTgKHaW1ExP2mzHKKsuY8vyUsBARhP5d+7N+7/pIo/y14dXwQGS1e8dP4IX7\nwc4m5Ktlv3xIcfFxHKhWqNCxID158dEgyy75iBuv7ZrwB5zoSdZr0gL4+uvoqK3Fi6NNXiUlkWJc\nsSYxt16K9xwNmVST3Yit0WcRj7YyTkP6qO933pYETCzJnO1jRORw4AfAdBE5AegiIoOVUuuaZYQH\nEYGKACFP/pEsXxZXDbiK8n3ldRI8ejUTGXYXAOqoN6DCj/RaRfl7/0tNzXGOP0U5qVYslj5+FMsX\n1z9Ru45Ud/WyN+WJW8fkO9+JjuYaO1YvVHT3x1Zd9EZ/WVZ0CntXcKUyqaYicNpKeGpbGachNVIR\nBIl+521dO03qI1FKfQbMB+aLyFHAj4A/Obm28jM9wIOJvKrz8b32DarHCqye63jo3IcoPLKQK/td\nydo9aynfV64bugkd7RywalATiiOaSv5afGIx9tQ8An/FWekOYUVSSUpPv/F+2NddB3ffrYXE44/D\nGWdAnz4RE1ZhYd06J+7k6NV0du/WCxljF0SmMqmmInDakn9gglNcujki31wy+eTblp+qm0KqgiDR\n77zNa6dKqUa9gJ4ptBkJvA3sAqbF2X8NsA0oB14F+jjbzwY2Ovs2Amd5jhnobN8FPIDj56nvNXDg\nQNXaWbNGqY4dlfJZIZWdW6NKlmxVa3avUbm35yqZLsr6g6VkuiimoyieppBa/dwvNfrzdMKvfnP6\nKaWUuuYapURc/SDkvJTKzdXnq4877lDKsvSxlqU/n3OO21fk1bFj3b7WrNHtE53DvVbLij4+2XH1\nHdvWaKnryOR528v/pjHEu18SEe93Hu+7S+V+yDTABpWCPEiW/feZJHLownqOtYDZjlDYA6wXkWeU\nUts9zZ5QSv3FaX8hcJ8jfD4BLlBK7RWRU4DlaF8N6LDjicDr6HrwI4FlScbZ6nGfSEK2IGRTtaOQ\n0qzJVNvVUDkEu8KPr9dqfPlrkWNfI/hKjV7B7iR09LJ131YmPzuZ/
t+/lg4LCvnmG1cr0eHBo0Yl\nf9qJNUXt3g39+sGLL0a3qy8vV0P9MKmsTWhL2kZ9NMcTaLz/RSbPW1oaMW0251N1a9CCGmKmjPc7\nr2+9VlswdSUzbRUBlcBC9MTdkESNg4FdSql3AURkETAaCAsSFZ2i/lAc+4tSarNn+5tARxHJRSeO\n7KyUWuv0WQqMoR0Ikng/xNIqos1YWUEGTpuG/0e5/FF9H/u9odrJXuHXnThO+BAhSjaW0CFrAdfN\nfoJ7bjkG9YG7flSxN7SZssrquCvtvUyYoOvEL1umTVE5OXDjjfrHvnmzDhH23jRlZTB8eOQaVq5M\n7odpDI09tjVMOC6Z9o8kMrVk6rxlZTBvXqRqp7e8QCZpLb6FdDzgeH/XM2e2LVNXMkHSFa1RXAJc\nCjwHLFRKvZlC393QQshlD3BabCMRmQLcAOQAZ8XpZyywSSlVLSLdnH68fXaLcwwiMgmYBNCjR48U\nhtuyxP0hVo7n4fvzsJ26JKFaYd2yk9iUfR0qPwQfnQTPzwbl0ynnJ4yAfJ3XW6E4EDxAIDgTGZWN\nmv9yOAXL+iOvo7h0MyvGr4grTLw3p0h0VuAuXeD11+NPyqWlul4J6L+lpa3nx99aJhyXTGtWiTSP\nTJ03EIhE9HnLC2SaxmhYmVy7k67+2logRrKoLRt4AXjB0QguAQIi8gel1EPpGIBSajYwW0QuBW4G\nJrj7RORk4C7gnEb0OxeYC3odSTrGmmlif4hF+UX8eUonrn0F7FoF+GDz5dh9/4ZPfPDcn3U0FoKE\nfHyv9kbWcVH4eIViw94NWD0s1OXFqIozdFGs7muptn0EKgIAnmSS+uSxob6WVXeFdFtLkdHanJmZ\n1o4STUTNlchx/Pj09d2Q8yabcFvbA0Ui2poJN5WV7bnAeWghUoB2cC9Joe8PAG9UV3dnWyIW4Um7\nIiLdnfOMV0q94+mzewP6bPNMGlPI5qugpESnhSdkYe0u5v91HscqR4iALtV71UXHseWNXO1XcQgR\nQoUUvQfsZ0f+XeFyvoKQd0gexaXF1Ng15Fg5YQ3F79eCw5vEceBAuOqq+n/Q48dr80Ztrc7b1b9/\nJHw4HTdCUybBVCecVM/R0LF420PmJ7N4E1F7TOTY0PO2tgeK+mhLD2vJnO2lwClop/YflFJvNKDv\n9cAJItILPdmPQ5vHvP2foJTa6Xw8D73gERHpgjajTVNKvea2V0p9KCKfi8gQtM9mPPBgA8bUJhk/\nHhYsEKprFFYWPHTtD9m8vA+rPG0uuAAKB36Jvc2OXv2evxaF4u1P3ibLlxVeIe8TH5s/3EyNXYO9\n+3scqDiL0m/vpGhyEUVFemV6SYm2eds2rFsHmzbpc3mzCZeVaROWO85AQL/y8upf6V4f8SbpZJNg\nsok9lQkn1Ym2oRNybPsJE5pnMoudiDI9ibbUxNeQ87Y1k1Gbob6QLiAEfOG8Pve8vgA+TxYSBpwL\n/Bt4B/ids+024ELn/f1oZ3o5sBI42dl+M/CVs919HensGwS84fT5EO0k/DcZsaGAa9YolZ2tww2z\ns539q+5QXFWkyPpKhwdnfaW4aohiOsr3B58aPHdwOITY+oOlrvnnNSpn0pnh9rkdglH9d+zoDR/W\nL59PqRtv1GMpKVEqJyeyzxtW7A2HBN1PKiGhiUJIvaHMseGV6Qo7TTWE09tORI+tIf1ec037C/1t\nS7SGsNq2AukI/1VKNSg7cJzjn0drM95tv/e8/0WC42YAMxLs24DWkg4q4j11uaV0xYml8xf4sd7/\nKuycx1ZIxXAkfx25Vi5XDbiKbf/ZFjZlje87HlYfQ0koF6UsamtVHafs3XfD009HonFCIb3N54uU\n8XWJt5I9dqW7mxpl/34oL9cr4r0aTrynZkgcEVRWpsv/uvVZmvKknerTqtf0p5RegFmfthXPf+Bq\nb63ZDNRUWuvCx9ZsMmpNkYUNIhVp09Zf7UEjiSXR03PJkq3KyqlW+CIaiXX1UDVmyut6kdPuNeqO\nVXeoNbv149iNM3cpfNVag8n+Si+EXBNf40j2il3oWFISrZVkZ+s2sVpOSUnkmHhPzYk0gPAiTl9E\nW0pV60n0RJrq02p9GlJDz5lpWuLcZuFjw2mN10WKGkmLT/LN8WqPgqS+H92N85bo1e5XDdGvrK+U\n+ILhFfNKaYEy5p67lGR/raBWr5A/6f/UmF8/F+7XKwTcSdydPGOFweDBdX/4d9wRmeRFdJvYPt1j\nY812sZ9dgZGVFRE8sf2fc07jTWfp/P5bEy01zoas9G5NfbckrfG6UhUkSaO2DK2DssqyqDDd+swU\ngeBMGObk1Fw9DewclLKorQlx7ewnIb+Mqc9PpfaVX0EwG8jSc/pbF/LPXULIjpiRvCilzUo33AD/\n/reuW6JUpC58rCqelxcpB6yUHue2bRFzl8umTbBxY7TjOioMukj3P3WqNiddf73O6xXb/9ixzRe1\n437/bqBBIppiqkiHmaOlopQaE5ab6rW2RYd5KtfXFq/LxQiSNkBZZVncMN14tt65G+eyYa+n9kpB\nQC9WDOqP9p4BzHziKWo713r2Cbo4llVHiFhWyPGDCKDL937+OSxfrtu5deFBh/rm5enyvn6//uvz\n6cne59OLGV3h9+absHCh3hd0xhYvvbxLVVX0wkjXd+L2L6JT2RcW1h/Nle6bdcEC3Zc3Zb733KlG\ngTU0Si1VWmpyaog/pqHXmqjv1upfSPX6kn1nsRGSrekaW9zs1Byvtm7aumPVHcr6gxWOtrpjVXyd\nd83uNcq6emjErOUmcjz/am26chM3+moVp9+h251/teKkxXq/BJX4gp4IrZCyvjdXMWiOwvpG+ayQ\n6thR+wfiRSHF+ipKShKbVeJFdWVl6W3xfB3xTDQlJZHItXiRYU1JDpnS/yWJKSIVU0WiMabTzNHa\no5TSca2t2dSYrutLFCGZSTCmrfaDv8BPjpUT1kj8Bf647Uqf3Yn91+Xh9PIy4WxU/hr45gidRsVN\nlRay4LVpIDb4gogISglIyNFGdFuxbFTfBdD9NXz9HmOEbwbTL9fndp/Ec3J0l27UFOi/1dVaQ5g1\nK6KheJ+gYpNCnnuuNpW5UWDV1dFmmHhJ7a6/PqLNgL7F3OMgcTRXuqJ2kj3tp6INJDI9NUWTiH0y\nz1SUUioaQHOZdFrzQsN0XV9tbeRza7tGI0jaALE13RMmW6w4UwsRJ/R3dIc/Mer8ch61t7PhFZtQ\n0I3mjhS7kpAvIjyUHd4vAhf86GOWF2yiOujD1+N1xp77b4oG+oHoSX3btogQgUh+rpdf1sWu4qny\nsYIhEIBnPLmmLavuDRcvqV2sL8eytHmtuDgiRHy+ujfw3Lnw6KNwzDE6EWVT/CSJJspU/CiJJplU\nfTCxNFcKkFRLJ6fLpNOW/QvpCLv2+3W2iBqnvl1ru8YWNzs1x6utm7ZSZc0apXI7aPNUboegKlmy\nVd2x6g5VsqFELzzsudJT
lySk9EKTasesVauwvtEvqVHZubWqpESpM658XsnV/0/JdFEdZ3QMhw17\niY2eOv74yOdUQ2OvuUar67GRWSUlOhrLGyLsHuM1p7mmsZKSaFOCz1c3mqukJGIi8C7oTDS2+iLK\nUvmfJDO5JOqzMeaa5or8SeU8zW2yau0mvKbi3ifXXNN814gxbR18FBXByn9ZOkVJ721c/+Zp1Ng1\niAj2MTYcvxzeH4pWRBUgoLIBG3whGPInqO4CQG3XTVx73UPYtWeDdSZMKOZA/usEKgJ1NCK/X0du\nuU+Dv/mNrqZYWxtfs/DifWq1LL040U34d9FFsHSpfu/WQXEXL3qf8rwOfjenlPfpdPr06KfAxYuj\nx1BbG99MEPtEPWtWJO1LqqlRki2WrO9pO5G5pr5jmuvJPJXzpGMsDalx0poXGqaD1nx9RpC0M9wf\n28zVz+o8WspGlOMbKQiAFQTbclo7Ji6yIBSEsl9FUtL3A7vWCpvJqPCj8teyv3p/3HPG+i/c1faS\npIKNd6IAcDP+FxfDN99Et128OCJI6ptMEyUsdD/HFujKzo6fHTd2Il+8OHU7vCuE6jOvJTP9xJuI\nkx3TXKvXUzlPU8fSUjVODA3HCJJ2iuugP/Be/3D6ePLXQv/5sGESYIFbx12CICEtRFzBAVqgxFRh\nLP+wPLymJe+QPKq+rtJ+m6Ii6K637372UoLBniilneGJnsLz8uJPFIFApK6Jl+98JxJinEwz8D69\nxWo93rQyw4bBnXdGhI233bnn6jGBPs/Ysdrnk8oTtiuEXCEyYkRdzSiZgzjeRJxKwaPmenJN5TxN\nGUsg0PAaJ97fVrwgD0NmMIKknVKUX8Ssk19n8m0noGqzwlFcPU76lPc32dpF4gtB0R+hw+e60uIL\n90cER99S/arwR4QQ0O/ofhSXFlMdrCZECEHIsXJ4YNQDXP/C9dTYNVj7l5OVvQKw6n0Kd53yLn37\n6r9+f2R9iJfHH48IAdfDkYrJxzthexcwWhb06ROJ8vK2s21tVrMsmDgxErdfWJjaE3asNhErROK1\nif2e3PPcdFNqx7Q3YiP7QH8vDdECc3Nbb82R9oQRJO2Yqh2FWjAoAVvh23o5e7ZMgJBPC5Fzp8Cg\nRwDw4cN39Nuc+PlEdnZ+hGC313XtEkeAHP+t4/nN6b+h6usqqm0tRAAUimq7mpmrZ3IgeACFInTM\nKkbf/kcG195Y76Tu80UmiFAINmzQk8eVV+rV83/6k9ZovJFZrgCBaHNRPJOPe768vOgJSUT3a1k6\n4WIwGPGB5OREm9RsW1VuvR8AACAASURBVJcb9oYhp7KosCmmn/rMV81lukpEcy7680auzZ+vyz3H\nW/jp4tUCoelJPA2pYwRJO8bvh9ycSB2T8757Ac9sykY72Wv1+hK0EBlx7AimXzmdovwi/uflN7jn\ntdXhfrJ92fzm9N+wbOcyNn+0mZAK1TlXxWcV4feq8jSeef9z/vvdx9i9dBjb9n1OVd6z5FWdz+7d\nhWFzUVa2zaipy/n3S0PZsbVz+MYvKYEOHeChh3Rt+PnztUM8VkPxmotiTT6lpdFrXbzrWUBPLrt3\n68nJPaaqSk9SP/whfOApl7Z3b/3fc6KJvzGmH9dB7/qNYtfTxDumuWiJ6oJFRfr6g8HkvilXg0nm\nl2opIdyeMYKkHRN5ehX8/mygK8sXQ3WNQiyF79g1hMQix8phul8Lkbkb53LvmntRlaeFzVq1+WuZ\n/OzksBaSkMohsGU8bL6CkJ3FqpctVokN1neQURWoF47DF1JkWcIFl3zEs1/cxtLAqbAt11EztOPC\nNVlVVcGcOZGU62++qc1bLoccEnkfa/KBiGBxF0d6zUuuKcwrbNzJ5fe/h5/9LNL3VVfVf9nJfB2p\n4k7UXo0oFNIaVXORahRZdbX+PseOrRsxl+pEnWrbVM159UXyuedr6TK7sZUy24tQM4KknRP79BoR\nLDnQ/c6oRY5llWVMeX4Kod2DYcGK8Ap5JhQTckxcCakcoo8J5uKujHcXPWIr1PaLIJhDSAk2sPeL\nvQSfuzemfaTOSeziPFfr8PpOnn5a5/xytY1Zs7QGA7rMr/fpNN7iyERmIjcybPHiuvVS4pEuv0W8\nIAOfT19bczB3rk6M6Zr9Zs+OvvbYJ/6XXtLRb64vIjY8Ol5WA68zPNVQ6oaY8+Jpa+45d+9u2dXv\n8YI+XLNqW/fjGEFykBF9oxVFrQkJVAQIhUJaE/GskKfCH/aVJGTLeEcouNFgNuDTEWFWLfReDO+f\ngYSErGw45rBj9DnC7bVGYllaA4iXlM5dr+ItlvXNN3DttXp/7M05a5YWBi+/nNhe7r53He5eYZJM\ngHiZMEH7Urp2Tf2YWOIFGSRbh5MuyspgypRIyplgUAsVbxJMd0KfPj3ynULku/WGR1dX6+NDoWif\nlTuRuselsj7EPXdTtDx38vZG4TV3oEKioI/24MfJqCARkZHocroW8IhS6s6Y/dcAU9CzzpfAJKXU\ndhHJA54Cvgf8VSk11XNMADgacA0A5yil/pPJ62gvhMN2q86nakdh3fxXBX5ys3I5ULAKZdUgIUH5\napFeq/DtGUp25dkc6L4sSqgIos1gm69AaxYKfDUwYB503aT9ME7Ul3TdDu/5sTt9yn+/mYovK49Q\n0NZ9OMU4bRs2bf+Uj/6vjK5LT2b8mJ5RE9msWTq1ycaNkdBQ92/szVlVpSe9eCG7jXkyjvouY473\n2uXrcwjXR1GR1gK8WsFDDzXPBBMI1PVB2XZ8wet+p7G+CG94tFs9MzZbszuResnk+hDv5A06Aq9H\nj5YxJ8VGoXkfejJx/c3pD8qYIBERC5gNnA3sAdaLyDNKqe2eZk8opf7itL8QuA8YCRwAbkGX1I1X\nVvcypUvuGlLETUVfXTGA0IJf4AspcnMk2tTjyemVd/47VO0oJK/3OyzbNZqlN03VJXx9N+q1KH1L\nOWNoDq/veZ3qCj+EstDmKVsLkfOvrTMG1X0NqBDBBStYZefopJEICi1EdDiwYt2qzrBqFKB4dFaI\nVwK+sP3dnfTd9m4El4heWBjv5pwwQf91tZx4IcgNifBJdDzUnTgbeiNPmpR6iHE6idX2XHNVvAmu\nPl+EO3avgLYsbVZyzY3eBaiprg9pynV5zY4tmX491kQHmfs/N7c/KJMayWBgl1LqXQARWQSMBsKC\nRCn1uaf9oTgr5JRSXwGvisjxGRzfQUWgIkCNXUPovWFhX0VcU0++x9w1BqCQxT+r8pi6LL2gsXwC\nG7IvpLprdaSuiXcNSiIqhkf6clfcI/h8cOyxsOsdBco1d/morVGUlkaid7yhw1lZeuK2LB0y7KZW\n8d6o3pvJuz82BDnWL1PvdxnneIh+OncTRzbmRq7PjJOpp8xkjupEY3THE2+7a+5btkxHx7nmRjcS\nzxX47v8lE7R0uHS88cQzraabdAWApEomBUk3oNLzeQ9wWmwjEZkC3ADkAGel2Pd8E
bGBxcAMJ7lY\nbL+TgEkAPdy8Gwcx7kr36l6rCWXV4AtZ5ORIwonTW5Fx7Kg8XpxXE1UACzubr3d+D/b0gh1j4bT7\n9cJGx4wVD2vPUE7M+T47syAYrAWftjf48JGbI5z2o1fYdddpYOfGvwZ/9NNlohT17vt4IcGlpXpy\ny8qKPHn/8pe66FaqE02icXgn4EzcyI19ykxV+DTUD+FdAOjzRZzz9Wl8sZF4icaUrhT13usqK9O/\nidYgUDJNcy9cbXFnu1JqNjBbRC4FbgYmJDnkMqXUByJyGFqQ/BSo8wislJoLzAUYNGhQnMKxBxdR\nZqvz3onrI3GJrcg4a+QsfJdPJlR+mfaFhCyteRzoDCvu0Ae98304/c4oIfKt3G9R2LUQdhfx8dpR\n7Hz5dN6yffisEH1GruHEs17nxLwTKV/bhX5D9nPfnh9C/weiU7hIkP7ffwsojCxQW/o+FLxC4agT\nYE9RHUe5S16enuDcVeyPPhqp6eBqIaEQPPhgwzUG15EfG9XlTm7790fOna4buTHCKV7iyXSlDnGj\nzFxB4TrnU9H4kmldXge5q216I7/cRYreqCd3TPGuraFCuCXXm6QjzUtza2KZFCQfAPmez92dbYlY\nBMxJ1qlS6gPn7xci8gTahFaPLcXgUtdsFR/XDGYrmxq7hsXbF0P3Muj+WnTalMCtzhFOqO+aG+Gk\nZ8LC5NPqT1n1ag3Zj80gWGM5dnHBVjbba17grbdXIxXDCfX8Fy9XrtVhx6CFlO2kcDl3Kou/eJ/C\nSr3Ohe5lLOhcTM3HNcybMRQpXUGw1qozObj+FHcyO/dcHS7sYtsRH0usTyPZDez11axeHYlsik3R\nIaInQm8p4qbc1MmeMuNNft5J3RtJFW+Cro94fcdGmbnO+VQ1x0R4BZRt6wWqbgAD6O/Y62eJtwA1\nVlA0RAi35HqTdKZ5aWykW2PIpCBZD5wgIr3QAmQccKm3gYicoJTa6Xw8D9hJPYhIFtBFKfWJiGQD\n5wMvp33kBzn+Aj+WzyJkh7B8FmP7jGX17tVUB6vx9dzA7MlXAL2YFXqLHe98n3BKeoQ+X13Ldjym\nrQo/tbXi+EOcsGBRcKAzob++6KxVuQk18hdOrq8cXbVx0CO6OmP+Wl5+z8fq3atZMX4FpVtKI6lY\n3jkdagQVx1HuzSososNyvYWBsrP1Teo+0cYrhpXoBo6dlNw68+46BW/0mFLaJxAbGeb20xDBUt9T\nZiIzk3dS90ZSuRP0vHlayHbtmlio1Ldy340ys+2Ic76pT8N5edERZN4V/rv/f3tnHh5FlfX/z63q\n7oALoHFhSSCIjCATIYBIRDEIKigq7zAzOi4JiCCISnxnhpH3NwuOM+DgqLiLC0het5l3GFERkAEJ\na9iTGBEcQEKCiGIGVBTS3VX398et6qrudEggbML9Pk8/6a6u5VZ15Z4653zP91TUTNaHQur9gQzF\nwYR6jnZ+Idmxf2gyL0fMkEgpo0KIe4D3UXGKqVLK9UKIP6KapbwD3COE6AdEgN34wlpCiHKgCRAS\nQgwCrga2Ae87RsREGZEXj9Q5nKwo+7IMy1b5C4Eg85zMpB0aR0yF35wNf/2r2i4lxWDMTV0YXRYg\najsFCRmFBIMSK2pjy4ijMGx4kvVurUrxMK8ORQJNK2KejS1twlaYgtICppZMVRpgQOC8pRjLJNFI\nTWpvoqpwbq56uR0HExPztf0Du0Yi8UncT+N0QyxunYKU8Yl3OLB8y8EYltoK7vyyKv4wU2ISPT8/\nvg4nHPZ6vkybBgsX1m04/ZNabSyzxHEeTKioqqpmPY1tq3Ch/3cNBlXNUVaWMtYHqhE5GON2NPIL\ntV0P99gHknk5HnFEcyRSytnA7IRlv/e9H3OAbTNq+arbYRmcRlK41e2W03Y3YkcoLC9k3OXjkrb4\n/ctfoF12GTPmVDF4QCojBmWS2W0xk5ZNYse3Oxg2cCiZwwLkPz+TVR9/DmuHO4ytqApd4STdP88i\nVociosiMhbFjuArDQJyBG3bDheQONeNYWhMnwqpVXi4kkV6aOIH4P/v/gV2V4cQ4vDtBupOSX68L\nvDqFPXugpETlUDIza/a4r8uw1Pfp0++J+Ckn/hoQ/6SemQkPPACLF9fcV21PvnVNrHWFUA42VJRI\nRQY1oZaUxMvKu4Wr/nyKq9QMNUOJ9Qn1uBP84cwlJTtGfUQ5a5N5OV4YaH4c82S7xvGFWHW7A1OY\n5GTk1Lp+UWUR+ev7Em4VZsn6EJndFsD2bJoveovmQOaPgbQi1p3/Mwh2h5I8jybcf4wqWPy6tTIw\nbh1K1jQvaV+ZTcfvRjHmpi5kdt7L1JKpsZDbzr07mbT3v2ie2ZyZG6/h8VHXEY0EkLbXTSsUUk+s\ndeUnYon8ApWUd1WHXbHIxEnWzwTyGwF3EnMnCleWJbF+4ECG5WDCGH5Pyt9npbYaEIDly5Mvr+3J\nt6Ghqto8mtomRf9v4Tfkif1gXOaXv+DQ3xitLsOVePxkxASX6deQ+pPE49QVOqvN4B0PWmG1QRsS\njTi41e3V0WoMw+Dpa59O6om4SEzMF8zaxNT87FguYto0GPCnJSrUlb4C8vrG9zhxhR6NqOqR4q9D\nqewJ0+fzsRXi7n/a/PL52aoKHknUjjLzk5neeoU3QtifixEIAQMGePmJQNBi6GOvkTuwfY1z8tdD\nuNIdLtvLjcOnpiZ/yk2cZEeNqtke1u0p4q53IMNSnyR67PfKObjEdmFhfMjIMOCGG9R7V94l2QTr\nPiG71+hgJrBkHk1tk6L/2MlowsnCaIn7rk97XldXzM3tuL+Hn5hw992egaot7FcXkp1nfUJndREn\njru8SX0au//QX926datfp3sNKaWUyyuWywmLJ8jlFcvrtW5oxBVS9P0fGRpxhRw5tlwK4U7DUgoh\nZccBhZLuz6rXsJ6S8ajXsJ6SwHcSEZGY+2p+3/1ZCVG1LxGWou//eN8l7oOIVJZI/TUMWzZuLOXI\nkVKapozbR+M/NZbLK5bHznPKWx/Kxo3VeqGQlCkp6n3jxlJOmSLlhAnqb+PGUhqGlIGA+pz0eiyX\nMhj0zj8lRS1bvlzGjhEISNmjR/w+li9Xx1m+PH7ZyJHx41me5CdJtm2tv9dydY7u+IJB79xqO3/3\nvEH9rW0cdR3XP8aRI2XsPjFN9XnkSDUeIdQ46ns+7rbudfafn3v9E7cJBLx1DMMbm3uu7vn67+MJ\nEw7+XCdM8O4/0/T2caDfzH+v+K91bcuPJFD57Drn2GM+yR+NlzYkRw7Ll0uZ0igqhWHJlEZROWVK\nzYnKNN1J3paIas9g9H1AGRFnkqfvA55x6P6sxNjvbWfuizcy7su/DyKSdnMkA4fLwFW/k1Pe+tCb\nWIQV24cx3pAdn+4ozQdNaTxoyMBVv5PCsNSkYtpy5Mia/+QjR8ZPLO4EXNd6gwap5f4JJfH7A00m\nfqPsn4gS162vIXHH6J/Er77aG5sQtX93
oHE01JilpNS8NiNHHnj/ySZW/3UWIn4f/vNP/C3d/U6Z\n4hkz/zrJDFKy8xo0SB3fNbh+I13fyT/x9/EbpWT33JFEfQ2JDm1pNAiFhRCNmEgbohEVViks9NhR\nO/fuZOarZ+PKxCODsOYulSvpP0ZJq9iA4fSFd+XorUZOmCpJ3sSBQNC4/Wq+XxSGKCp533EGdH8J\nWxgUB3ZRXJKNLW9RDDFAYGBjs+GrDWonlT2xd7cAEQZhYosITXrMY9xQr9DGZYH54arjJqrbrlsX\nv54bLnLDGf5eI6AYU++/n7zuITGBnkzc8FDi5rm58aE0N/dQXa3CeC5jyv2usDBeaNEw4sdxsGPw\n708IlcNau7b29Wvbf2Ioyu2P4pImDEPtO3Ff/t8yURizqsoLbfrXefLJugkFOTkevRzUGNxmafXN\nLyVjHPrldtz6n+MNxrEegMYPG+4EaZrxzaGee069ml86D4QN+MUFDLCCsO9sgkMH0OO2WRhDrlaG\norwPwk6JTfxgQaA6qX6XRLKv+QfKIBm22mbuE8o4SJsX1r3A8zM2OtLoBtgmrf5zK8I1aq7RWjtc\nfe72EuT15fHtP6eosigmqVFQED+RuoWGbm93l3nVt69qF+zCNKFJE7WPsjKlPdW7d81r6E6CRUXx\n19Xw/Xe67DNQ+3PXTRY3rwtuXuehh9TfESNUbsU0vbqb4cO97xInrkSV4GRjcK+d/5z85+beM40a\nKfZVKOSdrxAqb5FMG81/jjk58Tpn8+erfNi996rltq0++8eQaMSGD0/ec0V4fA2g7p4whYUeU9CF\n2wIgO1vlyOpbjOkf39Chitq8f793/lOmqHst2bU9VtAeiUaDUBejJ3dge16cex/Wu0+ixBgFCItg\nSDBscAdyB15P2ZdlFM9eBdIg0G45cqkkEnZowY7ScJw3UtkzlrCX6SsU88tfk1KeA+krVEvgjIXK\n63HKWvZt6A2Vz0HnV+L7rtgyVrtiSYOCWZuY/svsWCGfO8EGAl7tgr/IEGqyp2wbJk3ytneLHMeO\nVderuNgrEExsvJWdrfrWT5qk9i2lMkqJT+Z79nj7Pph6g0RmkPsk7hYrtm7tfZ+bCy+95PUqkTI+\n0ZuYPE5NhT59vM+JSepk94xfNTiRLHCg5LR/wnfZdSUl8fpe/lqgxH0lCkbWxhir67rm5MQXvApx\naC0AXONo22p/WVnKMPo9JFkLieBYUoO1IdFoMA7Ez89Oz2b48AKeP/sKKL8Cccp/uNi4i64tupLb\nJRcoIn9uPpZtYRgGT424heIur/P8jI3KCCQKQMZCX6FY98b2XXewaVHYkVWxFJ24sqfaNn2F8lje\nexZkgKqNnYBOUJwHA+6NUy0WbRfD9ksxtvVl57lX+2ilfm9KxKig/snPXxDnGhH3n9/9605szZrB\nypWO1MoDu1m9pCm2bdSYHJo188JMbh2F/8l80iSvmBDUhJOsWLE+k8uBJuvaKtj93/sNQ0GB1+mx\nupqYenPcfZFd07jURc1OxmJyjRt47Do/TdhfMOoa37pCTe5Y6hKWTNwmPz/e8B8qXOMohPewkYjE\nMOexpgZrQ6JxxJHbOZfppX0Jt16F+dlllBZ0Zm1ExenzHt2k5O2xkbak+PNicgfmMrVqOGHLebxz\nKcIufN0bzW196Tuoii27r/ZEJdcOVzmYvL7KkLgeC77HVyuolvvoyAITUfABthVktikIBMCypWNH\n1LaRiM0Dj26k/5i3ST0llbmRT1lyzx+QVgjTlHS77kNyspvy1B/bxYobXdTwGtKKKOkwDrl8NlhB\nAkGDnBwztr5bmJeYy3A/79jhv8qSd+d9S7NmTZLWRdSlrVWXZ+mvYE9GA/YbAjc/5mLnzvjPh/Lk\nnMzQ+I1f4vm5Y/UXjPqp2PUxuAcybsnWLymJX2fGjIPrsgmecZTSM5KhUN09XGoL/x01D6U+Gfkf\n+kuzto49prz1obx6xEI56LbP4+iQI8eWy9BDoRgLK+WhlBg1d+S7I+WgR/4Sz/oy9kvMfdIwLRlM\nCcuxU9+SI98dKVMeSpGi77iaLLBkDLBaWGCi77gYe8s0FQNHGNH47WLbZidhjUUlZ5VJ4+LnZe/R\n/ysHjV4pe9+yTJoBSwqhKKdjx/pYOIsnSPNBUzKspxR9x8mOQx+XI8eW12Am+Vk6/s9Tpsj4cRnV\n0jDtpAwml8XUUNpofSioifRaP+PpUCisdVFlD8RiSna8ZNe0PufkblPb+ur38F61UcTrOtdk462L\nBp643aGwxZIBzdrSOF5QVAT5t2TGnhz9mki5g9pA1R1MWTslVmjol2S54pX1WJaTWwGwQwy6dSc9\nOrUgteN68tffQrgyjBAC0Va1CI5Vzjf+yguDGVHo8Jbax2lfQOcCRPpKTCOALW31z3DesjjtLk7b\nCbiMMxtXmBLbhPIrIL1IMc2MqGr4hQFfdcL+qhOL11XDkD7wRR+weoA0sG14/HGPETX59YGEzIeo\nTl+FDWyYvoANVohpT1os/MCsIW8C8RX1xcXQ5qIKtlXthNM/g09uwLa9hmXuE3uitlZ9C9lcuXZQ\nsfqqKk+Ysq4n365dYfVq78naPWbik3MyLbPEMRwoZFOX7Emi5Ehi7sNtB+B6j8nGlOjZdekSv757\nbq73kay9QH1Rm2d4oFBbMlmXo128qA2JxhGH/6aGJH2zK3OZXjqdsBXGNEwqvq6gqLKIsi/LWLzN\nAi509iQxTcHY0S0AGP9KFdV2V+y0ZQgpkNKCLtMBEJ1fRZZfEZ9Mb7UaLn84Ni4hDG7qdBNvfvQm\nNjZ22jLuf/Zdmu0cRGrHMka/8hySyYBUisVCKkqy6aMql+bCmZth14XEkSCtoBMyU8l+YQsMw4zr\nY161QYlh5r/4N1YVDoiNNRy2kv7ju83GUqsGcu9NmU5itzWY5yK6TkVu6Y9hmwSCNhXNXoO09ixY\nkH3QiWNITmc1DPUQ4H8QSOwEOXmy18PeJSgIodaD5IKXkUi8YnFt987+/cnzLQc6B3+SPVF+3pXV\nd38Tw0ieU/GPwbKUlpt7PRKv54gRh2ZAEkNlB8rdJG5Xm6Gtq3r+cEIbEo0jjmRMmbinSqfpVkFp\nAdNKpvHiuheZXjqdjG9/AXQHI6x6wgvJ9ffPo+yLdPJvyWR/dW+kMQ8x5CpMYRKdPheiITBsZPN1\n0HiXQz2Oghmh+Y8/4QtHYgVUWPf1stdjny1p8fj2n7NoyCIKZm0i+t5fwTZQisS2qnXpOg2zy+v0\nSr+CxX8cn9DNMYHi3PgrSF+ByLuKiyO/pv0p3XnzpZYIIBCUpKaaFEzIpnjqJRC21XGIYhhGbOJ1\nUVRZRM6fxhHZ0gvjmyLsyI9xPSRhp3BVi18w+O9bKC5qwtQ9ebywLsqL0/K4/oK2DBi8i52nVbJj\nVTbDbj2j3jTURDqry+jyPwgkPvnOmJGcvZafr/ZRVeU9OVdUKKmSxMZY7vFdA+BSraVUk3x9dK8S\
nJ9hrrqlZw+OX1TcM6NdPtXtOzKkkenbgrT9+fN31JbV5XH7pmcQ2AwertZbM88jLU3+PSp/6+sS/\nfugvnSM59qhP5XMsZzAeadzZSxqh/XHyKeadl0nzQTOuEh2iUnR/Tg4avVJVr+PmDSISozr2V1w/\nIi4fYow3YsdKfHV8uqNsceOTvtyHHZd3MR405NUjFkohbF9MPCppss23bsSr1B+Pyqm4Ui4iIsks\nkMGUSEIFdUQKw0oqQzLy2elxcjKGGYlt589BTFg8QRp39lLXLCGvhIhIM1Qtp7z1Yb1+L3/1OSSX\nRzlQbD4QiJdWUSoH8bH/RKmSkSNrxvb9ld6Goart64r5+/NDhlGzOl8IlQNLlo+oTZ6kPnI1idcw\n8doky7P4r1Nt6gXJ9u2X7qnPORwK0DkSjeMJdcWygVhf+bAVRmy7EtsKqVCShAvPP41P0ouwpIXR\n5gMw/h/YIcBAFg+BnA8ImAbRqEQ9qZsql4EAGUF+f2bcsbq37E7L01t6wo8+bPhqA5z1OpjDnPoT\n5SkgJDT+CoGgS889LJhmYUUclpUZgd4PqYJIO6S8qIxCb6flVyhvCadhyUe/IAKOEyMBW3lSUiTP\nZSSE6W64ZRfNT1chvqwsX5FeRg6i/DsVWovllYLOMUysSITRz/4fmd32HlCMExQLaudOVZ3v5kgS\nn6zrqglxn7T9T/9+9lQirRhqPmG7lfhuXiKx5iYZUjuWYQQ6IAkgEDUKShs1UvU8Y8fGs8z8dSQu\nXM8hN1ddBzcHUhejy59P8nenDIWUt+B+54bUEtsR17bfRA8mUahz4sSjL+6oDYnGcYO4vvIXDSR/\nqXD+WQKMuakL+euVkQllrOPiGzezeEYHXIPR3OjETWMX8dqEy1C5ClcF2PZyGj6UflHKsK7DmLVp\nlteEyw+3/mTDYBXW2nqNMkxznyDzoiBP7bgFmdcVo+R2mjRqyp4LnlTJ93M/ilc3dpFRqKrvbcfQ\nSdRnEUUYNrLLy9B8Hcx9AmGnEAqZcRNK7qA2THvSIhy2CIUMxo5uUSMRrNSNN9Grd5TFCyNguaXi\nUe96mBGiredTULorZkhemBnfT6a+NQn+7caNy/R+R99DQzKj4uZWXCXlRYsOrITsTu75+SqBX1vX\nwNhE27GM/PWXYN3eFZb9GmvjDfip3zfeqAyIe/3c402dqoxnVlb8MiG8xmXue3+LZT8SE/P+2iKX\n1uvW2PjDvffe6/WvqYt0AF6dUjisjIirLg1HpzFXIrQh0Tiu4O8rnxn3pJtJZjevSyNXdqLPe2pi\nNQKSb1vN4rXd90CHGbDxv7wdtloF/e8HoNnqv7Cn+UxILyJshSn+vJjFQxZTUFrAzr07eeeTd7Bx\nCj8qeyrvIhpCeSQAAqwQ5aUZ7L94PzJtGaQtY4//BNwiSHcffqNy7WiY/YyqaTHDTj+Ws1UTL2eb\nYIt/M+yMAnIHtVETXWURBaXq8XjMH35FycJ2scnG7YwY629u2zw/9XuMZiYMuA92ZsHec2HTtWAF\nlOHqPwbSi5hWso7czrmUrT2Nu37WDqyOzJsaZssLMylZ0Yzq8BXYlqj1ifaFmWXc9fN2EO3IvGlh\n+HsZIwZlkoj6GJUFC+InwtrqWUpK4nMUFRWeTIifUGAEOmDd3hVbWvDJgLjUlRDQo0d8UaM/kT5l\nildZ7r7ACXD63td2XWojluzZ4xUr2rYyVm7Hzp074YknDmygEvfrwtU9S8zFNKR/zKFAGxKN4xY1\nqK8+I0M6PPnGx4x+9v+w23zAG3ucGaXXI87EGXSaZykjwvQFfG2ngHEP5PVFpq9gWsk0cjvnkrXj\nOV5+bTcX/egNKcqgMAAAIABJREFUPky7F7uyBxT+QYWSCECsJ70EYbOn+VvEJ9aTIEkF/tm932ZX\nbR4LXtfHrBbvk7/uZRp90oiiyiIidgTW3Amz0xFIlixRT9cuO8q2UV6NsKB4CLZrNK4drYouP7lB\nnYeMqM9AxIowvnA829+7DayOKmQWhUl/boy48A2k0QODxoRCIqlQ5CMTTlVGVgYgKpkxp4oRg7zv\nEycx/7K6qKnJwqCFhfGTqG2rpLjrMYTDPiUBAhjbrkTaFtKOL0R1n9Bdb6pL23RCoXZxFGnb9rwP\nV38sGvW0wKSs/UnfH1ILhTwVhIkT41UKqqqU/pq/iRrUTqlOlvB3r0NZWfJk/QlT2S6E6A88gXqk\ne0lK+XDC9yOB0YAF7AVGSCk/FkKkAv8ALgZekVLe49umG/AK0BjVxneMkxTSOMlQlToLedkEbGkh\npDNZpK+AIX0wtvVFtlmITF+BWDIOaYWQ0kSQgizvA+kriNpRHnhAsvh1CTSDxaMwLtuLWHkvMhIk\nlhvx50h+NCtuDKYwVR2K37BU9vQMkU//a1f6CkjfVVP2xdlGbOvLgk+b8vxnWyHDgPTF3v4ciReJ\noLo6nh0FNue22cMX4c3Iz7oCDt159jNw7WhEIIKMSp98TDZ2ehHzt86H078Dc7CXC/q0L3Lb5dA/\nn+5nXsvkkYNqhI/69oX91W0d+xqFQITBA1JjdSeJ9FmoSRH2h16SNQxLhD9cI4RSHJC2cJQHPEMh\nBKSEBJPv/hnFnxczbZkg4mxz/fUqpFX2hc+bCoQZ+8ctfLOtXY36kqoqNbZ77lW5K8sCIQSmqb5P\nRs/OX98X6/auGNuuZPLdPwMymThReSSuwGRKitrv6NHJJV5cSnV1WGIGojz95kZGDMqM5W78rZ2l\n9O6FY9nw6ogZEiGECTwDXAVsB1YLId6RUn7sW+11KeXzzvo3AI8B/YH9wO+AHzsvP54DhgMrUYak\nPzDnSJ2HxvGL1FNSMYSBRJJipnDvJfdS8nkJgwcOJvOcTArLTyX1lDyKz2nEtGWCaAQCQZDtlmMJ\nE7H9Uha/cYmzN+Vx2B/eDNEg6l8jCu3mc2a3QkJf9mTnkv6w8Qb1OnsjXPIk19+6C4A5m+dQbVV7\nnogbEhPRpDmaGNxalOKh2FaATZggrJgXQ/oK9b0MxMYohGDwYFhYaGFZ6ol759YzgG7E+t4jQBoY\nOy+mSY+32VMVUp7a2uFQmge5/bCRUN4bBuTDxz+BT/t5hm9fKqXtb2bmovWMH6/CaZkDish/OIX9\n+7OQUiAMSbtuFfxk1IfMWRbi7smdsC0j6dO1f6IrLvaoqYnil7XlY/zhmj1iC5N+30KRGqQZO99g\n0Ovjnp2dCWSS26WmdzT+riqIdox5UyVbK3l/SrukBX+jfrONSKQVrmfqei3J1IDdbqF22jJE+gqK\nPz+f/FsyPa/R8XBcI+WXz/H3my8sVEbEtgS2jUeOyM4mO1tdMz9BIVE652jkRBJxJD2SHsBmKeWn\nAEKIN4EbgZghkVJ+41v/VFwOi5TfAUuFEOf7dyiEaAE0kVKucD4XAIPQhuSkQ1FlvNjj5P6TGdEt\nvhIsFgbrBlktvFDGN7uns/OUv/Hu1m+dOIGbmAe+bRFr
+ysCUUSfP/GftGVQ9QBYA4mFunZ1gllT\nmCnuovElrzKm5xgeXf4oVnmOLyQWRZz3AeQ8iExfgSEMROWlWFsvV4WK4BidFOIIAu5kXpoL5Tk0\n+SYb/z/Kj7I/gW6LOLdXS7YvHIDymCSu5yQMAyltDFNilN7BniiqnsZVSI5KKL1d6ZE5dTdkPwrb\nenuqABmFVK/MZdKs8wCYN09iXr4Ua/m9MaZZKCj49f98x71zJhN+aR7Yfs9AYgSi7Gn+Ht/s/4ZA\n8FakVI/kL71sY1kQCNpk7fiQ6nBW0nxMUREUzNwGGYtUe2RnIp245O8Yee9hL/xtzPgJoYzIc8/5\n7pFaajgGD0hVeZ0IICRd2qar+yVJOGjn2X8D856YxyYMG9M0YvkZ//ou67C6vCti25Ux4c/EPIvL\n+kpJ8fqmJBZjmoGo2s6wsHa3pGDWJrJHqYMlq6A/kA7aUUF9OMKH8gJ+igpnuZ9vB55Ost5oYAtQ\nCbRP+G6IfxugOzDf9/lyYFYtxx8BrAHWtG7d+tCJ1BrHJfw1J+aDppywuHby/fKK5bLxnxqr+org\nd9IwbRlMCavaksB3qgbEXyvS/Vkp+v6P7PibO+Jb+hrV3npufUmrIin6/o/s8bsxMd0sAt9JjIjq\nGPnWh6qd75opctAjf4mvjWlVlFCrEvX+mvucWpCI75hKT0sMu7RmbQq2VCqW+2Sb2/4sR44tl4Nu\n+zy+e6RRrc4v8J3TxjgSt18G3unpk41HdZv0H7vJNum1PrZUJ8nFE5TGmW9fwoxK8+IpsXEaDxoy\ncOMoaQSiaowJ19r9TRLrNoKhaOxahEZcEWv9nOz3TFbf4u/cmahVduvISmkYaiyBYET2+N0YOWXN\nlBr3TeihUKybp3HZJNmxy9c16mH8mPLWhzKYEpaGacfaFrs1Iv5aGLeuxN8mOHE/5sUvxOp/Es/h\naLXi5YdSRyKlfAZ4RghxC/BbIO8w7fcF4AWA7t276xzKCQZ/zUnIDCkmVy2IhRy2Xg7RELYUQABz\n/7nYQ65GlOZB8TBsC6RhITq/itF6Jf8GL6eevgJx3b3Id5/BY3EBn3dF7uhO8VIw80qg9VLMO67l\njmbTHeZVJkWVeykoLeDd98/CjpiOx2HCZxcDhkPPdSrbpa1yGe1ne0lydxBCQteXkenLnTEVqfBX\naS6su0NV/wPbUmYxdc882lU9CEYzNW+bEYcldpYKs33xYzxPxtEP23dWnIQMHWfAlmu843/bAjd0\nZgQssq5ZD6ekIjPegUA41qVSXHsfsvuLSKkexW1pI/c2Q1rSd0yHlt25ADoX0PI/t/O73MudkJTK\nBUTCjpdmGYTX3UzBrE0U7skmJ8dHE79uC1UbMmt4HQUzt1Fd3QqkSXV1hIKZ24E2PgptWsxTiEZM\nVs25gFXmXQAxz7awvBDLtpyclkD8byEbI6GkoTuXfbVjRyZWxGHR4bG2UlOVBzF/vkdfLi72aMbT\np8eH9UYMyqS4qAnPr1a1UtX7Vdjr179WYbFksjEFBV4y/mjnSo6kIfkMSPd9TnOW1YY3UfmPuvaZ\ndhD71DhB4a85ycnIOWBxXSzk0HYJdiCMYZuxhGxVamNSq3py380GYQsCRgApjBoJ9B4tezBsYDfu\nbdGP8OL74NtWnN/mFLas6ISUBtGIxY3BSfTos8AZTxtAheD6FvRlf3Q/ss0lYI6DqEBNyCoUdUbb\nLewpb+cxjGwJ37Z0xCBR6wkLzOq4TpECoRp7lec4uQKHt1p6O+GSPDbaKSAiqvNjYnOw8hzUzh1D\nZViQUYghDLq36M6qHaug+0sANK+8lzObpLBh2flIqVof251f4r6P7mdoYCgifSXSJ8cv01di+HTH\nDGEg2i7Biolbos6n/5jYmLanr+C+j1KA1VRtyOTjrVWAr4h077m8fP/1WFE3AX0a4wY5nOFBKsw0\n6jdeGIyMTWD8HCzhnNsiCgtzY7kHj4nn4PMsqOzJjI9nxAxJXIFsRT9sK+gYEekkxgWpqTU1yQAM\nQ4X2sq7ZGKNFZ2bG5zKg9iR5URF8vKJN3D43b4a77pL0vqUIw7wEyzKRUpEbsrJqtug9mrmSI2lI\nVgPthRBtUZP9zcAt/hWEEO2llJucj9cBmzgApJSfCyG+EUL0RCXbc4GnDvvINX4QiKMDJ4ErcpiT\nkVPLE6xKyE6c6O8BYUDJrZC2FFCTdaNAIyb3n0x2ejaZv82kYNYmKG9OVrs23LfOoro6gjQizI6M\nZWzGxLgxFczaxP6F+V6tiOtBFA9VXoAZITMrypJyV+HYeWrf0V0Zku4vqkJF15PwGQOJVMYkozCu\nQRcA0RDSffpvWoEhTC6rmM03zd+lNPS82sbnRbS/7QnaXtGELi1+xWNFj3kXsftL7Oz+Ejsre0LR\nApAOrbr5OqoL72fBNztUdtNfP4NASjU20zC5M+tOsq7LYlTpdOzVw4nlgxwqsovq8q7c86cO6ome\npsrYSEP9/bYFkTAgExLQ6dkUFcEVfaJEwq3A/ClTS64lv2d+3L6zWmRBizJso52q9DcsTBHCiqrx\nsqM7TF9Al17z4u4vf4HsfUtsqi0JQiI6zGbyhPOo2pBZQ5MMJJw3HyvnQfLXryOz2wJ1rybUd0DN\n4ks/880tXEzE4pXfYnR+CbFmBFIKolHl7fjp0QMGnCD0XyllVAhxD/A+6tFrqpRyvRDij6i42zvA\nPUKIfqi01258YS0hRDnQBAgJIQYBV0vF+Lobj/47B51o10gC1xNwQ18Lchcw7nLvCdYPvzAgUkDx\nEOhcQKDNau7MupPczrmecdieHWvBGwrBgHve5+3iZciMD7BaraawvDC2blERTPvvW5HVMp6Flb5C\neQjOE/xiBEZgITJsOrZEOOEv54nZ8QySQSIhfQXmkGuwt/bGaLuYtP0D2LbGl4Df3wT7lXkssVOQ\nog9kXaSO338MFA+DwH42fXwam0N7WZD+KJZMqHpzCyvd0Fjjr1SxphVSnSnz/l3DwMWEMG2LnRvb\nUlU2gotaz6VktUsoMNV+/Ci/gki16yW44UNbXYvPuuOnYVuNvqCgtIDC8kJW/a0vkXDXGEkhvOVS\n/rn7PwgZQmJgYFC1IRMum6iS9Fsvx2i7hOFdR7DuzetZteT02LbfbMyK/XYqeZ1NxZaWVGQs4pJh\npSx+7ucgDexNV1H8+T/ISs10c7IOBGbQQuY8iJ22jLBlxt0TpBXBZYWQprzoZBL3/pqYxF8bgI7/\nwD73I8zSoWCFCIWUtL1LtQZ4910liHkoSsSHgiOaI5FSzkZRdP3Lfu97P+YA22bUsnwNNSnBGhpx\ncPMilrQIW+H4f+YEZGcraYwpU1ChG9uE8hzMjHXxRoQEWfNqC/adRaM+j1MdrUYIg9RTUuPWjUZM\n9f9vSXDqV4CEJ3ggty9meR/sxl8i33vSUR02lOeSrGe92zHS+a55x620u9RgWWURFYsvd57mAyr/\nsjMLnDoaMGD
NcGUspXD0yoBtVyDX3ol13d3KcLnGw2c0YsbQZab5amSS1sYAsvISZk6/h3ekRIir\n8cJpUdh3lvKo3AkyUUYmxkST3jZC0ZrlnMd5ufkA7LQXoXo2mO/7GGcL2UwhmDdjiMakOEWVZaFU\nzNarIL2IFDNFhcB4l1XLfxrbdpH4Iy/MzHfUpSXSBoTydESXAo/5Zkl2LruKlyt3o553TcDiws7f\nMWb8NvLXryNsmZifXUbFrFt4oaqMOft/zzufvINEEjSDFOYVQhpUNNvEg2NuJVxt+gyI4/kInJCn\ns6zDzNiDxfV/ngzlOXzy/XIef+IebNubzi3LU1PWle0aGoeIg0nGgycMuL/aQhpqMnKbbPkNSU6O\n0rSybBtpRHgv/Gvuv+ReHi96HEta5M/NJ/OcTLLTs+OK6AJBg6GDO5DVYwpzFu7h7blfIzM+8BmW\nIobfmMnH685g8Sx34hAqge6fqNcMh1nPEntiX3snXHc3n537EZ+V3gzcrEJh/lBXxxmK2uvPzVjO\n/v15AhlQhY/gGQ8/bdg1Gm4ozUZJ6/trZBJlYRyjY0uhjJthqwp7MwIZi5CVPZUgpV9Gxim+dAbl\nXIqoMxaHrGBD5NNe0GoxIm1ZXMtkQO3zmjGc1zibX99yMWVfwOhnt2O1vhizzapYqJKB8FJxf6Kf\n9oLGX7FhzTmM+qAIWd3JmcA9OrbE8Syda/fOG82xXaMnomCG6X3XTEYMyiWz2wIKZm1i2sRbeSFi\nYIt9kLcT0lWGP2yFeWD+A6zesZr9C+9XXquTf0FIMMKIrFcwW32I+f5TRCIGwozCZY9jIwiZIU4N\nncprG99SRaZhvO2deycaPbj+LQ2BNiQaJyQOJhkPPtXXmduZuicPq9XqpAYoOxsGPPQoM+fuhoxC\nIq1WULh1P7a0saUd5/3Ex8RNIJeCl2DONBBRiTDDkNsX0osIGAE+XteUxQV9nEptT5KFjEIMDC7Y\nN4QNs10j4hgAd/IXtuddmNUw4D56n/Nf7G/1PmsCU5WC2HvPKqOAVPkX8LYBtU8ZUOEu1+OQUTX5\n407+hV5yvTQXgUHrphls3x7AKrnFyf0EPO+l8Ve+njDheOYYAqbP97yd/vmwLxUueBc23oj7lE+H\nt1VTsph3FHQMmKrFMYSB5Xp4lT3hlYUxiZzNQ67k3jkFWK/Mw4r8HswHiOb145FljwCKoXXnDZ14\nfmZhTNLGNqIYRtQxJL6i0s4Fykg7hs6WoAyzBS3XErruN+QOnMgLa19gxsczOGX9Q0QjJrYFiGCs\nLsg1mksqliCEUA8U5v9D2AIpIpA1DToXINNXIIXJgMsymDVvL3abDwi0Wc3wLneRFb2bUTe1h0hA\n/ZZO7ZO6DxTxQkpR7/4tDYU2JBonLOpKxtdYPxuys9uQWznxgAaoeYet8N3zsc8tT29J2ZdlSb0f\nf2vc+A59ApMUhp/5GnR9mJff+ZjF037vE4l0JvBr78FsvRpDmGxc09wp+nO9COdpWBoOa8tZbgVh\nXyor2g7i2vOvJbg5SHjfOciYB2JB16lqYizNhc1Xw57zvBOsPg1hADYEgoLzb32WjRVfIjMWYqSv\nouXprdiOgJI8ZDRExbo7MU2hMp3uMdyCypI8FaYzbLjkiZgRMVuvxlo81jNYUWD20+pcDGfith2K\ncK9HPI8sQatMIOiV3otllctUbqc012k2pmjDlN5OuGklRPxe1RVsTn+Yu2bdxZbdW8jtnMuUB7sg\noynq2tuSi65bw4f7Z2E3+jKe6FCe4xhj/29gYHzZlacGPEXZl0XcNUvRiNn/DcHAEiQmNlZNI5u+\nElOYIExE1mukN02nvM2D8aKf2/rBNcSkgCzbpHXT1lQtzURGHW/RloqZ17RCGfrSPOTau0DGtzk+\nktCGROOkgZ/FdSADU5cByu2cy9SSqUSsCEEzyNheYxnba6xi95ySSmF5YWw/LgpmbmN/dTpSqrCV\nq6uUO6gNhdHWRD89M64innbz6XLTO/TMNln3eTdWrwogP+uGmrj8mVhLTbb+fIfjOYStMG9/8jZB\nM8jlvS0WL3LCXYYFCAJGkEvvfpNdn6zh3489jxVx8jJfdYgxvgxhkn9DX+776GKnFXKAL777Aspv\ni/VXkbYkavsnVolhSi5r05vF69zJOQrLx6rvzTC9fvdHVrRbTrgwqkgFAqcOpubEaDg97YEauSWB\nYGnlUmxpY2BwxilnEadesvdc9dd9Yk+Qq5m0bBLz/tYGuW4EMXkZw6K600v8qt9ZPF40HUtaBIwA\nF57VhZKMQrUPV6LfGYUdhTnz9/HRjx6Ju1fO6TWH6390PR/v2sLif17gM2Z9aNS2lJ+c/ghvTB+G\nFQ1QboY9upEjtWPZKcxZCmbubGi1lJAZYs/mjhSuXIdpXkQUG2FGEV1ew05b5txbJoGyO7GjAS0j\nr6FxOJGMxXUw3oof2enZFOYVJjVKyY5RVFnE1D3jkMZskEGCQYNhd5heyKEyh2C7cYQXeXmN4JUT\nGfWTW8mfm8++FbfBe8/E8gZC2HDBO8jzZ3tP+CJAr91TqNhTQUXbPyHTVqopvfISIuVXsv/S7zCG\nXI1dcqt6Ml43HFF2JytEP6xW0xFDNnHOiif48qOLnOMobycSlbz2qkl4zy+RGQuxW69SuYIaiXFi\n24CFPLeERukVYLR1DJxDOkCABWd++ROeGnAro6aZqq5D+hLKhhVPMBAGvdN7xwyGCzdR77KmbGwu\nv34r762wiUScvMWma5WBMqLx9TQ+MkHJ7Du9JmhYkDWNDY2n8u/lJn2Dv+XT4tac16oJ8xcVQ5sP\nYEgf5fm4Ev0Ojfvd6l9i796iBrfmTnjvWT6TJi8uifCj2/6FGTwfK6J+X7PtEn7S8Se8/uwOZDRQ\nk7gQIzSYRCKSG4KT+L7t7zh79w1MuutqR9omCl1fQXR5NWZEAKX1dXsfrk95lLG39tA5Eg2Nw4WD\nYXHVB8m8ltqOUVheiNVqKeT1RZRfybDBF/DcqNy4fRX+diIFXf7BzvUdaN5pI7kDH6agtIB9Wzs7\nIR9XtBHA5Ma+rXjv3OlE7SimYfLMtc+Qec7X9C34CUSrMYQBldlY099HWiGKl0Igrx+RptuRdgCk\nSTRiwZZeyJaLoNUSvuwxGjbMjwuv2RIWv3U+yAfB/H9Yef0ItlmDaL0acX0+9ntPYVsC4aZsbAsw\nkTu6Mu/pH3vNwbb0w6vSl7yzqoT/7OyFHSMAeEaIrGk1vI790f1xNFvDKRpNRPMOW7n/+Xd55LXV\nyK/TlEil01WSphWeEXEl/oXtMOScnJRrxACr4mLmTR8L0RCbMUEMAvP/qbDUwLvVAR2DJNouxk4r\nUka2sie891ws3GhFBBvKUuD2HMXcy1iInbaS18qWQuM7fTkkn7fkrw0yLN6uvh8+LUIs7e7J99sS\nmm6LMyIurLSlvGdcxti0RcCRtyTakGicFDhYFtfhPEZseevVhNqWkjtw
QY1ts9OzY6J80EN5MSVT\nofy/4yc6VEX12Ft7MDZtUZxXNHHJRCUFg40hDVpW/YLP7BSkNLGjMLzZdBi8KEEJeRkRl4KbXoTI\nuwpKc5G7OkBFL+fYztO6JTG2XcnTo4ZQ9X0VqdelMpp7sd+djJQmZkBit1iH3OHVdLDvLMh50GGN\noRhJQmKvuYMlxVZC9b6TkG++DpY8EEvsG8JQlfY+2BWXxLO9UAYnq0UWo4t/hrw8qib0krw4IUqA\nC7+7m4/tFIcB5uSi7CgYUrHGXCPmegWuAfSJaRrb+qqwW5uV2MLE3noFSNuXR0lgxAGkr8BovQp7\n9TBk4R+geTGsHOPlkJxK/95terPSXElkyNVQfgV2mw8gTY1JtlGJ+cRziqNRO4jYEfLn5nsMtSMI\nbUg0TgocLIvrcB7jUI4d03nyV6ALSe+rv+HhP6Q64Yp4rygmBROtxsbms9TXkOYQDKEaVGW1a0NV\nVS5PPuH2+DYhbSIFpQVMK5mmvBsziCwbpnSu/OwxJxfzi+tbMKLbCIoqixhfOB5rb7dYst+2LDpc\ntI9PdlnYEd9E51b0l+coqqrjJUhLQMvV0KJYGY/NA+CrH8V1kWx610/55uz34y/OmjvjO006hZ43\nXnAjVd9XYbsiWv7juk/6Sx5gwylLwRjsVen7mWT+ehiXcSbdnjROoea6YdjSRBiSHw+aT8nbvePr\nbBLzKEYk5uX02/035s0arJZvuQZPY81rOoaEoV2GQhdY9/l8Vq004gyrzOunjGjjrxypG0GHrD18\nUvVJDS9t1Y5V9Jneh4V5C4+oMREyeQnlCYXu3bvLNWvWHOthaGjUG/6cjth+KV2r/5th/9VO9VQ/\nAGnAneDnb52vEtDbe9HP+BODu+UcsO+Hu8+KWbfw4qNtHLkNJ7FvhiFrGqLzq/w5byA5GTmefljl\nJU6YyJmU8/opSuvWKxQ9N7FQsbInRsFC1WjMFl7vlUuegGUP+FYUICKIK/8Alz/shYxKcx1D5HhJ\nIgpX/g6j7RJGnPka3wbKeWPVv7DbfICRvtKrsk/sWOmv0k8wIq1Ob8VnH7dW1ORYmM8mPgTnGlh3\n/jRARDD6Poh92Z9rFI22urCSi1tdzI5np7JqcTNve+E0X3c7WnZ/CeF4MkEzyM+aPMZrvxwaG3f7\n/x5F+WlvENnWLeF8HOp0rI4mJ+6cJlw5wVN2OAgIIdZKKbvXtZ72SDQ0jkMk82KKKosYNWtUzHtI\nRhrITs9mfM54llQsUSG2jHWMz02h8NU62ts6OZ+iAEx/ymmsRDVkvQKdp2O0XkWKmUJq1fPkPxph\n32ePQefpsad+Ud4npicmAdKWO+EWDwYGKW1Lmfz3TTwy4VQ2r2ntUX/XDXfWcidoRf2VGQvV51jD\nsMTeLQKxvxnG/37A82ETZGvgMjB/yy8ee5k39tyncisJ1fhi3zmqfiOhHTLpK2h3Zjt2lPdCxrVa\ndivsXWPiJxmAYrgZpDc/hW0Qxy4zMPjq+yDvfvIuyJeAXxEzQD9+DdbfrLyruU/AuR8pIU5U0eKy\nxUFww3CWZPPaNIzedvz5RIUiYyB89UGBuHPyKy4cCWhDoqFxnMKf0I9TEXYmodpIA0lDaTnx7W1r\no4S6RZTjX1nEfPu32GnLMDDo17Yfg0//K/felEk4LIEsJbMyRMm+yATPQyC46ryrGHzhYJVPOSWV\nqu+rnPFkAmXc9XOnuRQm7HOVfp0JtsNM6PVXX76ij5OvML11ADAQK35F1AavW6IJlkHh262xezuh\nngRhS6PtYtJ3D6XcX8cy93E4fQdLT9+F0aIUywx7LYixiEn+C8ujKvsYa7Yt2PZmPuS9W0N7LGJH\nsCt6QJHTWVwAlz4Cjb6BMgNQY2i0fQD7fduWN5sGxm2xMJzM+ACr4mIVIhSWc86GV9vi6rNhxlhg\nRvoqqr5P0tLxMEIbEg2NHwBcRphrRIQjk1EbaSCRVZaoPHsgSmh2NoxPS2FJgdKLCpkhxueMp+Dp\nJoQjbogHFc5y6Kr+3vUGBimBFMbnjK81Lj9iUCb8vYxHJpzKlrVtnSpyC87cApc+gtF9Kt1bdqf0\nixSidhRx3lKii/wTu40bYrJtnEkVb2xAuzPOY5dDfkjMl1hpK+hywfV8/p6gutqZkD9TbZdtwAhG\nGZT/L5obnWhyRpRH//kB1rdnIU77kosHbGBV8Xcw6znietNgeNcEYqEt2Xwd7DsH8XW65+VIRwOt\neTH+Dpf7g9vjL1SyPI/rRcU8I5+XZChRS7eYU2QsJiWQckTIJX5oQ6Kh8QOAnxFmGiZ3dLmjhqBk\nXUjWSrbWddOzmdxpJTPmVDF4QCqwl6l78sCY61SOE9PLCpkhnhrwVBLP48AHGzEok8xziTWbCgRB\n/nQEVqsZ5GSqAAAPk0lEQVSlhMwUJvefDBDzrCb96GklTdP4K9jZFYqHYhDCFtUq57GzK6wbBjJA\nMAgP/7ITpBVSUFrAzr07ec94j4jvaX/O/t/z5BsDmPFcJvP+JR1ygYIdDfD9N6eQ+9sdFMzahFV8\nqxK+NMOs7nIVnGuD6RRT4uQ4pFDXpPFXjkxLSmx/EokwbQIBiEac5P2n/WDrlcQS7kRhw0/g3I8w\nWq/0Euf+IswlD3ghLaLEh9ksRIsSOg9YSyjckpwcaHb+QHIy/nrEWVs62a6h8QNBfSvzD8uxirwJ\nPhSCvEcLeHHXHSqsUpqnGn0NDVKVOuugjEeyc2F7tpJS71hGcUCJRiYzkkWVRfR+pTdRW+UBxPZL\naff1MD5tOlUV4SG48dSH6REZS2rHMqpSZ8WNqaiyiPy5+azesRqJxBQmD/V5iJzAOKc5lW8uNMIY\nd/QlJWMd13xeyMxnHUqziMCVfwAkfPCQt+yCdyFyihLI3Hc2LHgIz1txJ3rJoEGCHf/ZzaolTZ1w\nlEM/dmVuHPLB1X+cxLzwg3Hnb2BgV/bwyA2GhUEQ219lLyBgiho94A8VOtmuoXGC4WC1wxoCv1x+\nOAyUX0GoiVcLMzl3AdnpXSmq3HvQigHJVAZybotXBcjtnFtju+z0bJ659hnumX0PlrRIySjm1/2j\n5M/1QnBjb7ocSK5ikJ2ezeT+k+O+U4ZGne+kSYJ33kEp+gqJ7RSWNu+0kZSUboTDFoGgQLRbTsSK\nIH0Fg7EK+m29oX8+pimxrJoP6TvkOobdH6Rs9RlUh5WXQv98rPU3wqd9Y8nz4rmZcGWSi+cLdYmv\n22CvHY6/xggpiEYlI++2mPPNY4y96fKjcs9oQ6KhoVEDOTnxyfncQW3ITatZC3MoigHJtgHqtZ8R\n3UaQeU5m3Dj8nwHGF46n2qqOU2N2jxvXLdOvi5adTY8eqiGUku83EeVXOgWk7cntYnqdDdOUqOee\nXvN49//OYOvHzdhf0ckrWNyZFRMFVol16dS8RFhz9n2UrV/H5NdXUrUhk4pmb/Lirhfg7GJVG2Kp\n5HnV8uuhfc+4pL1NfKhLbO+
FXDfMJyLphbmkJZj5t9OZs//I15CANiQaGhpJkDw5H88icyfjg1UM\nqE0BoL77qUEkcKnLjqfjFmQawiBkhkg9JbWmB+TUwsQty8kmFMLxFCTX92/K2JscDyvdn1/KpuzL\nMn63/FGsf73v1Zo4cvOGMJGudpeIF6C001YQtkyqUmcxblwmRZXtmV4Qojp9FXb72bBxEIqFFVQV\n+NRsGGY4hAI7bRlce7dXnCmsmIQ8GLD2TqqbF1N4RcPkgOqDI2pIhBD9gSdQZ/aSlPLhhO9HAg4f\njr3ACKedLkKIccAw57v7pJTvO8vLgW+d5dH6xO80NDQOHrUl5xNDU5P7Tz6oHEltlf4NVR5wPR0b\nO46yPOOFKqrtrk7r29o9oHGXZzP59TJGP/t/2G0+4P3qdYylppxNUWURo2ePxtr6q3jF5vMW0Dtv\nIbdm3kr+LULV4ohInABlItvOvRYFszbx4pYbsBzPIhgQjLmpC/nrG8cMo0BgGiYDfzSQtze+rQbT\n/aWYtL7Zdil2yW3INXeiquUDMPtpUod9ctDX8mBxxAyJEMIEngGuArYDq4UQ77iGwsHrUsrnnfVv\nAB4D+gshLgRuBjoBLYH5QogfSRlrJt1HSpnQ8FlDQ+NoIDE0VfV91UFXTSfL9zQ0B5To6Qw+/a/k\n35KpJnRjHkbe1YQy1sV5QKpFsogV7FWlzor1/qjRb913/rZt16xN6fMQ/XOuY8TlmWQugIKZFbz4\nn9uwHJ2soBFkWNYwlf/Zns3EV11vL5vCPdm4kSshYOhQh9XWbUGMddb8tOax3NHsTbMVrRkItlnL\nsEFdyO08ibK1pzHypxJpqRCXQVD1qx90yJe1XjiSHkkPYLOU8lMAIcSbwI1AzJBIKb/xrX8qXqXR\njcCbUspqYKsQYrOzv6IjOF4NDY164GgIYB4KEj2dwlczCYfBtgQGjeln/InxuSkxwzC5/+RY4t5t\nkVyfc8vJyCElkEJ1+ioYcjWU5yDbLMRovYrUU9REn50NhdHXYWGRUkJBMCxrGM8NfK4GI27BgiQ5\nKR/XYHrp9DgSgtvGoKBU6Xf5GW7Z6cCzql+7ZUFKivjB9yNpBVT6Pm8HLklcSQgxGvhvIITHU2gF\nccHB7c4yUMZmnhBCAlOklC8kO7gQYgQwAqB169aHfhYaGhpxOBoCmIeKOK8mxz85C8YPUQwtF1Xf\nV9VokTzu8nF1nlvi+Zd9WcY9s1djSTtmkLLTs2sYJdebSGTEFRbCuHHJC0ZrIzMcyHsbMQIyM+tX\nfHq4cMyT7VLKZ4BnhBC3AL/F6xFWGy6TUn4mhDgH+JcQYqOUcnGS/b4AvACqjuRwj1tD42TG0aQi\nHyrqquavzfuoz7n51yksL6xhkNzvkxmlRO/D9RiS5aQO1fs7mOLTw4EjaUg+A3z2nzRnWW14E3iu\nrm2llO7fL4UQb6FCXjUMiYaGRnIczcLGY40DTai1CWMe7LU50GSfNBd0AAOXePzj2fvz44hVtgsh\nAsC/gb4oI7AauEVKud63Tnsp5Sbn/fXAH6SU3YUQnYDXUUaiJbAAaA80Agwp5bdCiFOBfwF/lFLO\nPdBYdGW7hobC4Ww5fKKhIdfmcBjn4/G3OeaV7VLKqBDiHuB9FP13qpRyvRDij8AaKeU7wD1CiH4o\nDdDdOGEtZ72/oxLzUWC0lNISQpwLvCVUX88AivV1QCOioaHh4XC3HD6R0JBrczhCfYf7tzmanucR\nzZFIKWcDsxOW/d73fswBtv0z8OeEZZ8CnQ/zMDU0Thocr4yr4wHH+toczuMfbe/mmCfbNTQ0jh5+\nKDH3Y4FjfW0O5/GPtuep1X81NDQ0TjAcLo/kmOdINDQ0NDSODY62d6UNiYaGhsYJiKNZ62PUvYqG\nhobG4UVRZRETl0ykqPLkUz06Ec9deyQaGhpHFcdjvcTRwol67toj0dDQOKqorbHVyYAT9dy1IdHQ\n0DiqcOslTGGedLUsJ+q5a/qvhobGUcfJpPeViB/SudeX/qsNiYaGhoZGUtTXkOjQloaGhoZGg6AN\niYaGhoZGg6ANiYaGhoZGg6ANiYaGhoZGg6ANiYaGhoZGg6ANiYaGhoZGg3BS0H+FELuAbcd6HA3A\nWcBXx3oQxxH09YiHvh7x0NfDQ0OuxVcAUsr+da14UhiSHzqEEGvqw+U+WaCvRzz09YiHvh4ejta1\n0KEtDQ0NDY0GQRsSDQ0NDY0GQRuSHwZeONYDOM6gr0c89PWIh74eHo7KtdA5Eg0NDQ2NBkF7JBoa\nGhoaDYI2JBoaGhoaDYI2JMcBhBDpQoiFQoiPhRDrhRBjnOVnCiH+JYTY5Pw9w1kuhBBPCiE2CyE+\nFEJ0PbZncPghhDCFEMVCiFnO57ZCiJXOOf9NCBFylqc4nzc732ccy3EfCQghmgkh/iGE2CiE2CCE\nyD7J7437nf+Tj4QQbwghGp1M94cQYqoQ4kshxEe+ZQd9Pwgh8pz1Nwkh8hoyJm1Ijg9EgV9KKS8E\negKjhRAXAg8AC6SU7YEFzmeAAUB75zUCeO7oD/mIYwywwff5L8DjUsrzgd3AMGf5MGC3s/xxZ70T\nDU8Ac6WUHYDOqOtyUt4bQohWwH1AdynljwETuJmT6/54BUgsEjyo+0EIcSbwB+ASoAfwB9f4HBKk\nlPp1nL2At4GrgE+AFs6yFsAnzvspwC9868fWOxFeQJrzz3AlMAsQqCrbgPN9NvC+8/59INt5H3DW\nE8f6HA7jtWgKbE08p5P43mgFVAJnOr/3LOCak+3+ADKAjw71fgB+AUzxLY9b72Bf2iM5zuC43lnA\nSuBcKeXnzlc7gXOd9+4/k4vtzrITBZOBsYDtfE4F9kgpo85n//nGroXz/dfO+icK2gK7gGlOqO8l\nIcSpnKT3hpTyM+CvQAXwOer3XsvJe3+4ONj74bDeJ9qQHEcQQpwGzADypZTf+L+T6rHhhOdqCyEG\nAl9KKdce67EcJwgAXYHnpJRZwHd4YQvg5Lk3AJzwy40oA9sSOJWaYZ6TGsfiftCG5DiBECKIMiKv\nSSn/6Sz+QgjRwvm+BfCls/wzIN23eZqz7ERAL+AGIUQ58CYqvPUE0EwIEXDW8Z9v7Fo43zcFqo7m\ngI8wtgPbpZQrnc//QBmWk/HeAOgHbJVS7pJSRoB/ou6Zk/X+cHGw98NhvU+0ITkOIIQQwMvABinl\nY76v3gFcNkUeKnfiLs91GBk9ga99bu0PGlLKcVLKNCllBiqJ+oGU8lZgIfBTZ7XEa+Feo586658w\nT+dSyp1ApRDiAmdRX+BjTsJ7w0EF0FMIcYrzf+Nej5Py/vDhYO+H94GrhRBnOF7e1c6yQ8OxThrp\nlwS4DOWKfgiUOK9rUbHcBcAmYD5wprO+AJ4BtgBlKAbLMT+PI3BdcoBZzvvzgFXAZuD/gBRneSPn\n82bn+/OO9biPwHXoAqxx7o+ZwBkn870BPAhsBD4C/hdIOZnuD+ANVH4ogvJYhx3K/QDc
4VyXzcDQ\nhoxJS6RoaGhoaDQIOrSloaGhodEgaEOioaGhodEgaEOioaGhodEgaEOioaGhodEgaEOioaGhodEg\naEOioXGIEEJYQogS3+uBureq974z/OquGhrHMwJ1r6KhoVEL9kkpuxzrQWhoHGtoj0RD4zBDCFEu\nhJgkhCgTQqwSQpzvLM8QQnzg9IVYIIRo7Sw/VwjxlhCi1Hld6uzKFEK86PTemCeEaOysf59QvWs+\nFEK8eYxOU0MjBm1INDQOHY0TQls3+b77WkqZCTyNUjMGeAqYLqW8CHgNeNJZ/iSwSErZGaWjtd5Z\n3h54RkrZCdgDDHaWPwBkOfsZeaROTkOjvtCV7RoahwghxF4p5WlJlpcDV0opP3XEOHdKKVOFEF+h\nekZEnOWfSynPEkLsAtKklNW+fWQA/5KqURFCiN8AQSnln4QQc4G9KLmUmVLKvUf4VDU0DgjtkWho\nHBnIWt4fDKp97y28nOZ1KP2krsBqn+qthsYxgTYkGhpHBjf5/hY575ejFI0BbgWWOO8XAKMg1qu+\naW07FUIYQLqUciHwG5Qseg2vSEPjaEI/yWhoHDoaCyFKfJ/nSildCvAZQogPUV7FL5xl96I6Hf4a\n1fVwqLN8DPCCEGIYyvMYhVJ3TQYTeNUxNgJ4Ukq557CdkYbGIUDnSDQ0DjOcHEl3KeVXx3osGhpH\nAzq0paGhoaHRIGiPRENDQ0OjQdAeiYaGhoZGg6ANiYaGhoZGg6ANiYaGhoZGg6ANiYaGhoZGg6AN\niYaGhoZGg/D/Aa4VdTffkKQ0AAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -565,9 +581,9 @@
"colab_type": "text"
},
"source": [
- "This graph of _mean absolute error_ tells another story. We can see that training data shows consistently lower error than validation data, which means that the network may have _overfit_, or learned the training data so rigidly that it can't make effective predictions about new data.\n",
+ "This graph of _mean absolute error_ gives us some further clues. We can see that predictions with our training data show consistently lower error than with our validation data, which means that the network has likely _overfit_, or learned the training data so rigidly that it can't make effective predictions about new data.\n",
"\n",
- "In addition, the mean absolute error values are quite high, ~0.305 at best, which means some of the model's predictions are at least 30% off. A 30% error means we are very far from accurately modelling the sine wave function.\n",
+ "In addition, the mean absolute error values are quite high, around ~0.31, which means many of the model's predictions are at least 31% off. A 31% error means we are very far from accurately modelling the sine wave.\n",
"\n",
"To get more insight into what is happening, we can plot our network's predictions for the training data against the expected values:"
]
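
(For readers following this hunk outside of Colab: the following is a minimal, self-contained sketch, not part of this diff, of how training/validation MAE curves like the ones the cell above discusses are typically produced with Keras. The model size, epoch count, noise level, and variable names are illustrative assumptions, not the notebook's exact values.)

```python
# Sketch, not part of this diff: reproduce the style of MAE comparison
# discussed above. All hyperparameters here are illustrative assumptions.
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

# Noisy samples of sin(x), in the spirit of the sine-model example.
x = np.random.uniform(0, 2 * np.pi, 1000).astype(np.float32)
y = (np.sin(x) + 0.1 * np.random.randn(1000)).astype(np.float32)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation='relu', input_shape=(1,)),
    tf.keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse', metrics=['mae'])
history = model.fit(x, y, epochs=200, validation_split=0.2, verbose=0)

# Depending on the Keras version the metric key is 'mae' or 'mean_absolute_error'.
mae_key = 'mae' if 'mae' in history.history else 'mean_absolute_error'
plt.plot(history.history[mae_key], 'g.', label='Training MAE')
plt.plot(history.history['val_' + mae_key], 'b.', label='Validation MAE')
plt.xlabel('Epochs')
plt.ylabel('Mean absolute error')
plt.legend()
plt.show()
```

A training curve that keeps dropping while the validation curve flattens or rises is the overfitting signature the cell text describes.
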
@@ -577,7 +593,7 @@
"metadata": {
"id": "i13eVIT3B9Mj",
"colab_type": "code",
- "outputId": "afc103e2-0beb-4a26-fe18-c0cccc6d3d2a",
+ "outputId": "fbf1c81e-7d45-4a1c-d8f4-a05291688b9f",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 281
@@ -600,7 +616,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJztvXmcVNW16P9d1c3kiLQYvaLigANK\nBMXGUkB8GjDRi6hPkwhB41AgmheTFxm8zye5MSDo517yokj3zwnSSJKnVxxeEohDi9oVCEaMEYyi\nYsCIYCMIyNi9fn/sc7qrq6uqq7rmqvX9fM6nhrPrnH1OVa299lprryWqimEYhlFeBPLdAcMwDCP3\nmPA3DMMoQ0z4G4ZhlCEm/A3DMMoQE/6GYRhliAl/wzCMMsSEf4EjIhUiskNEjs1k2wz062IRWZft\n8+QCEakUERWRvt7rh0Xkzhyc9yYRqc/2eQoBEdkgIiMyfMw235uRGib8M4wnfP2tWUR2Rbwem+rx\nVLVJVQ9S1X9ksm0uKTYhp6o3qeqMjtqJyGsicn0OupRzSvnaDEdlvjtQaqjqQf5zTzO+SVVfiNde\nRCpVdX8u+lYuiEiFqjblux+GUciY5p9jROQeEfmNiCwSke3AOBEJisifRGSriHwqIv9HRLp47aNN\nEnXe/t+LyHYRCYvI8am29fZ/U0TeE5FtIvJLEXk9nrYnIgeIyK9E5AsReQc4O2r//xKRD73zvCMi\no733BwAPAMO82c/n3vujRWSViHwpIv8QkbsS3LOLRWSdiPxvEWkUkY9E5DsR++tE5EER+YOI7PTO\n1V1E/kNE1ovIZyIyV0S6R3xmqohsFJFPgOuizlcnItMjXl8Z0de1IjJSRGYBQWCed11zvLb9ReQF\nEdkiIu+KyFURx+ktIs97x/kTcDxxEJE/isjEqPf+5t23gPe9bvK+u7+KSP84x7lJRNZ438sHInJT\n1P6krk1EThIRjfpsy+xARPqJyMvedX/u/VYOjXd9Ecc4X0Q+EZFAxHtXi8hfvOdx/xsxjtVmtiJR\nM84OvpvLIu7TBhH5UUd9L3pU1bYsbcA64OKo9+4B9gL/iht8ewDnAENwM7ETgPeA27z2lYACfb3X\ndcDnwGCgC/AboK4TbY8AtgOXe/t+DOwDro9zLfcD9cBhwHHAamBdxP5rgKO8a7oW2AF8zdt3E1Af\ndbz/BpzutT/T6+dlcc59MbAfuA/o5n32K+CkiOv8AiewAl6bXwJPe/09BPgd8DOv/WXAp0B/4EDg\ntzHu23Tv+XnAVuAi79jHAKd4+16LvF/AQcAnwHjvuzgbaIxo/ySwCDgA+LrXh/o413wD8ErE6zO9\nY3UFLgVWAId6feoPHBnnOP+K+02Jd992AV/vxLWdBGjUsVvaACd7x+nq/bZeB+6PaLsBGBGjf4L7\nn1wY8d7TwE+856n8N6L73PK7S+K72Qyc5z3vBZyVb/mR7c00//zwmqo+p6rNqrpLVf+sqstVdb+q\nfgjUAhck+PyTqrpSVfcBC4GBnWh7GbBKVZ/x9v0nTgDH4xrgHlX9QlU/xmnzLajqb1X1U++ansD9\noQfHO5iqvqSq73jt3wJ+3cE1NwN3q+oeVX0J+ANwdcT+p1U1rKrNuEHsZuB2r79fAjMBf7ZwDfCI\nqq5W1Z3A9ATnvRH4/1T1Ra+v61X173HaXg68p6oLvO/yDWAx8N89bXUMcJeqfqWqfwV+leC8TwHn\niEgf7/W1wFOqute7vkOAUwG869gY6yDe7+xDdbwEvAgM68S1JURV3/OOs1dVN+F+T4m+T/9zivvu\nvwsgIj2BUd57dOK/EY+43423fx/QX0QOVtUtqvqXTpyjqDDhnx/WR74QkVNF5P95ZogvgX8HDk/w\n+cg/+lc4rSbVtv8S2Q/vT7ghwXGOiur3x5E7ReR6EXnLm55vxQmmuNfgTefrRWSziGzDaWmJrrlR\nVb+KOv+/RLyO7NuROO0/sj/P4zRSiLr26GuJ4hjggwT7IzkOON8/p3feb+Pu3deAimTPq6rbcAPc\nt0VEcAPXQm/fUmAe8BDwmYjME5GDYx3HM2cs90wdW4GRtN7nVK4tISJypIj81jPhfAk8TuLvM5In\ngKu8AfIqYLmqbvCOm+p/Ix6JvhuAK4DRwD+83+WQTpyjqDDhnx+iU6nWAH/DmTEOAf43bjqcTT4F\nfK0ST8AcnaD9Rpyw8GkJJxWRE3CC6BagSlV7Au/Seg2xUsf+GqfdHqOqhwIPk/iaq0SkR9T5/xnx\nOvIcn+FMa6eoak9vO9Q7D7hrj3ktMVgPnBhnX/R1rQdejDhnT3XRV7d5fWpO4bzgTETfBYbi/qvL\nWk6sOkdVzwLOwJl9fhz9Ye9+PYmb9XzN+16W0nqfU7m2nd4xD4h478iI57OAPcAA7zd8PUn+hr1Z\n0Eacxn8tbjDwSeW/sRNnUovVv0TfDd7sYjROQXgeb+ZRypjwLwwOBrYBO0XkNGBCDs75PHCWiPyr\niFQCPwR6J2j/W+BOEekpbh3BbRH7DsIJi824ceRmPJOEx2dAnyhH3cHAFlXdLSLn0mqSiUcAmC4i\nXcXFi38TJ9jaoS7S52FgjudkFRHpIyIjI67lBk+rPBC4O8F5HwFuEpELPUdrHxE5JeK6Toho+yxw\nuohcKyJdvK1aRE7xTGuLgZ+KSA8ROQP4XgfX/BzQDyfwfu3NzvCOWe19bztxA11zjM93w9ngNwNN\nInIZzi7fmWvb6G3jxK0nCeG0aZ+Dvb5sE5FjgJ90cG3RPAH8COe3ifxeU/lvrMLNIHqIyMk4v4lP\n3O/Ga3+tiBzifU/biX0/SwoT/oXB/8RFnGzHaTq/yfYJVfUz3LT3P3COrxOBN3HaWyzuxmnM64Df\nAwsijvVXnIN1hdfmFGB5xGf/CLyPM1H4ZqhbgJniIp7uxAnkRGzACZdPgfm4ENr3E7T/nzizygqc\n8FiKE6So6nPAg8ArOAfiH+MdRFUbcP6D/+Md52Vatfc5wHc9M8J/eKaaUcA4r58bcVp3t4hrPgwn\nWB8BHkt0waq6GzdgXExbbbin9/mtuO/jU9z3GP35rTiB+jSwBWfffr6T16Ze2ztxvqGTaPsd3w1U\ne8d5FjerS4UncA7pP6rqFxHvp/LfuB+nhGwCHsU57v1r7ei7uQ742DMt3ei1K2nEUyaMMkdEKnBm\nlP+uqq/muz+RiMjFwMOq2jfffTGMUsE0/zJGRC7xzDjdgLtwEQ8r8twtwzBygAn/8mYo8CHOJjwK\nuEJV45l9DMMoIczsYxiGUYaY5m8YhlGGFGxit8MPP1z79u2b724YhmEUFW+88cbnqpoobBsoYOHf\nt29fVq5cme9uGIZhFBUikmjFegtm9jEMwyhDTPgbhmGUISb8DcMwypCCtfkbhlGa7Nu3jw0bNrB7\n9+58d6Wo6d69O3369KFLl5i1bTrEh
L9hGDllw4YNHHzwwfTt2xeXTNZIFVWlsbGRDRs2cPzxcQvC\nJcTMPoZh5JTdu3dTVVVlgj8NRISqqqq0Zk8m/EuUcBhmznSPhlFomOBPn3TvoZl9SpBwGC66CPbu\nha5d4cUXIRjMd68MwygkTPMvQerrneBvanKP9fX57pFhFB6LFy9GRHj33XcTtnv88cf55z//mbBN\nIurr67nssss6/flsYcK/BBkxwmn8FRXuccQI9360KchMQ0Y5s2jRIoYOHcqiRYsStktX+BcqJvxL\nkGDQmXp+9rNWk49vCrrrLvdYW9v2tQ0ARiGTaUVlx44dvPbaazzyyCP8+tet5XpnzZrFgAEDOPPM\nM5k6dSpPPvkkK1euZOzYsQwcOJBdu3bRt29fPv/8cwBWrlzJCE+7WrFiBcFgkEGDBnHeeefx97//\nPTOdzRJm8y9RgsG2dv5oU9BTT7V9vWCBa1NVBY2NbrZgfgKjEMiGD+uZZ57hkksu4eSTT6aqqoo3\n3niDTZs28cwzz7B8+XIOOOAAtmzZQq9evXjggQe4//77GTx4cMJjnnrqqbz66qtUVlbywgsvcOed\nd/LUU6lWs8wdJvzLhKoqCASguRlEYOBAePVV94eqrIRHH4X9+93+QAC6dTNHsVEYxPJhpfu7XLRo\nET/84Q8B+M53vsOiRYtQVb7//e9zwAEHANCrV6+Ujrlt2zauu+463n//fUSEffv2pdfJLGPCv0gJ\nh92fIBkNPRyG2293wl3V/Yl++UuYM8dp+StWwDPPuH3gBoBM/ckMI118H5av+fs+rM6yZcsWXnrp\nJd5++21EhKamJkSEq6++OqnPV1ZW0tzcDNAmzv6uu+7iwgsv5Omnn2bdunUt5qBCxWz+RUi0/b4j\nO6ivOfnCXdW99s07v/996z5wmn+iP5k5io1cEsuHlQ5PPvkk3/ve9/j4449Zt24d69ev5/jjj+fQ\nQw/lscce46uvvgLcIAFw8MEHs3379pbP9+3blzfeeAOgjVln27ZtHH300YBzEhc6JvyLkFRDOX3N\nyV8TEinc6+vdjADc/jFj4J574v/JUh14DCMTBIMwbVpmZqKLFi3iiiuuaPPeVVddxaeffsro0aMZ\nPHgwAwcO5P777wfg+uuvZ+LEiS0O37vvvpsf/vCHDB48mIqKipZjTJ48mWnTpjFo0CD2+3+qQkZV\nC3I7++yz1YhNQ4Nqjx6qFRXusaGh48/U1Kh26aIqolpZ6V77x+rWzb3frVvHx5oxw50X3OOMGfH7\nOGNGcn0zyovVq1fnuwslQ6x7CazUJGRsRmz+IvIocBmwSVXPiLFfgF8A3wK+Aq5X1b9k4tzliD8N\nTtbmD87E09zszDuq7rVPpDmoIxLZX30/RFWV8zHYCmPDKFwy5fB9HHgAWBBn/zeBft42BHjIezQ6\nSXQoZ0eMGOEWfTU3u0dfaNfXO/ORqjP/TJ/utnjHjjfwRIbjBQLumOY4NozCJSPCX1WXiUjfBE0u\nBxZ4U5I/iUhPETlKVT/NxPmN5PBt/pH5oHxNfs8eJ6xfeMGFgCbS1mMNPJF+CD9cNHqFsU8qkUqG\nYWSHXDl8jwbWR7ze4L3XBhEJichKEVm5efPmHHWtPPAdu76G7zuJfU3+4otb1wF0Jh+QP7MAd45A\nAG6+uf0gYg5jwygMCiraR1VrVXWwqg7u3bt3vrtTUsTL9wNOOE+f7hZ2xdPWOyIYhBtuaJ1VNDfD\nsce2F/zTp7tZhiWdM4z8kqtFXp8Ax0S87uO9Z2SRaPNKIidxZ5zI0YwfD/Pnx3cGX3RRq3mpo7UE\nhmFkl1xp/s8C48VxLrDN7P3ZJZZ5paNY6XRjqRMtxvF9An56icGDY5uEbPGYkQsqKioYOHAgZ5xx\nBldffXXLwq7OEJmy+dlnn+Xee++N23br1q3MnTs35XNMnz69Zd1BpshUqOciYARwuIhsAO4GugCo\n6jzgd7gwz7W4UM/vZ+K8pUyk1g6pa+TZyIeSDPGikHyfgB9Z9NZbbfdbARojl/To0YNVq1YBMHbs\nWObNm8ePf/zjlv1+LHwgkJp+PHr0aEaPHh13vy/8J02a1LmOZ5CMaP6q+l1VPUpVu6hqH1V9RFXn\neYIfb+3Brap6oqoOUNWVmThvqRKptV94oROcd90FB5/Xn+ZAAI44okP1OJGNPx9E+wQinc6QeNWy\nzQiMbP4Ihg0bxtq1a1m3bh2nnHIK48eP54wzzmD9+vUsXbqUYDDIWWedxdVXX82OHTsA+MMf/sCp\np57KWWedxX/913+1HOvxxx/ntttuA+Czzz7jiiuu4Mwzz+TMM8+koaGBqVOn8sEHHzBw4EDuuOMO\nAO677z7OOeccvv71r3P33Xe3HOvnP/85J598MkOHDs1OeuhkVoLlYyvXFb4NDaojR6oGAm45lojb\n3uI0bQaNWKeletxxrUt14xwr36tsI/sQvTK5pib+Pr/PnVnNbBQ2Ka/wzcKP4MADD1RV1X379uno\n0aN17ty5+tFHH6mIaDgcVlXVzZs367Bhw3THjh2qqnrvvffqT3/6U921a5f26dNH33vvPW1ubtar\nr75aL730UlVVfeyxx/TWW29VVdVrrrlG//M//1NVVffv369bt27Vjz76SE8//fSWfixZskRvvvlm\nbW5u1qamJr300kv1lVde0ZUrV+oZZ5yhO3fu1G3btumJJ56o9913X7vryPsKXyMzxHKKdukC1U1h\nTt+/BoA2JZs//hgmTHDPQ6F2x/NNML7SlOu4+limHN+pHGsVcCyHc77MV0YBkYUfwa5duxg4cCDg\nNP8bb7yRf/7znxx33HGce+65APzpT39i9erVnH/++QDs3buXYDDIu+++y/HHH0+/fv0AGDduHLW1\nte3O8dJLL7FggVv3WlFRwaGHHsoXX3zRps3SpUtZunQpgwYNAlyRmffff5/t27dzxRVXtKSXTmRK\n6iwm/AuISKdoIOBi7++/Kkz/20Ykts9Nm+YeYwwA+bSlx/rP+g7lmTPj74sk0+l8jSIkCz+CSJt/\nJAceeGDLc1XlG9/4Rrsyj7E+11lUlWnTpjHBV+I85syZk7FzxKOg4vzLnUg7fbduLiZ+wJsLqNi3\nFyFK649kyxY3Axg0qMUm6mv7Cxbkr5h7Ir9Dsj6JTKfzNYqQPP0Izj33XF5//XXWrl0LwM6dO3nv\nvfc49dRTWbduHR988AFA3BrAF110EQ899BAATU1NbNu2rV166FGjRvHoo4+2+BI++eQTNm3axPDh\nw1m8eDG7du1i+/btPPfccxm/PtP880zCWPwHx8HChW0/EAiw48Svs2/zVg7evYnK3REhaqtWwdCh\nfPCTh7jol6GWKl3+yttca86J8gDV17cWk+nIHJVqHiOjBMnDj6B37948/vjjfPe732XPnj0A3HPP\nPZx88snU1tZy6aWXcsABBzBs2LA2At3nF7/4BaFQiEceeYSKigoeeughgsEg559/PmeccQbf/O
Y3\nue+++1izZg1B79oOOugg6urqOOuss/j2t7/NmWeeyRFHHME555yT+QtMxjGQj60cHL4J/VjV1a2O\nXX+rrta/1jS0fOaWypq2DmBvawLdyBE6g8laUaE6cWL+Hb8+5sA1LKVz5kjH4WtmnzwSN7xx1ChX\nWzESEZgzh+cbgy2fqdUQbw8c2+64AhzBJqYym/k6jvHjM1cII11SLURjGEZ2MOGfR2LavWtrYenS\n9o2vvRaCwXaf2Tm3DmpqoH//lqaR/oFrmxcSHHUIjBuX1WuJFYYd671CW39gGOWKaDIVPPLA4MGD\ndeXK0l8LFg47p+zGjXDkkXD/H07nwHWr2zaqrobly9t8JuaK39pamDgxflWWfv1c8p0MTwFiRRRB\n/CgjS+lc3qxZs4ZTTz0VkbghDEYSqCrvvvsup512Wpv3ReQNVR3c0edN8y8AHnsMFi+G/fNqaV63\njjaiu1evNoIfEuTgCYXg9ddh4MCWpbT+sRTg/fdh6FA3SGSQWKacROYdv/9gK3fLke7du9PY2Eih\nKp7FgKrS2NhI9+7dO30Mi/bJM76QnMEUpjK7fYOZM1M7YDAIb74J4TA7xt/CgWvfQokIE21udmGh\nH3wAs2al13mP6DDsqirXhURRRonWH9jMoLTp06cPGzZswGp2pEf37t3p06dP5w+QjFc4H1s5RPuo\nuhQHIanRJqQlcqcZVHv1Spi6IRlmzFD9HSPbp4Xwt4MOUp08OSPX4adxqKlpjebp1s1FGsWK6Jkx\nozWFRSDQWgjeooEMIz2waJ/8kkweqtpa+NWkMA/qLQiKQKuWPnNmzBW7qTBiBFzVYwkTpYYmArSb\nZO/YAbNnw5QpaZ0HWk05jY2t5p79+9sXdPGpqnKTEHCPVVXuuUUDGUZuMOGfBToqVRgOwy23wKRJ\n8KOm2VTQ3GKWEYDhw9MW/NC6yKrvz0OsqXkNGT48dsMHH4QhQzLiC0g2mqex0aWwAPfY2Bj781VV\n5hcwjKyQzPQgH1sxm31mzHBmC3CPvklD1ZlFKivdvnNp0L1UtDX3BALZtXWMHdve/BO5HX102udP\nJptoIvNOLBOSmYAMIzkws09uiTTzxNN+w2G47TZnDjmXMHczHfG0fgUkEICHHsqul7POWxfQty9E\nJLFq4ZNP4Lzz0loXkExFsETpWmKZkMwEZBiZxaJ9MkBHqYt9oVVf3yr4X+ZCurIXQVEEqaxw5pcM\nmHs6JBRyWzgMF1wA+/a1b7NwIWzeDEuWZK0bHaVrsYyehpE9TPPPAPHSjY8Y4XLW+7b/rVudbeVB\nJtGNPQQ8J69UnwPLlhEeEMqtfTsYhFdegaOPjr1/6VLo0SPrq4PjYRk9DSN7mOafAeJpqNGDwqpV\nMJ9xDCIqH/hZZxEmmJ+8+8EgbNjgBPz//b+uA5Hs3u1mAStWZGV1cDLdM6FvGJnHNP8MEE9DHTHC\npVQWcY+160cxDpei2bfzAzB+fP5DHOvqXAmxkSNj73//fecLyEBYaDRWo9cwco9p/p0g1grUeBqq\nv4J98Z5RHLvGJWxrE88/dqxL2EaB2LeXLHECfnaM1cbg3v/Tn+DeezOiksdb6WurfA0ju5jwT5FU\nyiLW1ztNfoiGGUVbwa/A5pFjOaKuDohf+CQvzJoFY8bAddc5jT+aZcvg/PPhjjvSThERb8aTr9KT\nhlEumNknRVIxz/i+gOvEFXGONPUsYSSPjKhr0z6ZEMmcEQzCe+/B5Mmx96u6WcAxx6Rlr4kVFpt3\nE5hhlAEm/FMklXz0wSCsGzaOkM4DWgX/HxjJVT2WFEfo4qxZresCYrFhQ1rrAmL5Syznv2FkH8vn\n3wmStkcPGdKuItenYybyePVDLZ+NdayCtXd3VC9g4ECYOzdjvoCCvAeGUeAkm8/fhH+2GDWqfUUu\nEZdv35NmqRZBKQjCYbjmGqfxx2PsWBc9lIOuLHAWNcaPL7D7ZBh5woq55JMpUxKWYvRJtQhKQRAM\nwvr1TsB37Rq7zcKFMGhQVmM3/cXJ8+a57cILLVTUMFLBhH+mCYfhvvvav19d3U4bjmXbLhp7t78u\nYGz7AvKAW9GWYYkcuR5gwYK2WSkKcqA0jALGQj0zzYIF7W3iI0fGzJETL7yzYEI+k6GuzqWHiLUu\nYM8eZ4+54460cxZFm8hGjWq7PxAo4IHSMAoQE/7ZZvjwhMnRYi0OK7qUBrNmwYknwvTp8Omnbfet\nXevKRv7+9y5stJMXFm0OO/LI1kVxFRUZ8zMbRtlgDt9M4YenVFXBD37gbBJdurRmeSsXfJvM88+3\ndwp36eJ8ATfemPJMIJ5zvGhmSIaRIyzaJ5fU1rpE/U1N0K0bzJnjktGXs1SqrXUafzyqq2H58pQO\naeGfhtExOY32EZFLROTvIrJWRKbG2H+9iGwWkVXedlMmzlsQ+LHv+/a5YrR79kBjI+ER05hZHyzf\nCJRQyC0Oq652Gn80K1bAYYel5BAuqBXQhlHkpC38RaQCeBD4JtAf+K6I9I/R9DeqOtDbHk73vAWB\nr91GzZ7erhqRsIZv2RAKOe3+lVdcrqBotm51q4OvuKKMb5Jh5IdMaP7VwFpV/VBV9wK/Bi7PwHEL\nmrdrwzRPmECk2Fdgc9UpzH0zWNix+rkmGISnn3azgFgsXgxDh2akgLxhGMmRCeF/NLA+4vUG771o\nrhKRv4rIkyJyTAbOmzfCYdgyYaqrwuW95w8C/+vz23n0UZe/v+Bj9XPN8uVw3HGx9zU3u1nUySfb\nLMAwckCuFnk9B/RV1a8DfwTmx2okIiERWSkiKzdv3pyjrqXOq7PDnBlRjcsX/LOZTK2GaGqC73/f\nyg/GZN26+AVjwKWQHjo0KwOAFY0xjAhUNa0NCAJLIl5PA6YlaF8BbOvouGeffbYWJA0NuqeihzYh\n2gwt22cjx2qPHqoVFao9eqg2NOS7owVOQ4PqmDGqzmPSfquuzuhNbGhQ+36MsgBYqUnI7kxo/n8G\n+onI8SLSFfgO8GxkAxE5KuLlaGBNBs6bH2bPpkvTLgIozQiN9OKTsZM5YkmdFRtPBd8PUFMTe/+K\nFTBsWErO4ESafcHnTDKMXJPMCNHRBnwLeA/4APg3771/B0Z7z2cC7wBvAS8Dp3Z0zELR/BsaVGfM\n8DTFkSNVI7T9PXTRYZUNpkWmS0ODar9+8WcBIqqTJ3d4iESavWn+RrlAkpp/RoR/NrZcC/82Qj7i\nPV9ghKnW5giB1AwaplorKtznjAzQ0KA6caK74bEGgeHD40rtGTNaPxbvO4n1HRtGqZGs8LfcPsSv\ny+ubCp5tGsUQWouy+A7ex+RGi+bJJH5So0GDYheN8WsHX355uzxBfjZU/zv0v5PIVcGGYbRiwp/Y\n9mC/nOBMpvDNiOLr/uOW6pH0HRPixRFm3884ft6fWAOAqlsXsHhxm2ypsTKkRg7qFRXuo/v3l2fK\nJcOIxoQ/8bXGIGGGNLnc/BL5gepqei1fwrQc97OsC
<remainder of base64-encoded PNG data omitted>",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztnXuYFOWxuN/aZVkQb3HVSERdonhB\niSBEHVDEoxFjDGI45iKIl+iAxkSSXwT0xCPnaMBFnwRPEnU34oUsajwS0OSYQIyuqDuBoJKoYAQV\nAwoBQYwg1936/fF17/bMzszO7Mzu3Op9nn5muvvr7q/nUl1fVX1VoqoYhmEYpUVZrjtgGIZhdD0m\n/A3DMEoQE/6GYRgliAl/wzCMEsSEv2EYRgliwt8wDKMEMeGf54hIuYhsE5Ejs9k2C/06V0TWdPZ1\nugIR6SYiKiLV3vr9InJzF1z3ahFp6Ozr5AMisk5ERmT5nFHfm5EeJvyzjCd8/aVZRHYE1semez5V\nbVLVfVX1H9ls25UUmpBT1atVdXp77UTkRRG5ogu61OUU870Zjm657kCxoar7+u89zfhqVX0mUXsR\n6aaqe7uib6WCiJSralOu+2EY+Yxp/l2MiNwuIr8WkUdF5BNgnIiEROTPIrJVRNaLyP+ISIXXPtYk\nUe/t/72IfCIiERHpm25bb/+XReQtEflYRH4mIi8l0vZEZB8R+ZWIfCQibwCDY/b/SETe8a7zhoiM\n8rYPAH4OnOmNfj70to8SkeUi8i8R+YeI3JLkMztXRNaIyH+KyGYReVdEvhnYXy8ivxCRP4jIdu9a\nPUTkJyKyVkT+KSL3iEiPwDFTRWSDiLwPXB5zvXoRmRZY/1qgr6tF5DwRqQFCwH3efc3y2vYXkWdE\nZIuIvCkiYwLnOUREfued589AXxIgIn8UkYkx2173Prcy73vd6H13fxOR/gnOc7WIrPS+l7dF5OqY\n/Sndm4gcIyIac2zL6EBE+onIc959f+j9Vg5IdH+BcwwTkfdFpCyw7RIRecV7n/C/EedcUaMViRlx\ntvPdXBj4nNaJyPfb63vBo6q2dNICrAHOjdl2O7Ab+Cru4dsT+CJwGm4k9nngLeB6r303QIFqb70e\n+BAYAlQAvwbqO9D2UOAT4CJv3w+APcAVCe7lLqAB+AxwFLACWBPY/3Wgt3dPlwLbgM96+64GGmLO\n92/AiV77k71+Xpjg2ucCe4E7gUrv2E+BYwL3+RFOYJV5bX4GzPf6uz/wNHCb1/5CYD3QH+gFPB7n\nc5vmvR8KbAXO8c59BHCct+/F4OcF7Au8D4z3vovBwOZA+yeAR4F9gC94fWhIcM9XAc8H1k/2ztUd\n+AqwFDjA61N/4LAE5/kq7jcl3ue2A/hCB+7tGEBjzt3SBjjWO09377f1EnBXoO06YESc/gnuf3J2\nYNt84Ife+3T+G7F9bvndpfDdbAKGeu8PAk7Jtfzo7MU0/9zwoqr+VlWbVXWHqv5FVZeo6l5VfQeo\nA85KcvwTqrpMVfcAc4GBHWh7IbBcVZ/09v0UJ4AT8XXgdlX9SFXfw2nzLajq46q63runR3B/6CGJ\nTqaqz6rqG177vwKPtXPPzcCtqrpLVZ8F/gBcEtg/X1UjqtqMe4hdA0zy+vsvYAbgjxa+DsxW1RWq\nuh2YluS63wZ+qap/8vq6VlX/nqDtRcBbqjrH+y5fBhYA/+5pq6OBW1T1U1X9G/CrJNedB3xRRPp4\n65cC81R1t3d/+wPHA3j3sSHeSbzf2TvqeBb4E3BmB+4tKar6lnee3aq6Efd7SvZ9+scp7rv/FoCI\nHAiM9LbRgf9GIhJ+N97+PUB/EdlPVbeo6isduEZBYcI/N6wNrojI8SLyf54Z4l/AfwMHJzk++Ef/\nFKfVpNv2c8F+eH/CdUnO0zum3+8Fd4rIFSLyV294vhUnmBLegzecbxCRTSLyMU5LS3bPm1X105jr\nfy6wHuzbYTjtP9if3+E0Uoi599h7ieEI4O0k+4McBQzzr+ld9xu4z+6zQHmq11XVj3EPuG+IiOAe\nXHO9fYuA+4B7gX+KyH0isl+883jmjCWeqWMrcB6tn3M695YUETlMRB73TDj/Ah4i+fcZ5BFgjPeA\nHAMsUdV13nnT/W8kItl3A3AxMAr4h/e7PK0D1ygoTPjnhthUqrXA6zgzxv7Af+KGw53JesDXKvEE\nzOFJ2m/ACQuflnBSEfk8ThBdC1Sp6oHAm7TeQ7zUsY/htNsjVPUA4H6S33OViPSMuf4HgfXgNf6J\nM60dp6oHessB3nXA3Xvce4nDWuDoBPti72st8KfANQ9UF311vden5jSuC85E9C3gDNx/dXHLhVVn\nqeopwEk4s88PYg/2Pq8ncKOez3rfyyJaP+d07m27d859AtsOC7yvAXYBA7zf8BWk+Bv2RkEbcBr/\npbiHgU86/43tOJNavP4l+27wRhejcArC7/BGHsWMCf/8YD/gY2C7iJwATOiCa/4OOEVEvioi3YAb\ngEOStH8cuFlEDhQ3j+D6wL59ccJiE+45cg2eScLjn0CfGEfdfsAWVd0pIqfTapJJRBkwTUS6i4sX\n/zJOsLVBXaTP/cAsz8kqItJHRM4L3MtVnlbZC7g1yXVnA1eLyNmeo7WPiBwXuK/PB9o+BZwoIpeK\nSIW3nCoix3mmtQXAf4lITxE5CbisnXv+LdAPJ/Ae80ZneOc81fvetuMedM1xjq/E2eA3AU0iciHO\nLt+Re9vgLePEzScJ47Rpn/28vnwsIkcAP2zn3mJ5BPg+zm8T/F7T+W8sx40georIsTi/iU/C78Zr\nf6mI7O99T58Q//MsKkz45wf/Dxdx8glO0/l1Z19QVf+JG/b+BOf4Ohp4Fae9xeNWnMa8Bvg9MCdw\nrr/hHKxLvTbHAUsCx/4RWIUzUfhmqGuBGeIinm7GCeRkrMMJl/XAw7gQ2lVJ2v8/nFllKU54LMIJ\nUlT1t8AvgOdxDsQ/JjqJqjbi/Af/453nOVq191nAtzwzwk88U81IYJzXzw04rbsycM+fwQnW2cCD\nyW5YVXfiHhjnEq0NH+gdvxX3fazHfY+xx2/FCdT5wBacfft3Hbw39drejPMNHUP0d3wrcKp3nqdw\no7p0eATnkP6jqn4U2J7Of+MunBKyEXgA57j377W97+Zy4D3PtPRtr11RI54yYZQ4IlKOM6P8u6q+\nkOv+BBGRc4H7VbU6130xjGLBNP8SRkTO98w4lcAtuIiHpTnulmEYXYAJ/9LmDOAdnE14JHCxqiYy\n+xiGUUSY2ccwDKMEMc3fMAyjBMnbxG4HH3ywVldX57obhmEYBcXLL7/8oaomC9sG8lj4V1dXs2zZ\nslx3wzAMo6AQkWQz1lsws49hGEYJYsLfMAyjBDHhbxiGUYLkrc3fMIziZM+ePaxbt46dO3fmuisF\nTY8ePejTpw8VFXFr27SLCX/DMLqUd
evWsd9++1FdXY1LJmuki6qyefNm1q1bR9++CQvCJcXMPoZh\ndCk7d+6kqqrKBH8GiAhVVVUZjZ5M+JcgkQjMmOFeDSMXmODPnEw/QzP7lBiRCJxzDuzeDd27w5/+\nBKFQ6sc2NMCIEakfYxhGfmKaf4nR0OAEf1OTe21oSO04/6Fxyy3u1UYNRqGzYMECRIQ333wzabuH\nHnqIDz74IGmbZDQ0NHDhhRd2+PjOwoR/iTFiBJSXg4h7HTEiteM6+tAwjHzl0Ucf5YwzzuDRRx9N\n2i5T4Z+vmPAvQXxTYTomwxEjnJmovNy9pvrQMIxskG0/1bZt23jxxReZPXs2jz3WWq63pqaGAQMG\ncPLJJzN16lSeeOIJli1bxtixYxk4cCA7duygurqaDz/8EIBly5YxwvszLF26lFAoxKBBgxg6dCh/\n//vfs9PZTsJs/iVGQwPs3QuqToOfMyc1+30o5PwDZvM3uppM/FSJePLJJzn//PM59thjqaqq4uWX\nX2bjxo08+eSTLFmyhH322YctW7Zw0EEH8fOf/5y77rqLIUOGJD3n8ccfzwsvvEC3bt145plnuPnm\nm5k3L91qll2HCf8Swzf7NDW5B8Ds2TBoEGze3L5QD4VM6BtdTzyTY6a/w0cffZQbbrgBgG9+85s8\n+uijqCpXXnkl++yzDwAHHXRQWuf8+OOPufzyy1m1ahUiwp49ezLrZCdjwr/ECIXgggtgwQK3vmcP\nXHede58trcowsolvcvQ1/0xNjlu2bOHZZ5/ltddeQ0RoampCRLjkkktSOr5bt240NzcDRMXZ33LL\nLZx99tnMnz+fNWvWtJiD8hWz+Zcghx0Wvd7cbI5cI3/xTY633ZYd5eSJJ57gsssu47333mPNmjWs\nXbuWvn37csABB/Dggw/y6aefAu4hAbDffvvxySeftBxfXV3Nyy+/DBBl1vn44485/PDDAeckzndM\n+OcRXTX5avx4p0GJQLdu0Y7cqiq49lq3WDinkS+EQnDTTdkZlT766KNcfPHFUdvGjBnD+vXrGTVq\nFEOGDGHgwIHcddddAFxxxRVMnDixxeF76623csMNNzBkyBDKy8tbzjF58mRuuukmBg0axN69ezPv\naCeTtzV8hwwZoqVUzKUznFrtXc933oJ7X1UF3/2u6wNARQV8+9vuYWGmICNbrFy5khNOOCHX3SgK\n4n2WIvKyqib3TmOaf97Q1XH0QU3Kf795s/MB+OzZA7W1NqnLMIoRE/55Qj7E0Y8Y4bT9IH5IqPkC\nDKO4sGifPCFXcfS++aeqymn+P/sZvPoqbNgATz/tRiI2qcswig8T/nlEV8fR+36GXbtcxE9ZGVRW\ntvobLJGbYRQvWTH7iMgDIrJRRF5PsF9E5H9EZLWI/E1ETsnGdY22xEYMJYsgamhoFfzgXnftgmnT\nXPtsRlgYhpFfZEvzfwj4OTAnwf4vA/285TTgXu/VyJDYqJ1gxNCsWTBpUuIIoqqqVsEPLvSzuRme\neQZeeCE7EUc2ejCM/CQrmr+qLga2JGlyETBHHX8GDhSR3tm4dilTVwdnnQU/+pET+nPmREcMzZuX\nPIJo82Zn6gEn+I8+2q03Nyd38qY6HyG2f5GIFZIx8oPy8nIGDhzISSedxCWXXNIysasjBFM2P/XU\nU9xxxx0J227dupV77rkn7WtMmzatZd5BtuiqaJ/DgbWB9XXetihEJCwiy0Rk2aZNm7qoa4VJJALf\n+Y4Lx/TNNRAdMTRmTPIIohEjnI2/vBx69IAbb2xdT+TkTTWvf7z+zZljNQGM/KBnz54sX76c119/\nne7du3PfffdF7VfVlhQO6TBq1CimTp2acH9HhX9nkFehnqpap6pDVHXIIYcckuvu5DVz5rjsnD7l\n5W4yVnAafDicfFp87LT59tpD6vMRGhqiTUr+REirCWB0iE4cMp555pmsXr2aNWvWcNxxxzF+/HhO\nOukk1q5dy6JFiwiFQpxyyilccsklbNu2DYA//OEPHH/88Zxyyin85je/aTnXQw89xPXXXw/AP//5\nTy6++GJOPvlkTj75ZBobG5k6dSpvv/02AwcO5MYbbwTgzjvv5Itf/CJf+MIXuPXWW1vO9eMf/5hj\njz2WM844o1PSQ3dVtM/7wBGB9T7eNiMFYu3mkQg8+GDr/rIy+PnPW4V1UGi3F0EUuz+4Hm8W8Nat\nzkRUVpY8BNQfVeza1dq/AQPg4Yezl6DLKBE6cfr73r17+f3vf8/5558PwKpVq3j44Yc5/fTT+fDD\nD7n99tt55pln6NWrFzU1NfzkJz9h8uTJXHPNNTz77LMcc8wxfOMb34h77u9973ucddZZzJ8/n6am\nJrZt28Ydd9zB66+/zvLlywFYtGgRq1atYunSpagqo0aNYvHixfTq1YvHHnuM5cuXs3fvXk455RQG\nDx6clXv26Srh/xRwvYg8hnP0fqyq67vo2gVNvN99Q0P0TFwRJ1g767rdurnJXnv3Om3ezwk0a1bi\n/2CieQvxtplT2EhKJ+R03rFjBwMHDgSc5v/tb3+bDz74gKOOOorTTz8dgD//+c+sWLGCYcOGAbB7\n925CoRBvvvkmffv2pV+/fgCMGzeOurq6Ntd49tlnmTPHxcCUl5dzwAEH8NFHH0W1WbRoEYsWLWLQ\noEGAKzKzatUqPvnkEy6++OKW9NKjRo3K6H7jkRXhLyKPAiOAg0VkHXArUAGgqvcBTwMXAKuBT4Er\ns3HdUiD4u//bjmo+P/Q9vljWjcubD2ELB3I3k3hAw1nJcZ7our75xk8Dpeq2bd4cfUysEA+OIurq\nnAN6zBgXPho8pitzGhkFSLZzOtNq84+lV69eLe9VlS996UttyjzGO66jqCo33XQTEyZMiNo+a9as\nrF0jEdmK9vmWqvZW1QpV7aOqs1X1Pk/w40X5fEdVj1bVAapaOhnbMsQvvrKKao7mPQQob95Lb9Zz\nIiupYwJrm3vzvboToa4uJdNoKm1i0034dX8hvsknmSO4rg4mTIBFi9xrUEmy2sBGu2Q7p3OKnH76\n6bz00kusXr0agO3bt/PWW29x/PHHs2bNGt5++22AhDWAzznnHO69914Ampqa+Pjjj9ukhx45ciQP\nPPBAiy/h/fffZ+PGjQwfPpwFCxawY8cOPvnkE377299m/f5shm+eEwrBbRdE+PyC9wCILburQG82\nIGs2oBMm8GL529xCTUItOlVNO2i2qaqC733PCehEmT6TjcxjK9nNm+ecy9ApSp1RjOSgjNwhhxzC\nQw89xLe+9S12eeF0t99+O8ceeyx1dXV85StfYZ999uHMM8+MEug+d999N+FwmNmzZ1NeXs69995L\nKBRi2LBhnHTSSXz5y1/mzjvvZOXKlYS8e9t3332pr6/nlFNO4Rvf+AYnn3wyhx56KF/84hezf4Oq\nmpfL4MGD1VDVxkbdW9FdmyHhot7SDNqE6DxG67CyRp0+ve3ppk9XLS93h5SXa9w2HTmmsVG1Z0
+3\nv2dPt+5TW9vSRQW3Hnvs9OnRxxjFy4oVK3LdhaIh3mcJLNMUZKxp/vlMJAKTJlG+Z3fLJgWaRfhX\nj8P4dIfyOTbgV2QQr8XFLGB08wI+bBgLN9VHndLXtHftcmacqqr2u5GKdh4KOQewb9cPKmm+lu/v\n89eDx5qd3zC6mFSeELlYSl7zr61VLSuLVplBtbJStbGxRRu/mlp9m2ptimnXMiIYOzbuqSsq3Olj\ntfREtKedJ9P8DSOIaf7ZIxPNP68meZU6viP2tboITJwYPUsK4NRT4bnnIBRq0cYfLA9zUs93eXdy\nbetMKgK+gblzoVcvGDiwxQu7ebM7dXtpHIK0l+TNHLdGOmieVhAsJDL9DM3s04Uki2cPOmLPb7oO\nRaOduyJRgfWxcfRHh8IwegBcdx3EhqJ9+in89a8wdChMnsyI0TVZc7IG6wF05JwW41969OjRg82b\nN1NVVYVIbAiDkQqqyubNm+nRo0eHz2HCv4uIRJyA27PHRczExuX7mvOVTXUMJE4c8Y03tpGObWzl\noZCrxDJuHDzySGtgfpCZMwkBf/pTTcZCNzZyaNYsN6pI9ZwW41+a9OnTh3Xr1mH5uzKjR48e9OnT\np8PHm/DvIvyMm+Be58yJFnS+GWfSjrsBZ7bxRfem88ZyaE1N6herr3dZ1a691mn8scycSWj2bEKf\n/SxU3QChcNs2KRBr6tm8OXoCVyrH+/UEdu5s+5kYxUlFRQV9+/bNdTdKHrP55xjfzg/w2nfrOKbb\nmqj9H3AY1S/Up5/PKhRy5p+xY13Kzlg2b4YVK9rOukqDTOsOB+sJqMLs2Zbp0zC6ChP+XcT48S7R\nmYh7HT8+elbsi2dO4fMzJ1C599MorX8a/xXlQE07uWF9PezYAZMnJ25z221pSd3gAyuTiZevvhq9\nvndv5jUEDMNIkVRCgnKxFGOoZ2y4ZDBcswmJmrC1e/+D9NputVGhkxmHU06e3DZ0NLj06BE3NDT2\nHrIR0tnY6KJW40SxJmwrkriNYRgOLNQzv4gX1TJiBISljvuYiMRE91TcOYPLFoe57TbnSG1oaFup\nK+1wypoaqK2Fo46Kv3/nThcaeuyxCVXsbIV0NjRE1yMIRLG2Yc4c5xtQbS0KYxhGZpjDtwtIFNUS\nIsJpzde1EfwMHw7hML4cDKZW9kP5OxyiGQ67JRJxUrS2tm1U0KpVLWGhxDias5WLJ/Y8ydJDG4aR\nfUzz7wLmzHFKdZS2HInAtGlIc1O04C8rg0AN0KCmvXcvXHVVlpIbhkJw770uhDQRM2fCZz4DU6ZE\nHZaNBIvpnGf8ePeAEHGv48d37JqGYQRIxTaUi6VQbP6ppD0I2ra7d1f9W63b2OzZ+VuWsrI2Wc+6\nJG1Cba3qQQcl9wcMHJhTY7slfzOM1CBFm3/OhXyipRCEfyqCOZgRU0R14kRVHT26Rag2g75Gf71X\nJurDE+NLti4TfO05hMG1MQwjb0lV+JvZJwNScX4GY+F79IDrBkXgySej2rzIcH7Q4176jY9v+2gv\nr07WaM8hDM4U1Ldvh+cGGIaRH5jwz4BUJjnF2rYHzJ0a5WBVhH0mjs+f1AbhMKxZ4x4CifKurFnj\nJof17p3yQ8Di9A0jvxCNjfTIE4YMGaLLluV/tce0EpONHOlqGQbp3x/eeKOTepchkQhMnQqLFydv\nFycqKPY0nZnDx5LDGUYrIvKyqg5pr52FemZIyoVI4gl+gBtuyHqfskYoBM8/77T7GTOcxh+PmTPh\n/ffdbOI4JCvxCJkJb0sOZxgdw8w+XUEkEl/wT57ctqxVPhIOw7vvQmOjqwsQj7lzYb/9osJCfZKZ\nx5IVfk8FqyNgGB3DhH8nEWXjjjcl9bzzkppK8hI/ZfTYsfH3b9vmRgGnndbmsEQx/ZkK70yTyxlG\nqWJmn07A12Zv3TGFE7kHZVv0RK7zzoOFC3PVvcypr3ezkKdNg/Xr2+5fuhQGDYJ77okqPhPPHJPp\njOHYojZm8jGM1DDNvxNoaHCCfzIz2Y9t0TsnTixswe8TDsMHHyQeBSxfDmed5WoKJLHlZDJjOBJx\np58zxwS/YaSLRft0ApEIfH7ooRzKpralGF96qfikVCQCl1zinL7xqKxMnLUtg0uefbZL9AZu1BDr\nSDaMUiTVaB/T/LOIb+fvMW0Kh+JK1Cmtufm59NLilE6hEKxbl3gUsGsXTJqUcZB/0I/i+wp89uwx\nZ69hpIMJ/yzh2/n/70cRvrDoToAWrb9JurnIngShkEVDfb2LCJo4EU44IXrfX/7izEAnnggXX5z2\ngyA2KsgvGO9TUWHOXsNIB3P4Zgm/Hu3Y5jmUeSmafY3/vRt/wdE1BRDSmQ18z65fsd5Xz1Wder5i\nhVsWLEjL8R2vXvBzz7UGUo0fX5yDKsPoLEz4Z4lgPdogHw8cXjqCP0go1FqB5sEHW43zQRYtcmGh\nS5a0e7p4UUEpT7AzDKMNZvbJEj1ejXCTzOAVBrGL7jQjSPfuHHjPHe0fXKz4NQOeew5Gj47fZunS\nlBLFZauOgGEYDov28UgnxUCbtlOmoHfdRXOzsose/LDbLKZcvZmjxqdwslIiUYoLn9693dyBQpj1\nbBh5iuX2SYN08sPEtn3ra1PoM3cmApQDPdjlBP+9N3XlLRQGCxe69A933x3fDLR+vcsWunhx8TvH\nDSPHmNmH9FIMBNv+544pfG7uTIJjp7IynMZvxKemxtW0nDw5cZu5czsUEWQYRupkRfiLyPki8ncR\nWS0iU+Psv0JENonIcm+5OhvXzRbt5YcJxpf7ba+hjik4jT9qItcZZ5ipJxVqapIniluwAM480x4C\nhtFJZGzzF5Fy4C3gS8A64C/At1R1RaDNFcAQVb0+1fPmi80/nkkIoPe5/Tnq05Utgl8BKdYZvJ3N\nuHFO20+EiCs0X2iJ8AwjB3TlDN9TgdWq+o6q7gYeAy7Kwnm7lESlEuPmoifC57pvbmnT8vi87z4T\n/B2hvt5VDuvfH6qrne0siKrLFnrWWTYKMIwskQ3hfziwNrC+ztsWyxgR+ZuIPCEiR8Q7kYiERWSZ\niCzbtGlTFrqWObEmocvemAJnnEHF1o1Aq+CXQsnNn6+Ew66i2bvvuvDQeCUkFy+GYcPamIKsRKRh\npE82zD7/Dpyvqld765cBpwVNPCJSBWxT1V0iMgH4hqr+W7Lz5lNiN98k9PWtdRw9c4Iz8QDNCG/L\n0ZTdeGNpTuTqTOrqXJqIZL/PU08lMmuJVfIyjABdafZ5Hwhq8n28bS2o6mZV9WP77gcGZ+G6XUYo\nBDeNiHB0natS5aduUIQrZQ6PH2iCP+uEw85/Mnx44jZLlzL4zJ5ctrPOKnkZRppkQ/j/BegnIn1F\npDvwTeCpYAMR6R1YHQWszMJ1u45IxEXxbN0KtJp67uKHvFIZsoRinYVfQ7i2tq0fwKOiaSf36QTu\nYEpLpFaqZiAzFxkljapmvAAX4CJ+3gb+w9v238Ao7
/0M4A3gr8BzwPHtnXPw4MGaNwwcqOoMEC3L\nrgMP1enTVRsbc925EqGxUXX06Dbfg4I2gzaBbj2oWn81vFYrK1XLy1V79kz8/TQ2uv3ttTOMQgNY\npinI7azE+avq06p6rKoerao/9rb9p6o+5b2/SVVPVNWTVfVsVX0zG9ftEqZMcVWpYugeviJudJDR\nSYRCMH++mxtw4IFRu/y5FvtvWcPYxRN4dtdp7ZqBrPC7UerYDN9k1NW5EMNYCrH4erEQCsFHH7nv\nIEBwsl2IpfwfI5PWBLbC70apY8I/EVOmuDwzsYwdWxw1eAudhQudL6C6GgiE3HqvX2YRH5cdQOgX\n4+IebllCjVLHsnrGo64uvuAfPtw5II38YsoUmDmzzQOghYMPhqeeMglvlARWwzcBKUV4zJvXdltZ\nGdxRwrn58xkvT5D069dW8AN8+CEMHepGCe3UDTCMUqGkhH9sHdi4D4ApU+DVV6O3ibhZp6Y55i+h\nELz1VvJkce+950Z0U6Z0bd8MIw8pKeHfboTHyJHOweunljjsMFeB6qWXLHVDoRAKuYd3bAH5IDNn\nuvKRNgowSpiSEv7xIjx8M9DGkeOiqkwpsGX3vkQmzzeNvxBZscI55xOxdKkbBViyOKNEKSnhHxvh\nAc78s+Y/6jhk0dwWh6H/+sstX0tsHjLyn/r65GYgcMnizjzTvmSj5Cgp4Q/RqZt9M9AteisQHSXy\nIQcxlRqbAFTo+Gag2lo49VQiQNtLAAAcjklEQVTYb7+2bZqa4IILzAxklBQlJ/yDVFXBg03jOJwN\nbfbd2m2GTQAqJsJhWLIE7ror/v6tW50ZKJAu2nL/GMVMyRZwj0Rg+Xfq+AWuglSwItdrA8cy8Now\nt21uW9nLKHB8x/2MGbBmTdv9CxbAggXs3v8gfvXpDOo0bKmijaKkZDV/nTqFmXsnAcEUzfBI2VhO\nea2eSZNM8Bct4TC8+y7rxk6mGSHeNMeKf23hF3sn8EDTODP9GUVJaQr/KVMILZ5JL3YArQ7e1waO\n5XKpt2RfJUAkAsf+pobhZS/xZNlotlf3j9rvjwQvYy7z9GIurDLbj1FclKbwf+ihqERgzZU9kdpa\ntt9Tb8m+SgTf2f9Sc4iv6Xx+eP4bcZPFAYxqXsCACUNdoXnDKBJKT/hPmQIbN0ZtWnLad4kMCFuy\nrxJixAj3kAdXFODBByEybaGbG1BR0dIuqCQwdy707WtRQUZRUNTCv020RiQSFe2hwBqOYvhLNS3x\n/MFQUKN4CYXgqqta68Tv3euZ+err3ZCgthb237/tgWvWuKig3r3tIWAUNEUr/GPz+LxWF4FJk6C5\nGWi188/gZrPxlyjjx0OPHgnMfOEw3Hln4oM3bHAPATMFGQVK0Qr/YB6fy3bW0X/imW5KP07wNyHU\nMJlfEqaszGz8pUi7Zr5wuLVmwL77xj/J3LmWLdQoSIpW+Pt5fIaVRbhXJ1KmTVH775cJ3EQNZWVw\n7rlm4y9V2jPzRQaEmRF+l8iiTxLnCvKzhdoowCggirqYSyQCx361HwdtXh2VuqG5opJ/k+d4sSlk\nE3hKnEjEjRLjzenwTYe7d9P6O5l0WssIMi4VFXD22VbtzcgZqRZzKeoZvvu+FuEzm1e3rCsgFRWU\nPf8cMwgl/NMbpUFc4R74LcRLAR5assRp+E88Abt2tT3pnj0uO2z//i6zqGHkKUVr9olE4M/XzQFa\nZ/ACbDz76xAKWVSP0W59h4RF3uvrYedOmDw58clXrozKE2QY+UZxCv+6Oo4Z1Z+rmuqiUjf8gfOY\nPaI+x50z8oWEwj3A5ZfDNdckMA3W1DiHcP/+sM8+bQ9esADOOMMqhxl5SfHZ/L3i68Fi3s0ItUzg\n+5X38txzpu0brSSy+bdnEopL//5O449H794wbZpVhDM6ndIt4D5rFtA6M1OBvXTjV4znyitN8BvR\nJDL/tVvyMx4rVriRwOGHt923fn2blNGGkUuKT/hLa1yPb+75rvyc5T1DjB+fs14ZBUYqJqG4hMPw\nv//bmjsilgULYNgwMwUZOaf4hP8NN7S8FeCDsZOp/nHYwjmNtMgoz1MoBC+8ACefHH+/qisib6MA\nI4cUn80fnN1/3jwYM8ZsrEZuqatzpsj16121sFjKy+GrX3WRQ6adGFkgVZt/cQp/w8g3IhFXKL6p\nKf7+8nK45x5TVoyMKV2Hr2F0AhnX8/VNQaNHwwknQFnMX6+pCSZOhCOOMH+A0SWY5m8Y7dChsM/2\nqKuD665LPBLo1w8efthMQUbamObfDhlrckbJ0KGwz/YIh1tHAvEig1atgqFDYeTILFzMMNqSFeEv\nIueLyN9FZLWITI2zv1JEfu3tXyIi1dm4bkeJzfVvDwAjGemEfdbVOXmdUobnUAjmz3cPgerq+G0W\nLYJBg+xHWkJ0mWKqqhktQDnwNvB5oDvwV6B/TJvrgPu8998Eft3eeQcPHqydxfTpquXlquBep0/v\ntEsZRUJjo/udNDYmblNb635T/lJbm+YFysqiTxBcysrSPKFRiDQ2qvbs6eRSz57Jf2+JAJZpCrI7\nG5r/qcBqVX1HVXcDjwEXxbS5CHjYe/8EcI6ICJ1MoidohyfwGCVLKokA581Lvt7uBV58EYYPj7+/\nudnNED7rLBsFFDGdYmJMQDaE/+HA2sD6Om9b3Daquhf4GKjKwrUTksy0Y4Xajc5gzJjk6+0SCsHz\nz7sUEYl0o8WL3QPCHgBFSVcqpnmVz19EwkAY4Mgjj8zoXHFzsQeEfChkQt/ILn6IfsbzC8NhGDDA\nzQJesKDt/r17nUbzta+59NJGwRKbWNBXTLuk1kgqtqFkCxACFgbWbwJuimmzEAh577sBH+KFmSZa\nMrX5Z8N2Zhg5p7ZWtbo6sS+ge3fzBRQonSWj6EKb/1+AfiLSV0S64xy6T8W0eQq43Hv/78CzXic7\nDTPtGEVBOAzvvutMQaee6spEBtm92/kCeve2IvIFRlfa9+ORsfBXZ8O/HqfdrwQeV9U3ROS/RWSU\n12w2UCUiq4EfAG3CQTsDq9ZlFA3hMCxZAl//evz9Gza4h4DNDi4Ych14YjN8DaPQ6N3bCftE9Orl\nJo+ZPyDvSVRMKBNshq9hdCFdOmN8/Xo477zENQO2b4e5c12eIIsKymt86wR0fcaBvIr2MYxCpFNy\n/7THwoXuta4Opk+H995r22bdOpci4rzzWtsbeUdOfj+Y5m8YGZNTx104DGvWOIdwIhYtcgXmzR+Q\nl+Tq92PC3zAyJNeOO8A9BBob49cPBtixw80bsAdA3pGr3485fA0jC8Q67jrDkZcyU6Y4QR+PHj3g\niitg/HgLg8sjsvl7sUpehpEjcmXDbdOJ8eNh9er4+8vKYNQoKx9ZhFi0j2HkiDlzYOfO3E3eAZxA\nX7XKCfd4NDe71BHDhsG111pUUAlimr9hZJFIxA3dd+9265WV8NxzOVau6+pg9mx45RWXFygeInDp\npTY3oAuI
RJyCAK5Uw+bNuYnzt1BPw8giDQ2tlRlF4Mor88CqEg67JRJJnCxO1c0NWLXKzSQ2OoVI\nBM4+G3btat1WVuaUhK42D5rZxzCySDByw0/DkzcWFb9y2NixidssXQrjxnVdn0oMP6wzSHNzgeb2\nMQyjFT+h4DXXOM3/l7/Mw1Kh9fXOF3DMMXDCCW33z53risaYLyDr+MpBkLIyy+0Thdn8jUJmxgxX\nSKipyY0CbrutdRp/3nHaaU7jT8TYseYLyCJm8zeMIsbX8Pxwz3haXU7nAgRZssSZeubOjb9/7lxX\nYezxx/PAgVH45EshKRP+htEJtFeRKS/mAgSpr3flIa+7rtVjHcTPE9SvHzz8cH5Irzwmbx7sSTDh\nbxidRDINr70yoznBLx85daqrFRyPVavcQ6CxMQ86nJ/k3YM9AebwNYwckBf5gOIRLCK///6J202d\n2vU5iAuEXFfoShUT/oaRA/K+zGg4DB9/7KKC9tmn7f7Fi+Hmm90MYSsfGUXePthjsGgfwzDap72I\noMmToaam6/qT5ySy+XeFL8ASuxlGHlMIDsE21NXBvHlO69+5s+3+gw5ypqBwuOv7VgB0lS/AErsZ\nRp7iC4FbbsnDCWDJCIddRbAxY+Lv37LFFZEfNKiAbqrryDdfgAl/w+gEktX0zTchkDb19W7iV48e\n8fcvX+4igk480fwBAfLNF2BmH8PIMu0N7wslFDAlRo50ZSKTYTWEW8gnm79p/oaRZdrT7PM+0icd\nFi50zt59903cZtEiSxbnEQq5NB/58J2b8DeMLJPK8D4oBJKZiAqCmhr45BP3EBCJ32buXKsfnGeY\n2ccwOoFUhvd+gq8HH3Q1VgreBATJawYAHHKIK3JgYaGdhoV6GkYe49v9d+50dVSgALJ/pkOyRHFg\nYaGdiNn8DSOP8f0CvuAXyY8IkKzh1ww48MD4+/2w0AIxBaVimis0850Jf8PIAbF+gQkTisDkE0tN\nDXz0kcsTVF0dv83MmXDooXn9EGhvXkYk4urenH12Yc3dsKyehpED2kv5XFT4NYQThYVu2uQeApCX\nvoBkGVjjme/yJktrO5jmbxg5IhRygr+hoTA0xYxZuNCNAo46Kv7+e+5xIaO9e+fV5LBk0VuFbL4z\nh69h5IiimuyVLnV1MHFiq9SMRx5NDkuWqM3/DsvL4aqrYPz43H6PVsbRMPKcvCzo0lUEC8e88w58\n+GHbZHGLFkFVVV5EBQUL8/j57caMcd0qVPOdCX/DyBHt1fktyMyf6eAXjoHEoaF+VNDvf++ih3L8\nQdTVue5Aq/siHM55tzpERjZ/ETlIRP4oIqu8188kaNckIsu95alMrmkYxUKyNA8Fm/mzo9TXOzNP\nIhYsgLPOyvkHMW9e8vVCIlOH71TgT6raD/iTtx6PHao60FtGZXhNwyh65sxxVpCCzfzZERYudLWB\njzkm/v49e+DSS11cZY4eArHZrBNlty4IVLXDC/B3oLf3vjfw9wTttqV77sGDB6thFDONjao9e6qW\nl7vXxsbW7ZWVqs4bqtq9e/S+6dNb14uWyZNbP4B4S0VFzj6E2lrV885zr/kIsExTkLGZav6fVdX1\n3vsNwGcTtOshIstE5M8iMjrDaxpGUZAo+2dDg8v143PBBa0J4ErGFFRT40YBoxOIiz174Etfyqop\nKNUZun5Nm0LPTNGu8BeRZ0Tk9TjLRcF23hMnUdzWUepCjy4FZonI0QmuFfYeEss2bdqU7r0YRkGR\nKH58xAjoFgjFePrpVudvQReBSZdQCObPd3MDTj0VymLE1fbtrqTk0KEZzwsoqQerR7vCX1XPVdWT\n4ixPAv8Ukd4A3uvGBOd433t9B2gABiVoV6eqQ1R1yCGHHNLBWzKMwiCRwzcUcokv/ezITU2tUT/5\nVAmqywiHYckSuPded/PxmDAho8lhJfdgJXOH71PA5d77y4EnYxuIyGdEpNJ7fzAwDFiR4XUNoyhI\nVNxj/HhXJTEo6IMPi1mzSmhmsE84DC+8AMOHx9+/YYN7CHSgcEwpPlgzmuErIlXA48CRwHvA11V1\ni4gMASaq6tUiMhSoBZpxD5tZqjq7vXPbDF+j1EllVmm6M4OLZu5Ae+UjJ09OO09QsXw2ls/fMIqU\nGTOcbbqpKb0aAEWXTqKuDm691Wn88aisdA7hPEkR0VVYPn/DKFI6aqIoOrt2OAzr1yeeHLZrlxsd\n7Ltvh3wBhZafP11M+BtGgdHRAvBFa9f2s4UmqhmwfXvavoBg9M/ZZ+d0XlmnYWYfwyhygrZsKA67\ndkIiEbj8cli1Kv7+FH0BQdMauMirHj0Kw1RmWT0Nw4hr5y+KGsGJCIXgrbfYXt2ffd5bCYAE9991\nF7z/vgsd/drXEj4I/FGSX6RFtfgyr5rZxzCKmKKz86dAJAKHbFzBRKllPYdFzzxtbnbZQ1evdtXD\n+vePew7ftHbRRc5MVlZWZKYyTPgbRlFTtHb+JPgPvDoNc2T5eiLDJzvpXVbWOnPOZ+VKN5165Mi4\n51q40D0vAL773eLR+sGEv2EUNbHOYSjuCBZo+8CTO2rgxRfh9ttdVtAYtKkJXbSIf/U/LWp7Q4ML\nGFJ1D4Cf/rS4Pjez+RtGkeNXoSq6OP8E+A+8aMd2oBTXK684jR+XjEy8131XLmV73xPpNfBYmDyZ\nESNClJW1av5+mo1i+cxM8zeMEiASgWnTnCZbCvb/RGkzAFixAsaOjUoUJ96yz5oVrnDMsGGEXqvj\nF7+AigrXtLKyuMxmJvwNo8jxNf5nnnFabDE6L9Omvp7X7n2RZoRgOuIWj4AqTJhAeO5ZvPzzCLff\nXnwjJRP+hlHk+A5QX/Cfe27xCbJUiJ2x+7vNIc4qe4n7mMjzJEgWt3gxA647k5tGRIru87JJXoZR\n5JSKrT8Z8T4DiN721tem0GfuzLjH7+3Ri/cPG8y/brqDAeH8/vAst49hlCixGm6q6SCKOZdNcL7D\nzp2uRnLs59Kn3qseFpMyWoHynds5cs1iTpwwlI0j008ZnY+Y5m8YRURHtfxiHx1EIs7HsXu3W6+s\nhOeeS3KPU6bAnXeCaktEEAR8A2PHQn19p/a5o5jmbxglSEdn9Bb7TOBQCK66qnWO1969TvtPONKp\nqYGXXmoZBbRxCM+d69JFF3DGNxP+hlEkRCLwj3+4CavpzugttpnAkYiTy0HZHKyOVl4ODz7YTs3e\nUAiefx7xUkb7UUEtD4DFi+G++2DYsIJ8ANgkL8MoAurq4DvfcRE93brBV78Khx2W+vHxJ0YVHpGI\n0+hnz4Y9e9y2Bx5onZzl3+M//gG//GX0SCfhPS9ciCQrHKPqRgHf/37a1cNyiWn+hlHgRCJw/fXO\nlNHc7ITe737nhFtCrTYOSSdGFQC+36K2tlXwg3sfa8YaNCjNkY5fOKa2Nn4R+T17XKK4qqoOF5Hv\nakz4G0aB09DQmncenF27uTm+/b4UInpiY1gqKpxwDxZomTQJZs1Kv
yBOSxH5fv3i79+yxRWOOfRQ\n5zTOY0z4G0aBM2KEi14pK3OC7oc/dOuxWm1Q+KUzIsgV6T6ogn6LykoYPRomTmw16cQ6tTdv7uBI\nx6sZwNixzokQj02b3EggjephXY3Z/A2jwIlnrx89uq39Pl5ET76aeDoSetqe38J/OPjnzNipXV/v\nlpEjXa3geMydC/vt57zNefZhW5y/YZQIhRTLHyyjWF7uzDPZqEAWLGmZ1Xuvq4O774Y332xNAxrL\neee5AgGdTKpx/ib8DaOE6DThl2U68qDy762qypl0cnKPU6Y4c08iDjrIPdnC4U7rggl/wzAKmnQe\nVP7DYteu1gR2lZWJHxqd+hCcMgUeecQNW9avj98mxULyHcFm+BqGkZBCiPpJJ/Q0mLkU3Guimcqd\n7viuqYG1a2HevKiaAVHMnOl8ATmMCDLhbxglRqbCLx8fHL4z15e1yWoWdFkqi1DIlY9MFBa6bVvS\nIvKdjUX7GEaJkUj4pWIGyaXTOJmpJhjp057NP+tRP8nww0IjEZg61aWEiGXlSjj2WHj44S51Upjw\nN4wiJ1Zoxgq/qqrkAj14/Jw5LiWyambhouna3IM2fRGXvmLy5Ohj/VrF7ZGTVBZeniDGjXPhn7Gs\nWgVDh0J1tbN1daJD2MeEv2EUMYk09aDwix0JzJnTug9aj+/WzdnS/RiR8vKOac0dGT00NLQ6c8GV\n2X366Y4/fFJ9UGSd+no4/HD4yU9cPo5Y1qxxM4Tnzev0sFCz+RtGEZPIxBN0psbay2fPhh/9yOUq\nmzkz+nhfXom4FMnQvv0/1kfgjx7SsblXVbXdtmdPO2mZ85WaGtd5L1toXBYt6vzZwaqal8vgwYPV\nMIzMaGxU7dlTtbzcvTY2xm9XW6taUaEqoup0e7eUl6tWVra+du/eeq7a2vbPHXv92lp3Dv/8lZWJ\n++QfP3GiaxfbN1Dt1q39e8trGhtVhw9ve2PgbrgDNwUs0xRkrJl9DKOISdW+vXlztEnHRxWuvBKO\nPLLVxJPIXBTPBBPbZt681iR0Iu7cycpKnnNOq48hHk1NmfsfcorvC6irc0OxLVta96l26k2Z8DeM\nIido307kaA06gUVa1c/KyrZpaYLv24uaiXUujxnjkmL664MGObNNvAdTbJZOv18+FRVuW1NTERSg\nCYfdMm6cmyCmCj17dupNZTTDV0QuAaYBJwCnqmrcKbkicj5wN1AO3K+qd7R3bpvhaxjZpT1Hq18I\nBWD//WH5ciesBwxIPHJIJWontk0wDcOkScn74/e3vBwGDoRly9wIRcT5RcePL4x0FWmR4fTjVGf4\nZmSXxwn944AGYEiCNuXA28Dnge7AX4H+7Z3bbP6GkR0aG1WnT3e28/LyVlv+9Olt2/XsqVpW5tqU\nlTn7fGWle19R4Wz22WL69OT9Cfa9sTF1/0WpQ1fY/FV1pfekSdbsVGC1qr7jtX0MuAhYkcm1DcNo\nn1jtuZv3j49nJomXIsGviKXq1r/zHTcSyIaWncpkq9iQzGIoNZkvdIXN/3BgbWB9HXBavIYiEgbC\nAEceeWTn98wwipygwxXgmmtanbeJ8t0Hk6N16+aO9Y9vbk7fB5nIitGRyVY5i88vQtoV/iLyDBCv\nFPR/qOqT2eyMqtYBdeBs/tk8t2GUIrHadbKaIolSJLz2Wmtx+MrK1HyQqdr10xHmhZKOulBoV/ir\n6rkZXuN94IjAeh9vm2EYnUy62nU8YRwKJXf6QrSwf/VVePDB1glh/qghGI5ZV+fCPseMSS2TQSEV\noikUusLs8xegn4j0xQn9bwKXdsF1DcOgY6aSWC072Tlic+knols3d766OhepA63VD9t7uBRSCcpC\nISPhLyIXAz8DDgH+T0SWq+pIEfkcLqTzAlXdKyLXAwtxkT8PqOobGffcMIxOIV0tO9ZRHI/ghK5p\n06L3zZ7tTEvJrtelmThLhIxy+6jqfFXto6qVqvpZVR3pbf9AVS8ItHtaVY9V1aNV9ceZdtowjM4j\n3Xz3sbmBgpSXu6VHD+dvAGfqCfK5z7V/Pd98ddttZvLJFjbD1zCMKNLVsoN+ha1b4ac/dYK8shJm\nzWqbW9+38fs2/wEDXALL9q5nkT7ZxWr4GobRhkwiazpyrEXyZA8r4G4YhlGCWAF3wzAMIyEm/A3D\nMEoQE/6GYSQktgqXUTxYtI9hGHGxWbXFjWn+hmHEpb14fxsVFDam+RuGEZdk8f42Kih8TPgbhhGX\nZEnhLNdO4WPC3zCMhCSaVWu5dgofE/6GYaRNRwqxGPmFCX/DMDqE5dopbCzaxzAMowQx4W8YhlGC\nmPA3DMMoQUz4G4ZhlCAm/A3DMEoQE/6GYRglSN4WcxGRTcB7HTz8YODDLHYnFxT6PRR6/6Hw76HQ\n+w+Ffw+56P9RqnpIe43yVvhngogsS6WSTT5T6PdQ6P2Hwr+HQu8/FP495HP/zexjGIZRgpjwNwzD\nKEGKVfjX5boDWaDQ76HQ+w+Ffw+F3n8o/HvI2/4Xpc3fMAzDSE6xav6GYRhGEkz4G4ZhlCBFJ/xF\n5HwR+buIrBaRqbnuT7qIyAMislFEXs91XzqCiBwhIs+JyAoReUNEbsh1n9JFRHqIyFIR+at3D/+V\n6z51BBEpF5FXReR3ue5LRxCRNSLymogsF5Flue5PuojIgSLyhIi8KSIrRSSvEmAXlc1fRMqBt4Av\nAeuAvwDfUtUVOe1YGojIcGAbMEdVT8p1f9JFRHoDvVX1FRHZD3gZGF1g34EAvVR1m4hUAC8CN6jq\nn3PctbQQkR8AQ4D9VfXCXPcnXURkDTBEVQtykpeIPAy8oKr3i0h3YB9V3ZrrfvkUm+Z/KrBaVd9R\n1d3AY8BFOe5TWqjqYmBLrvvRUVR1vaq+4r3/BFgJHJ7bXqWHOrZ5qxXeUlBakoj0Ab4C3J/rvpQi\nInIAMByYDaCqu/NJ8EPxCf/DgbWB9XUUmOApJkSkGhgELMltT9LHM5ksBzYCf1TVQruHWcBkoDnX\nHckABRaJyMsiEs51Z9KkL7AJeNAzvd0vIr1y3akgxSb8jTxBRPYF5gGTVPVfue5Puqhqk6oOBPoA\np4pIwZjgRORCYKOqvpzrvmTIGap6CvBl4DueSbRQ6AacAtyrqoOA7UBe+SCLTfi/DxwRWO/jbTO6\nEM9OPg+Yq6q/yXV/MsEbqj8HnJ/rvqTBMGCUZzN/DPg3EanPbZfSR1Xf9143AvNxZt1CYR2wLjBi\nfAL3MMgbik34/wXoJyJ9PQfLN4GnctynksJzls4GVqrqT3Ldn44gIoeIyIHe+564AII3c9ur1FHV\nm1S1j6pW4/4Dz6rquBx3Ky1EpJcXMIBnLjkPKJgIOFXdAKwVkeO8TecAeRX00C3XHcgmqrpXRK4H\nFgLlwAOq+kaOu5UWIvIoMAI4WETWAbeq6uzc9iothgGXAa95NnOAm1X16Rz2KV16Aw970WNlwOOq\nWpDhkgXMZ4H5TpegG/CI
qv4ht11Km+8Ccz1F9B3gyhz3J4qiCvU0DMMwUqPYzD6GYRhGCpjwNwzD\nKEFM+BuGYZQgJvwNwzBKEBP+hmEYJYgJf8MwjBLEhL9hGEYJ8v8BHhamjqJDlqQAAAAASUVORK5C\nYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -618,7 +634,7 @@
"colab_type": "text"
},
"source": [
- "Oh dear! The graph makes it clear that our network has learned to approximate the sine function in a very limited way. From `0 <= x <= 1.1` the line mostly fits, but for the rest of our `x` values it is a rough approximation at best.\n",
+ "Oh dear! The graph makes it clear that our network has learned to approximate the sine function in a very limited way. The predictions are highly linear, and only very roughly fit the data.\n",
"\n",
"The rigidity of this fit suggests that the model does not have enough capacity to learn the full complexity of the sine wave function, so it's only able to approximate it in an overly simplistic way. By making our model bigger, we should be able to improve its performance.\n",
"\n",
@@ -631,7 +647,11 @@
"metadata": {
"id": "oW0xus6AF-4o",
"colab_type": "code",
- "colab": {}
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 255
+ },
+ "outputId": "8e677f2e-25c6-4933-d8ff-0499a0a22bd6"
},
"source": [
"model_2 = tf.keras.Sequential()\n",
@@ -647,10 +667,34 @@
"model_2.add(layers.Dense(1))\n",
"\n",
"# Compile the model using a standard optimizer and loss function for regression\n",
- "model_2.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])"
+ "model_2.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])\n",
+ "\n",
+ "# Show a summary of the model\n",
+ "model_2.summary()"
],
"execution_count": 0,
- "outputs": []
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Model: \"sequential_1\"\n",
+ "_________________________________________________________________\n",
+ "Layer (type) Output Shape Param # \n",
+ "=================================================================\n",
+ "dense_2 (Dense) (None, 16) 32 \n",
+ "_________________________________________________________________\n",
+ "dense_3 (Dense) (None, 16) 272 \n",
+ "_________________________________________________________________\n",
+ "dense_4 (Dense) (None, 1) 17 \n",
+ "=================================================================\n",
+ "Total params: 321\n",
+ "Trainable params: 321\n",
+ "Non-trainable params: 0\n",
+ "_________________________________________________________________\n"
+ ],
+ "name": "stdout"
+ }
+ ]
},
{
"cell_type": "markdown",
@@ -667,7 +711,7 @@
"metadata": {
"id": "DPAUrdkmGq1M",
"colab_type": "code",
- "outputId": "34ad91e0-229b-479c-bd65-12ad1ed1c660",
+ "outputId": "bd73e2e2-b5f7-472d-b054-fdb86ff87126",
"colab": {
"base_uri": "https://localhost:8080/"
}
@@ -683,16 +727,14 @@
"text": [
"Train on 600 samples, validate on 200 samples\n",
"Epoch 1/600\n",
- "600/600 [==============================] - 0s 422us/sample - loss: 0.5655 - mae: 0.6259 - val_loss: 0.4104 - val_mae: 0.5509\n",
+ "600/600 [==============================] - 1s 1ms/sample - loss: 0.6993 - mae: 0.7257 - val_loss: 0.4758 - val_mae: 0.6040\n",
"Epoch 2/600\n",
- "600/600 [==============================] - 0s 111us/sample - loss: 0.3195 - mae: 0.4902 - val_loss: 0.3341 - val_mae: 0.4927\n",
- "...\n",
- "Epoch 598/600\n",
- "600/600 [==============================] - 0s 116us/sample - loss: 0.0124 - mae: 0.0886 - val_loss: 0.0096 - val_mae: 0.0771\n",
+ "600/600 [==============================] - 0s 153us/sample - loss: 0.4000 - mae: 0.5489 - val_loss: 0.3766 - val_mae: 0.5306\n",
+ "...",
"Epoch 599/600\n",
- "600/600 [==============================] - 0s 130us/sample - loss: 0.0125 - mae: 0.0900 - val_loss: 0.0107 - val_mae: 0.0824\n",
+ "600/600 [==============================] - 0s 150us/sample - loss: 0.0116 - mae: 0.0860 - val_loss: 0.0104 - val_mae: 0.0804\n",
"Epoch 600/600\n",
- "600/600 [==============================] - 0s 109us/sample - loss: 0.0124 - mae: 0.0892 - val_loss: 0.0116 - val_mae: 0.0845\n"
+ "600/600 [==============================] - 0s 150us/sample - loss: 0.0115 - mae: 0.0859 - val_loss: 0.0104 - val_mae: 0.0806\n"
],
"name": "stdout"
}
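
The hunk above shows only the streamed training log; the `fit` call that produces it sits in unchanged context outside the hunk. Below is a self-contained sketch of a setup consistent with that log; the data generation, variable names, and `batch_size` are assumptions, not part of this diff:

```python
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers

# Assumed sine-wave data split matching the log's
# "Train on 600 samples, validate on 200 samples".
x = np.random.uniform(0, 2 * np.pi, 800).astype(np.float32)
y = np.sin(x)
x_train, y_train = x[:600], y[:600]
x_validate, y_validate = x[600:], y[600:]

# Same architecture the summary above reports (16 -> 16 -> 1).
model_2 = tf.keras.Sequential([
    layers.Dense(16, activation='relu', input_shape=(1,)),
    layers.Dense(16, activation='relu'),
    layers.Dense(1),
])
model_2.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])

# Emits one "loss / mae / val_loss / val_mae" line per epoch, as above.
history_2 = model_2.fit(x_train, y_train, epochs=600, batch_size=16,
                        validation_data=(x_validate, y_validate))
```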
@@ -706,14 +748,14 @@
},
"source": [
"## Evaluate our new model\n",
- "Each training epoch, the model prints out its loss and mean absolute error for training and validation. You can read this in the output above (note that your exact numbers may differ): \n",
+ "Each training epoch, the model prints out its loss and mean absolute error for training and validation. You can read this in the output above:\n",
"\n",
"```\n",
"Epoch 600/600\n",
- "600/600 [==============================] - 0s 109us/sample - loss: 0.0124 - mae: 0.0892 - val_loss: 0.0116 - val_mae: 0.0845\n",
+ "600/600 [==============================] - 0s 143us/sample - loss: 0.0115 - mae: 0.0859 - val_loss: 0.0104 - val_mae: 0.0806\n",
"```\n",
"\n",
- "You can see that we've already got a huge improvement - validation loss has dropped from 0.15 to 0.015, and validation MAE has dropped from 0.31 to 0.1.\n",
+ "You can see that we've already got a huge improvement - validation loss has dropped from 0.17 to 0.01, and validation MAE has dropped from 0.36 to 0.08.\n",
"\n",
"The following cell will print the same graphs we used to evaluate our original model, but showing our new training history:"
]
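
For reference, the two quantities in that log line are the standard regression metrics; a minimal NumPy sketch follows, with illustrative sample values only:

```python
import numpy as np

def mse(y_true, y_pred):
    # Mean squared error: the training loss configured above.
    return np.mean((y_true - y_pred) ** 2)

def mae(y_true, y_pred):
    # Mean absolute error: the metric reported next to the loss.
    return np.mean(np.abs(y_true - y_pred))

# Illustrative check: an MAE near 0.08 means predictions are off by
# roughly 0.08 on average, on a sine wave spanning -1 to 1.
y_true = np.sin(np.linspace(0.0, 2.0 * np.pi, 200))
y_pred = y_true + np.random.normal(scale=0.08, size=y_true.shape)
print(mse(y_true, y_pred), mae(y_true, y_pred))
```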
@@ -723,7 +765,7 @@
"metadata": {
"id": "SYHGswAJJgrC",
"colab_type": "code",
- "outputId": "efcc51f6-f1f1-490a-ffba-ed283586f83e",
+ "outputId": "e7000158-aa8a-47ce-f372-e2b0c41c34e9",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 851
@@ -746,7 +788,7 @@
"plt.show()\n",
"\n",
"# Exclude the first few epochs so the graph is easier to read\n",
- "SKIP = 100\n",
+ "SKIP = 80\n",
"\n",
"plt.clf()\n",
"\n",
@@ -778,7 +820,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzt3Xl8VOX1+PHPyQ4JEAhRtmBAEQg7\nRDQiJYgiasUflVpwQayI0rpUy1epK0WtuFQRS61LRVEUF6qioNSyiGhklUU2QQwS1hDWsGQ9vz/u\nzWQIWSaQySTMeb9e88q9zzxz73nuTObM89xNVBVjjDEGICTQARhjjKk5LCkYY4zxsKRgjDHGw5KC\nMcYYD0sKxhhjPCwpGGOM8bCkYKqUiISKSLaItKzKuoEkIueISJUfuy0il4hIutf8BhHp7Uvdk1jX\nayLywMm+vpzlPi4ib1T1ck3ghAU6ABNYIpLtNVsXyAEK3PnbVHVqZZanqgVATFXXDQaq2rYqliMi\nI4AbVDXVa9kjqmLZ5vRnSSHIqarnS9n9JTpCVf9XVn0RCVPV/OqIzRhT/Wz4yJTLHR54T0TeFZFD\nwA0ikiIi34nIfhHZISITRSTcrR8mIioiie782+7zn4vIIRFJE5FWla3rPn+5iPwoIgdE5EUR+UZE\nhpcRty8x3iYim0Rkn4hM9HptqIg8LyJZIrIZGFDO9nlQRKaVKJskIs+50yNEZJ3bnp/cX/FlLStD\nRFLd6boi8pYb2xqgR4m6D4nIZne5a0RkoFveCfgH0NsdmtvjtW3Her3+drftWSLysYg09WXbVERE\nBrnx7BeRuSLS1uu5B0Rku4gcFJH1Xm29QESWu+W7ROQZX9dn/EBV7WEPVBUgHbikRNnjQC5wFc6P\niDrAecD5OD3N1sCPwB1u/TBAgUR3/m1gD5AMhAPvAW+fRN0zgEPA1e5z9wJ5wPAy2uJLjJ8ADYBE\nYG9R24E7gDVACyAOWOD8q5S6ntZANhDttezdQLI7f5VbR4CLgaNAZ/e5S4B0r2VlAKnu9LPAfKAh\ncBawtkTda4Gm7ntynRvDme5zI4D5JeJ8GxjrTvd3Y+wKRAH/BOb6sm1Kaf/jwBvudHs3jovd9+gB\nYIM73QHYAjRx67YCWrvTS4Ch7nQ94PxA/y8E88N6CsYXC1X1U1UtVNWjqrpEVRepar6qbgZeAfqU\n8/oPVXWpquYBU3G+jCpb99fAClX9xH3ueZwEUiofY3xSVQ+oajrOF3DRuq4FnlfVDFXNAsaXs57N\nwA84yQrgUmCfqi51n/9UVTerYy4wByh1Z3IJ1wKPq+o+Vd2C8+vfe73vq+oO9z15ByehJ/uwXIDr\ngddUdYWqHgPGAH1EpIVXnbK2TXmGADNUda77Ho3HSSznA/k4CaiDOwT5s7vtwEnubUQkTlUPqeoi\nH9th/MCSgvHFVu8ZEWknIjNFZKeIHATGAY3Lef1Or+kjlL9zuay6zbzjUFXF+WVdKh9j9GldOL9w\ny/MOMNSdvs6dL4rj1yKySET2ish+nF/p5W2rIk3Li0FEhovISneYZj/QzsflgtM+z/JU9SCwD2ju\nVacy71lZyy3EeY+aq+oG4M8478NudziyiVv1ZiAJ2CAii0XkCh/bYfzAkoLxRcnDMV/G+XV8jqrW\nBx7BGR7xpx04wzkAiIhw/JdYSacS4w4gwWu+okNm3wcuEZHmOD2Gd9wY6wAfAk/iDO3EAv/1MY6d\nZcUgIq2Bl4BRQJy73PVey63o8NntOENSRcurhzNMtc2HuCqz3BCc92wbgKq+raq9cIaOQnG2C6q6\nQVWH4AwR/h2YLiJRpxiLOUmWFMzJqAccAA6LSHvgtmpY52dAdxG5SkTCgLuBeD/F+D7wJxFpLiJx\nwP3lVVbVncBC4A1gg6pudJ+KBCKATKBARH4N9KtEDA+ISKw453Hc4fVcDM4XfyZOfrwVp6dQZBfQ\nomjHeineBW4Rkc4iEonz5fy1qpbZ86pEzANFJNVd9//h7AdaJCLtRaSvu76j7qMQpwE3ikhjt2dx\nwG1b4SnGYk6SJQVzMv4M3ITzD/8yzg5hv1LVXcDvgOeALOBs4Huc8yqqOsaXcMb+V+PsBP3Qh9e8\ng7Pj2DN0pKr7gXuAj3B21g7GSW6+eBSnx5IOfA5M8VruKuBFYLFbpy3gPQ7/JbAR2CUi3sNARa//\nAmcY5yP39S1x9jOcElVdg7PNX8JJWAOAge7+hUjgaZz9QDtxeiYPui+9AlgnztFtzwK/U9XcU43H\nnBxxhmaNqV1EJBRnuGKwqn4d6HiMOV1YT8HUGiIywB1OiQQexjlqZXGAwzLmtGJJwdQmFwGbcYYm\nLgMGqWpZw0fGmJNgw0fGGGM8rKdgjDHGo9ZdEK9x48aamJgY6DCMMaZWWbZs2R5VLe8wbqAWJoXE\nxESWLl0a6DCMMaZWEZGKzswHbPjIGGOMF0sKxhhjPCwpGGOM8ah1+xSMMdUrLy+PjIwMjh07FuhQ\njA+ioqJo0aIF4eFlXfqqfJYUjDHlysjIoF69eiQmJuJcnNbUVKpKVlYWGRkZtGrVquIXlMKGj4wx\n5Tp27BhxcXGWEGoBESEuLu6UenVBkxTStqbx5NdPkrY1LdChGFPrWEKoPU71vQqK4aO0rWn0m9KP\n3IJcIkIjmDNsDikJKYEOyxhjapyg6CnMT59PbkEuBVpAbkEu89PnBzokY4yPsrKy6Nq1K127dqVJ\nkyY0b97cM5+b69ttF26++WY2bNhQbp1JkyYxderUqgiZiy66iBUrVlTJsqpbUPQUUhNTiQiN8PQU\nUhNTAx2SMcZHcXFxni/YsWPHEhMTw+jRo4+ro6qoKiEhpf/OnTx5coXr+eMf/3jqwZ4GgqKnkJKQ\nwpxhc3is72M2dGRMNaiOfXibNm0iKSmJ66+/ng4dOrBjxw5GjhxJcnIyHTp0YNy4cZ66Rb/c8/Pz\niY2NZcyYMXTp0oWUlBR2794NwEMPPcSECRM89ceMGUPPnj1p27Yt3377LQCHDx/mmmuuISkpicGD\nB5OcnFxhj+Dtt9+mU6dOdOzYkQceeACA/Px8brzxRk/5xIkTAXj++edJSkqic+fO3HDDDVW+zXwR\nFD0FcBKDJQNj/K869+GtX7+eKVOmkJycDMD48eNp1KgR+fn59O3bl8GDB5OUlHTcaw4cOECfPn0Y\nP3489957L6+//jpjxow5YdmqyuLFi5kxYwbjxo3jiy++4MUXX6RJkyZMnz6dlStX0r1793Ljy8jI\n4KGHHmLp0qU0aNCASy65hM8++4z4+Hj27NnD6tWrAdi/fz8ATz/9NFu2bCEiIsJTVt2CoqdgjKk+\n1bkP7+yzz/YkBIB3332X7t270717d9atW8fatWtPeE2dOnW4/PLLAejRowfp6emlLvs3v/nNCXUW\nLlzIkCFDAOjSpQsdOnQoN75FixZx8
cUX07hxY8LDw7nuuutYsGAB55xzDhs2bOCuu+5i9uzZNGjQ\nAIAOHTpwww03MHXq1JM++exUWVIwxlSpon14oRLq93140dHRnumNGzfywgsvMHfuXFatWsWAAQNK\nPV4/IiLCMx0aGkp+fn6py46MjKywzsmKi4tj1apV9O7dm0mTJnHbbbcBMHv2bG6//XaWLFlCz549\nKSgoqNL1+sKSgjGmSgVqH97BgwepV68e9evXZ8eOHcyePbvK19GrVy/ef/99AFavXl1qT8Tb+eef\nz7x588jKyiI/P59p06bRp08fMjMzUVV++9vfMm7cOJYvX05BQQEZGRlcfPHFPP300+zZs4cjR45U\neRsqEjT7FIwx1ScQ+/C6d+9OUlIS7dq146yzzqJXr15Vvo4777yTYcOGkZSU5HkUDf2UpkWLFjz2\n2GOkpqaiqlx11VVceeWVLF++nFtuuQVVRUR46qmnyM/P57rrruPQoUMUFhYyevRo6tWrV+VtqEit\nu0dzcnKy2k12jKk+69ato3379oEOo0bIz88nPz+fqKgoNm7cSP/+/dm4cSNhYTXr93Vp75mILFPV\n5DJe4lGzWmKMMTVYdnY2/fr1Iz8/H1Xl5ZdfrnEJ4VSdXq0xxhg/io2NZdmyZYEOw69sR7MxxhgP\nSwrGGGM8LCkYY4zxsKRgjDHGw5KCMaZG69u37wknok2YMIFRo0aV+7qYmBgAtm/fzuDBg0utk5qa\nSkWHuE+YMOG4k8iuuOKKKrku0dixY3n22WdPeTlVzZKCMaZGGzp0KNOmTTuubNq0aQwdOtSn1zdr\n1owPP/zwpNdfMinMmjWL2NjYk15eTWdJwRhTow0ePJiZM2d6bqiTnp7O9u3b6d27t+e8ge7du9Op\nUyc++eSTE16fnp5Ox44dATh69ChDhgyhffv2DBo0iKNHj3rqjRo1ynPZ7UcffRSAiRMnsn37dvr2\n7Uvfvn0BSExMZM+ePQA899xzdOzYkY4dO3ouu52enk779u259dZb6dChA/379z9uPaVZsWIFF1xw\nAZ07d2bQoEHs27fPs/6iS2kXXYjvq6++8txkqFu3bhw6dOikt21p7DwFY4zP/vQnqOobinXtCu73\naakaNWpEz549+fzzz7n66quZNm0a1157LSJCVFQUH330EfXr12fPnj1ccMEFDBw4sMz7FL/00kvU\nrVuXdevWsWrVquMuff3EE0/QqFEjCgoK6NevH6tWreKuu+7iueeeY968eTRu3Pi4ZS1btozJkyez\naNEiVJXzzz+fPn360LBhQzZu3Mi7777Lq6++yrXXXsv06dPLvT/CsGHDePHFF+nTpw+PPPIIf/3r\nX5kwYQLjx4/n559/JjIy0jNk9eyzzzJp0iR69epFdnY2UVFRldjaFbOegjGmxvMeQvIeOlJVHnjg\nATp37swll1zCtm3b2LVrV5nLWbBggefLuXPnznTu3Nnz3Pvvv0/37t3p1q0ba9asqfBidwsXLmTQ\noEFER0cTExPDb37zG77++msAWrVqRdeuXYHyL88Nzv0d9u/fT58+fQC46aabWLBggSfG66+/nrff\nfttz5nSvXr249957mThxIvv376/yM6r92lMQkQHAC0Ao8Jqqji/x/HDgGWCbW/QPVX3NnzEZY05e\neb/o/enqq6/mnnvuYfny5Rw5coQePXoAMHXqVDIzM1m2bBnh4eEkJiaWernsivz88888++yzLFmy\nhIYNGzJ8+PCTWk6Rostug3Pp7YqGj8oyc+ZMFixYwKeffsoTTzzB6tWrGTNmDFdeeSWzZs2iV69e\nzJ49m3bt2p10rCX5racgIqHAJOByIAkYKiJJpVR9T1W7ug9LCMaYE8TExNC3b19+//vfH7eD+cCB\nA5xxxhmEh4czb948tmzZUu5yfvWrX/HOO+8A8MMPP7Bq1SrAuex2dHQ0DRo0YNeuXXz++eee19Sr\nV6/UcfvevXvz8ccfc+TIEQ4fPsxHH31E7969K922Bg0a0LBhQ08v46233qJPnz4UFhaydetW+vbt\ny1NPPcWBAwfIzs7mp59+olOnTtx///2cd955rF+/vtLrLI8/ewo9gU2quhlARKYBVwPl98mMMaYU\nQ4cOZdCgQccdiXT99ddz1VVX0alTJ5KTkyv8xTxq1Chuvvlm2rdvT/v27T09ji5dutCtWzfatWtH\nQkLCcZfdHjlyJAMGDKBZs2bMmzfPU969e3eGDx9Oz549ARgxYgTdunUrd6ioLG+++Sa33347R44c\noXXr1kyePJmCggJuuOEGDhw4gKpy1113ERsby8MPP8y8efMICQmhQ4cOnrvIVRW/XTpbRAYDA1R1\nhDt/I3C+qt7hVWc48CSQCfwI3KOqW0tZ1khgJEDLli17VPRrwBhTdezS2bXPqVw6O9A7mj8FElW1\nM/Al8GZplVT1FVVNVtXk+Pj4ag3QGGOCiT+TwjYgwWu+BcU7lAFQ1SxVzXFnXwN6+DEeY4wxFfBn\nUlgCtBGRViISAQwBZnhXEJGmXrMDgXV+jMcYc5Jq2x0ag9mpvld+29GsqvkicgcwG+eQ1NdVdY2I\njAOWquoM4C4RGQjkA3uB4f6KxxhzcqKiosjKyiIuLq7Mk8JMzaCqZGVlndIJbXaPZmNMufLy8sjI\nyDil4/ZN9YmKiqJFixaEh4cfV273aDbGVInw8HBatWoV6DBMNQn00UfGGGNqEEsKxhhjPCwpGGOM\n8bCkYIwxxsOSgjHGGA9LCsYYYzwsKRhjjPGwpGCMMcbDkoIxxhgPSwrGGGM8LCkYY4zxsKRgjDHG\nw5KCMcYYD0sKxhhjPCwpGGOM8bCkYIwxxsOSgjHGGA9LCsYYYzwsKRhjjPGwpGCMMcbDkoIxxhgP\nSwrGGGM8LCkYY4zxsKRgjDHGI2iSwsKF8PDDkJcX6EiMMabmCpqkkJYGjz8OOTmBjsQYY2ouvyYF\nERkgIhtEZJOIjCmn3jUioiKS7K9YwsOdv9ZTMMaYsvktKYhIKDAJuBxIAoaKSFIp9eoBdwOL/BUL\nWFIwxhhf+LOn0BPYpKqbVTUXmAZcXUq9x4CngGN+jIWwMOdvfr4/12KMMbWbP5NCc2Cr13yGW+Yh\nIt2BBFWdWd6CRGSkiCwVkaWZmZknFYz1FIwxpmIB29EsIiHAc8CfK6qrqq+oarKqJsfHx5/U+iwp\nGGNMxfyZFLYBCV7zLdyyIvWAjsB8EUkHLgBm+GtnsyUFY4ypmD+TwhKgjYi0EpEIYAgwo+hJVT2g\nqo1VNVFVE4HvgIGqutQfwVhSMMaYivktKahqPnAHMBtYB7yvqmtEZJyIDPTXestiScEYYyoW5s+F\nq+osYFaJskfKqJvqz1js6CNjjKlY0JzRbD0FY4ypmCUFY4wxHpYUjDHGeFhSMMYY42FJwRhjjEfQ\nJAU7+sgYYyoWNEnBegrGGFMxSwrGGGM8LCkYY4zxsKRgjDHGw5KCMcYYj6BJCnb0kTHGVCxoko
L1\nFIwxpmKWFIwxxnhYUjDGGOMRNElBBEJDLSkYY0x5giYpgNNbsKRgjDFls6RgjDHGI6iSQliYHZJq\njDHlCaqkYD0FY4wpnyUFY4wxHkGVFApDjvH9ttWkbU0LdCjGGFMjBU1SSNuaxs4jW1mxbQ39pvSz\nxGCMMaUImqQwP30+GpKLFoSRW5DL/PT5gQ7JGGNqnKBJCqmJqUhoAWg4EaERpCamBjokY4ypccIC\nHUB1SUlIoW18Nhpdl8nD5pCSkBLokIwxpsbxqacgImeLSKQ7nSoid4lIrA+vGyAiG0Rkk4iMKeX5\n20VktYisEJGFIpJU+Sb4LjY6hpb1zrGEYIwxZfB1+Gg6UCAi5wCvAAnAO+W9QERCgUnA5UASMLSU\nL/13VLWTqnYFngaeq0zwlWWHpBpjTPl8TQqFqpoPDAJeVNX/A5pW8JqewCZV3ayqucA04GrvCqp6\n0Gs2GlAf4zkplhSMMaZ8vu5TyBORocBNwFVuWXgFr2kObPWazwDOL1lJRP4I3AtEABeXtiARGQmM\nBGjZsqWPIZ8oPByys0/65cYYc9rztadwM5ACPKGqP4tIK+CtqghAVSep6tnA/cBDZdR5RVWTVTU5\nPj7+pNdl1z4yxpjy+dRTUNW1wF0AItIQqKeqT1Xwsm04+x6KtHDLyjINeMmXeE6WDR8ZY0z5fD36\naL6I1BeRRsBy4FURqWin8BKgjYi0EpEIYAgwo8Ry23jNXgls9D30yrOkYIwx5fN1n0IDVT0oIiOA\nKar6qIisKu8FqpovIncAs4FQ4HVVXSMi44ClqjoDuENELgHygH04+yz8xpKCMcaUz9ekECYiTYFr\ngQd9XbiqzgJmlSh7xGv6bl+XVRUsKRhjTPl83dE8DucX/0+qukREWuPnoR5/sKRgjDHl83VH8wfA\nB17zm4Fr/BWUv9jRR8YYUz5fdzS3EJGPRGS3+5guIi38HVxVs56CMcaUz9fho8k4Rw41cx+fumW1\niiUFY4wpn69JIV5VJ6tqvvt4Azj5s8gCxJKCMcaUz9ekkCUiN4hIqPu4AcjyZ2D+YEnBGGPK52tS\n+D3O4ag7gR3AYGC4n2Lym/BwUIWCgkBHYowxNZNPSUFVt6jqQFWNV9UzVPX/UQuPPgp3L+FnvQVj\njCndqdyO894qi6KahLkH4NphqcYYU7pTSQpSZVFUk+1HfgZg4eYlAY7EGGNqplNJCn69IU5VS9ua\nxqTv/w7AoKlDSduaFuCIjDGm5ik3KYjIIRE5WMrjEM75CrXG/PT55IceAiA3J4T56fMDG5AxxtRA\n5V7mQlXrVVcg/paamEp4xA/kAuGF9UlNTA10SMYYU+OcyvBRrZKSkMIT/Z0bu/2z/+ukJKQEOCJj\njKl5giYpAHRNaA9AmwadAxyJMcbUTEGVFKKinL/HjgU2DmOMqamCKinUqeP8PXo0sHEYY0xNFVRJ\nwXoKxhhTvqBKCkU9BUsKxhhTuqBKCkU9BRs+MsaY0gVlUrCegjHGlC6okoLtaDbGmPIFVVKIjHT+\nWk/BGGNKF1RJISTESQyWFIwxpnRBlRQAwiPzWbBpqV0l1RhjShFUSSFtaxrZ7OK7zavoN6WfJQZj\njCkhqJLC/PT5EHEIzY0mtyDXLp9tjDEl+DUpiMgAEdkgIptEZEwpz98rImtFZJWIzBGRs/wZT2pi\nKhJxBHLrEREaYZfPNsaYEvyWFEQkFJgEXA4kAUNFJKlEte+BZFXtDHwIPO2veMC5fHbXlmfTKroD\nc4bNsctnG2NMCf7sKfQENqnqZlXNBaYBV3tXUNV5qnrEnf0OaOHHeABoFteARqFnWUIwxphS+DMp\nNAe2es1nuGVluQX4vLQnRGSkiCwVkaWZmZmnFFRMDGRnn9IijDHmtFUjdjSLyA1AMvBMac+r6iuq\nmqyqyfHx8ae0LksKxhhTtnLv0XyKtgEJXvMt3LLjiMglwINAH1XN8WM8gCUFY4wpjz97CkuANiLS\nSkQigCHADO8KItINeBkYqKq7/RiLR1FSUK2OtRljTO3it6SgqvnAHcBsYB3wvqquEZFxIjLQrfYM\nEAN8ICIrRGRGGYurMln5WygogK82fefvVRljTK3jz+EjVHUWMKtE2SNe05f4c/0lpW1N499r3wMm\ncPm/hzL3j+/YUUjGGOOlRuxori7z0+dTELMFgNx9Z9gZzcYYU0JQJYXUxFTCG+4EIOxQazuj2Rhj\nSgiqpJCSkMJHIycCMOrcv9nQkTHGlBBUSQFgQOfzqFMHQg+1CnQoxhhT4wRdUhCBM86AUzwx2hhj\nTktBlxQAoupls3jTJrufgjHGlBB0SSFtaxobjy5iQ9o5pI6ZYInBGGO8BF1SmJ8+n8LIfQDkvvOe\nHZZqjDFegi4ppCamIgVRx80bY4xxBF1SACC3XqAjMMaYGinoksL89PnHXQxvysopgQvGGGNqmKBL\nCqmJqYQPut0z/9rSN2xnszHGuIIuKaQkpHBlz3ZwxR8AyD/UkKe/8eutoY0xptYIuqQA0CSmCcT9\n6Mzsac+nP35qvQVjjCFIk8KwLsMIabLWmdnViUIttENTjTGGIE0KKQkpjO5/I0Tvgm09UZT9OfsD\nHZYxxgRcUCYFgNjIWGj3Cay7BjZexjPfPMP4mVO55hq7h7MxJngFbVJITUwlpP0nUBAJU79Ad7fj\ngTGh/Oc/8MkngY7OGGMCI2iTQkpCCgNSGxUXHGyB5tYFIDo6QEEZY0yABW1SAHio/x+KZw4mQG4M\n4Fxe2xhjglFQJ4WUhBRGfHiXM3MgAfKcLsKKXzYGMCpjjAmcoE4KAL/vORRidsCa38HheAA+XvVl\ngKMyxpjACPqkkJKQQpchH8OetrC/NQArf/nJTmYzxgSloE8KAC890hXOLu4daF5du/SFMSYoWVLA\n6S1ceufH0PEdpyA3hhkbZlhvwRgTdCwpuP76m2GEDL4R6mZCbgyFFFpvwRgTdPyaFERkgIhsEJFN\nIjKmlOd/JSLLRSRfRAb7M5aKpCSkMLDdQIjIhpz6oFhvwRgTdPyWFEQkFJgEXA4kAUNFJKlEtV+A\n4cA7/oqjMu678D4IPwKrboRPXrfegjEm6Pizp9AT2KSqm1U1F5gGXO1dQVXTVXUVUOjHOHyWkpDC\n2ef95MysuBmw3oIxJrj4Myk0B7Z6zWe4ZZUmIiNFZKmILM3MzKyS4MryxqR46PC+M5Pj7FsYMWOE\nJQZjTFCoFTuaVfUVVU1W1eT4+Hi/ruuixBQuuMI9o/mn/gCs3bOWPm/0scRgjDnt+TMpbAMSvOZb\nuGU13t9GXOJMvD8dMnoCkFeYZ/sXjDGnPX8mhSVAGxFpJSIRwBBghh/XV2X6nns+vxv9jTOzbpCn\n3PYvGGNOd35LCqqaD9wBzAbWAe+r6hoRGSciAwFE5DwRyQB+C7wsImv8FU9lTXumF22Tt8H6QaBO\nWSGFjPnfCUfWGmPMacOv+xRUdZaqnquqZ6vqE27ZI
6o6w51eoqotVDVaVeNUtYM/46ms+0Y1h6y2\nsHqop2zBLwu4/3/3BzAqY4zxn1qxozlQrrsOuvQ8CB+/Cdt6eHoMz3zzjA0jGWNOS5YUyhEVBfNn\n1yeybh68uhT+7exnUJTrpl9nicEYc9qxpFCB2Fj4w63ObTrJuBDmPQoL/4/0A+n0ntzbEoMx5rRi\nScEHDz4I4RHuSddfjYX/PQ0KBVpgh6kaY04rlhR8EBcH27eV2FQHWgLwyYZPeGXZKwGIyhhjqp4l\nBR81bgzbt3sVzH0MCsJQlNs/u90SgzHmtGBJoRKaNoW8PAiNyIFVw+D73wNYYjDGnDYsKVRSWBi8\n9vF6Z+aHIbC9O2CJwRhzerCkcBKGX96F7hdvhvS+8Moy+No5y9kSgzGmtrOkcJJefqp18cycJ+Fg\nM8ASgzGmdrOkcJKSk2Gr990intsGK26EQkFRbvvsNrschjGm1rGkcApatIDCQjin2w6n4OMpMK4Q\nVjnXSnr6m6fp+q+udoKbMabWsKRwikRg8ZymjH51BsT+7BSuHOZ5fuWulfR6vZcNJxljagVLClWg\nYUN4ZsRAXvxsLjRfBLs7wdsz4ccrAGw4yRhTa1hSqEJ39LqFP4+Kh0PNYdMV8M5MJzns7Aw4w0lt\nJrZh1GejbEjJGFMjiaoGOoZKSU5O1qVLlwY6jDIVFsJzz8F/VswhbWq/4if+kARnrPPMhkgIL135\nEiN7jAxAlMaYYCMiy1Q1uaJ0dm+9AAAVTklEQVR61lOoYiEhMHo0fPt2P56b82bxE5O/hr2tnF7D\n4TgKtZDbPruNPm/0sV6DMabGsKTgR/dcfBOvfbqKxFtHw9E4mLgZ/rUS3v3UU2fBlgVc+PqFlhyM\nMTWCJQU/u+XXnfn5lWfp+muvL/yMFPji7zB5vufchgXpTnJo+vemDHpv0AkJIisLLrwQNm2q3vir\n2+LFzhFdK1YEOhJjgpMlhWqy7JMU3p33PYmXfOEUfHcvbOlTfG7Dp6/A8pvZ+dWv+Xj9x1z4+oW0\neqGV51DWadMgLQ2eesp5eVYW1LLdQT756CPn78yZgY3DmGBlSaGahITAkNRu/PzlAL5YtYQ6Tbcc\nX2H5rTDjdfj0VZj5IqweQvr+dG777Dbino5jzKwnAPh+79eMnvYSjRvDv/7lvHTmTHjggWpu0Gnu\nhx+cHss33wQ2jn//Gx57zJneuhW+/z6w8VSHjz+GKVMCHUXwsqQQAJd1Oo/sjLP4Zksa5z15HQy6\nAWK8btaw5A6Y/i68vAQW/4G9R/eSnRkLwLKtq/j7jFkA3PXs1zT9e1MG/ymNJ5/Ko+fLKX4/Se7w\nYaeXUpUWL4bQUOdLr6j3I1K166isop7K9OnVt87CQucLsbCwuGzECHjkEWe6dWvo3r3i5ezcCR98\nUPXxbdwI8+eX/pwqTJwIGzac+noGDYKbbjr15VS1os9mTo5vvfQjR8reXr748kvnR0F1C6v+VRpw\neg4Xtkxh8ZgU0ram8ddPH2JZxmr2vDALjsQ7lXYkO4/dHSE91Sk71NRz17f8zb3Z+a/X4efuUBjO\nkg0ZLNl5G6P/O5rQTQMJT1hJaL09J6w7KiyK2KhYcvJziI+Op1FUI5rENKFb025kHckiNTGVC1qk\nIOIcXnvoEDz6qPPa4cPhww8hM9O58VBlZGfDV1/BlVceXz5xovNF+OWXxV+IeXmVW3ZVWrYMvv3W\nmY6IKLve4cMQEwOTJzvbpTIee8y5TMrNNxeXvfoq3H576cvLyYH8fGc6P9+5hHtZBg2C776D3bsh\nNxfeew/uucdJtA88AJ06wdChlYsX4Nxznb+qTuI54wzncwzw3//C3XfDgAHw+eeVX3ZlrF7t9OTK\na8Mvvzh3TIyOLn9Ze/dCo0bOdF4e/OMfzntQp87x9XbudO6n8uKLcOedzmf2zjvLX/aYMU791auh\nY0enLDe3/M+Ut/79nb+33OJb/api5ynUMG9/uZKxz2/hQNfH2DPzTudmPr7qNR7OXOXsyF58J7Rc\nAJEHoctb0GYmRB6GbT1g/lj47e+gMAxUICwHwo85y8hsB5Oc8ynajR3E+rHOIH+TZ5sCsHO0c52n\nelc/TN0L3+Dgx48T3et1wpr8eFwoBYcaE1J3HxJa4Ck7NHs0h7/8M/EPnEdoowxP+f63/8mxFYOo\nP3g0R5dfQ97mFKJbr6Aweid1+48nvGnxz8/8rJYUHm5EREtnT3T+znMpzIkmvNlaoqKE+tKM1X+e\nS/2rH6Huhc4YRFES3Hd0HzkFOaVuuoJ9zQltuI2osCjS//Szpzwk6hAtxiWTG3LwuPpRYVFEZiWz\n4bEPkDr7OPOxJE98BVlnEd5sLVLnABKa76mfUC+RY7ubsjvqW7bckw5A24eu5WDeXvJDD3Dwk8fI\nWTOAMy6bTJ0Bf+XYMdj1F6fe2Y9ezk9/db5t4x9MJrThNk8s2XPv4PDcOznjsbaIwK6H16FHY4+L\nN+zMDTS87Voyx60EIHFCq+O2SVRYFC0btASFn77twqFFgwk561uiUl8AoPBILLsfcT4XzR/qw7bH\nvyK82Voiz1rB2cOeYfPUuzm0cDgRbb6i0W1DSt3GAFooIIpI6T9Ofl7ZgpXjXwQg4W+dyPxwLPUG\njHc+L1sv5JyO+1l46xwAer/Wl18O/ex5T4uWl3XgCFvv30BUu69o96c/kZOfQ2RY5Al/9yy/kIxX\nJtHk7kFEtVpB2LI72fTWvcT+eryn3Y3qNKJbk27Mn9WYbf+eULw9m/1A43svLbWNRXGsnzCBY+v7\nULfPS0Sn/hPS+5D55j84b/wQdkd+d0Lcx/Kc7XB48TWEH0lg0ZRBALQZfyHRsUfJyc+hbeO23Hfh\nfaQkpJS5jcvi63kKlhRqsLStadz36kyWfnApOXuaoe3fg68fOrmFhR+G9tOLk0ziPOd+EN7i10Bm\nh+L5ix+Euc6+DK68HVosgtfSoCDKKQvJhcII55pPf+gAB86CuA2QVxeezC5ezpWjnLLvb4ZM9ydT\nvzFOjyg0FzZeDru6Qp0s59Ddknr8Czq+ByF5MHmhU/ZICBxqBs+7yaXr6xCdCd/cX9zeZkuh3nY4\nkAA3DIDDZ0Kjzc7zm/rD3nOgzSz45SL46C0Y1s/plX3xwvHrP28SnD8RGh+f+NhwJbz7mTN9wXNO\nIi4MP77OgLug5yQIKYS0u2H2BBhyNUz75MR2Fmm6zEni6/8f7HbOhueGy+Dt2e50fzjny+L6Y93/\n4bvOdtr3VCYcLaUbF3YU8t2fwI8KZLaHsGNQb4fzo+CT1+DwGfDjVcWvOX8CXH4PLL0VPnOHJkNz\noCCyuM5FT8LCvzjTIXnQfzScsRri10K9XVAQCiEFsKM7vDkHukyBK+526heEAQqhBc70Y15dxMv+\n5GyvDu8563h5BaQ8C2mjnefbfQTbe0CHDyCrDeQ0gPb/gfoZ8L477ndLCix4EK65Hva0gxaLoVDg\nWCx8McH5
f+j7MBxtBN/dU7zu+xtCnf1wpCG8O8Mp23pR8fNnfeV8ruM2Qmg+rB4CaffCTX0hJN/5\nofXOp7Dx1ye+D4OvdT4nEYcgcb5TtrcNvPVfaPgTbO95fP2bL4KzvoH9CVA/g/CwML4a/lWlE4Ov\nSQFVrVWPHj16aLB6eenLmvzPFG1+7VPafGyynjk+QSPPe1O57B6l/QfO48Kn1Ong+/NRcGJZVJbz\nN/xQ1a2n7u6yn7v0z0riHN+XFb3D+XvxX5QR551cPNcOUvrfq3SZrDT/Tolb79vrovYqiXOL52O2\n+b7OJsudv96vR5Ub+znxtPm0uKzbq05sviw35ZnS37/SHpfcp4TkVH57RRxQhvdW6m9xlt9w4/HP\n19mjhB5VWi5wtmv9LVXzuQk7cmJZg3Tnb7NFSt1dFS8jJFfpf4/S98Hy68VsV4ZeWTzf/RUlcr9y\n9udK9M4ylu21LaN3KmGHy19Hp7eVK/6ghGcrV4xSxqJ/W/C3Sn9/AEtVK/6OrbBCTXsEc1Ioy7e/\nfKt/W/A3ve/L+7T9P9rrmc800TPGdtCGI6/VMx4/W5s820QbjzlfG/3xKq1z2WPKpaOVbq8pI3oq\nHd5VOr2lnP+8Muh655/3/OeVnhOVRhuUzlOcD2bsZiXpPeeL595mzoc/9GjxF2N4tpLyrCJ5xf+Y\nt3d2llX0hSP5Ff8ztvnM+Rv/gxOP559vW/EXzXFfAIeVGy4t/Usr9qfyk1STZU78JesULavXk0rT\nJb5/GXWceuKXLao0XuPEUtprQo86ybxo/vbOSs8XlF7jlSFXKQ9FKO3+U/x8ZeKJ2e5s+zp7ir9w\ny/qiQp0vssvuVvo86sx3/ffx2/KONkrbj4rLuv7bibFo3rsdZT3a/efkEkx5jzNXnPhF3/fBstfT\ncJNXG153PrPxq5WR3YuTcGmPq29ytmnC18Wfec/7eMx5lHzNpX9W7m+g/P5CZ/kl68RuLn1dceuV\nRj8qPf5VXNZ6tnJPcw0fF67f/vJtpb8nfE0Kfh0+EpEBwAtAKPCaqo4v8XwkMAXoAWQBv1PV9PKW\nGUzDR/6StjWN+enziasbx/c7vmdn9k72Ht1L5pFMIsMi2Xd0HyJS4Th8kdyfUghr9gMhdQ6Rn9ma\nkJhMJOLIcfsTCo80oPBwHIWHGxFSfyd6OI7Qxj+jOTFI+FGnUmgeIVHZRIYW7wM4tKYXEW3nA8qx\n5YOJ7PQZmhuN5kVBfiSE5hEW9wtHsxpzYFN7Z/iq4WZiw5oS1eAQuQW57N2XD8tuhZYLiT7agZCD\nLQlPWEnE2c4JgloYQv72DhQei6H+uStpWLc+Ow7sYtdR9z4ZBWHEFrRFMjtRmN0YaT2X/fnbYOMV\nEJpL7LlrCTl0lmc/x7HVVxDWbDWhDbehR2PJj3K2L0cbOMM1e9tQN6IOR9jjDHNF7yY2P4mohntL\n3f+RfTCE7FWXOLF0nUL0wa6E7+tESN0DhDbYTsG+BCTyMHnbOhLZbi6aH0FovUwIP3rcUVyFR+sh\n4Tkc+akbh7Y3hYIIqLedmHbfkf1TJzj7vxBagCBo1tnQaBPRP44g5HAzorpNJ7RelrM994Q4Bzw0\nWe0seM01ELeRhi13EpLZmdBGW5zPQlZLctZcRkTiYrQwnLAzNhFSdz+qcOTHCzgUsgUa/QjLboM9\nbeHcmdBuBuSHw4KHoelyGpz1C2G5cRSeuZx9x7Jg23nOMGD4Eee9DsuhUeNCCvY158CPnaDVXMhu\nQr2zfuLQj92cYaGWC53PSnZTZz9awnfOvrW8upD4NRxuTGx0NAf4BS0IgbxoYsOacXR1f3JyC6HT\nO1AYSr1Gx4iOcPZca0EYhOSjRxpybOVVhJ+1HKSQ7G+vJ+fcd+CHa6HOXhpeMYHISHG229G9sKML\nZLWF/Cganj+LyPAwQrOSkMNnkhG6wLmYZmguNPE6e3P9/4OCcEj6kF+16s34fuNr5z4FEQkFfgQu\nBTKAJcBQVV3rVecPQGdVvV1EhgCDVPV35S3XkoIpS1GyS01MPe6fpqzyk13eySwzbWsaU1Y6O76H\ndRlGSkJKpZbxyrJXmL52OtckXVMlF1EsuW7veaDcuCpT15cY4urGkXUk64S/pb2PU1ZOYWf2TgCa\nxDTxbMvy2lTW8kuup7T342S2e3mfw5KfgZKvK2pf0Y+0to3bcvk5l5e6PSqrJiSFFGCsql7mzv8F\nQFWf9Koz262TJiJhwE4gXssJypKCMcZUXk24SmpzwPsuxhluWal1VDUfOACccPiJiIwUkaUisjQz\nM9NP4RpjjKkVZzSr6iuqmqyqyfHx8YEOxxhjTlv+TArbgASv+RZuWal13OGjBjg7nI0xxgSAP5PC\nEqCNiLQSkQhgCDCjRJ0ZwE3u9GBgbnn7E4wxxviX3659pKr5InIHMBvnkNTXVXWNiIzDOV52BvBv\n4C0R2QTsxUkcxhhjAsSvF8RT1VnArBJlj3hNHwN+688YjDHG+K5W7Gg2xhhTPWrdBfFEJBPYUmHF\n0jUGTryWdO10urTldGkHWFtqKmuL4yxVrfDwzVqXFE6FiCz15eSN2uB0acvp0g6wttRU1pbKseEj\nY4wxHpYUjDHGeARbUvDvDYyr1+nSltOlHWBtqamsLZUQVPsUjDHGlC/YegrGGGPKYUnBGGOMR1Ak\nBREZICIbRGSTiIwJdDwVEZHXRWS3iPzgVdZIRL4UkY3u34ZuuYjIRLdtq0Ske+AiP5GIJIjIPBFZ\nKyJrRORut7zWtUdEokRksYisdNvyV7e8lYgscmN+z73WFyIS6c5vcp9PDGT8JYlIqIh8LyKfufO1\ntR3pIrJaRFaIyFK3rNZ9vgBEJFZEPhSR9SKyTkRSqrstp31SEOcOcJOAy4EkYKiIJAU2qgq9AQwo\nUTYGmKOqbYA57jw47WrjPkYCL1VTjL7KB/6sqknABcAf3e1fG9uTA1ysql2ArsAAEbkAeAp4XlXP\nAfYBt7j1bwH2ueXPu/VqkruBdV7ztbUdAH1VtavXMfy18fMFzu2Lv1DVdkAXnPenetviy42ca/MD\nSAFme83/BfhLoOPyIe5E4Aev+Q1AU3e6KbDBnX4Z5zanJ9SriQ/gE5xbtNbq9gB1geXA+ThnmIaV\n/LzhXAwyxZ0Oc+tJoGN342mB8wVzMfAZILWxHW5M6UDjEmW17vOFc+uAn0tu2+puy2nfU8C3O8DV\nBmeqqnsneXYCZ7rTtaZ97rBDN2ARtbQ97pDLCmA38CXwE7BfnTsHwvHx+nRnwQCZANwHFLrzcdTO\ndgAo8F8RWSYiRTdSro2fr1ZAJjDZHdZ7TUSiqea2BENSOO2o87OgVh1LLCIxwHTgT6p60Pu52tQe\nVS1Q1a44v7R7Au0CHFKlicivgd2quizQsVSRi1S1O85wyh9F5FfeT9aiz1cY0B14SVW7AYcpHioC\nqqctwZAUfLkDXG2wS0SaArh/d7vlNb59IhKOkxCmqup/3OJa2x4AV
d0PzMMZZokV586BcHy8NfXO\ngr2AgSKSDkzDGUJ6gdrXDgBUdZv7dzfwEU6yro2frwwgQ1UXufMf4iSJam1LMCQFX+4AVxt436Xu\nJpyx+aLyYe6RCBcAB7y6mgEnIoJzM6V1qvqc11O1rj0iEi8ise50HZx9I+twksNgt1rJttS4Owuq\n6l9UtYWqJuL8P8xV1eupZe0AEJFoEalXNA30B36gFn6+VHUnsFVE2rpF/YC1VHdbAr1zpZp24FwB\n/Igz/vtgoOPxId53gR1AHs6vh1twxnDnABuB/wGN3LqCc3TVT8BqIDnQ8Zdoy0U43d1VwAr3cUVt\nbA/QGfjebcsPwCNueWtgMbAJ+ACIdMuj3PlN7vOtA92GUtqUCnxWW9vhxrzSfawp+v+ujZ8vN76u\nwFL3M/Yx0LC622KXuTDGGOMRDMNHxhhjfGRJwRhjjIclBWOMMR6WFIwxxnhYUjDGGONhScEYl4gU\nuFfaLHpU2RV1RSRRvK56a0xNFVZxFWOCxlF1LmFhTNCynoIxFXCv1/+0e83+xSJyjlueKCJz3WvZ\nzxGRlm75mSLykTj3XVgpIhe6iwoVkVfFuRfDf92zohGRu8S538QqEZkWoGYaA1hSMMZbnRLDR7/z\neu6AqnYC/oFzhVGAF4E3VbUzMBWY6JZPBL5S574L3XHOtAXnuveTVLUDsB+4xi0fA3Rzl3O7vxpn\njC/sjGZjXCKSraoxpZSn49xcZ7N7cb+dqhonIntwrl+f55bvUNXGIpIJtFDVHK9lJAJfqnOjFETk\nfiBcVR8XkS+AbJzLGnysqtl+bqoxZbKegjG+0TKmKyPHa7qA4n16V+Jcw6Y7sMTrSqXGVDtLCsb4\n5ndef9Pc6W9xrjIKcD3wtTs9BxgFnpvyNChroSISAiSo6jzgfpzLUp/QWzGmutgvEmOK1XHvqlbk\nC1UtOiy1oYiswvm1P9QtuxPnLln/h3PHrJvd8ruBV0TkFpwewSicq96WJhR4200cAkxU514NxgSE\n7VMwpgLuPoVkVd0T6FiM8TcbPjLGGONhPQVjjDEe1lMwxhjjYUnBGGOMhyUFY4wxHpYUjDHGeFhS\nMMYY4/H/AZN6yxQ6gTLNAAAAAElFTkSuQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xl4VdXV+PHvIiMkYQpRhgQCikKY\nIQIRlSBocQCLUguiiEVR31pbrVVq+/pa2r4V60+tvtRKW+cBqVaKAqUtQkGlyCgIiCAECWMIBMKQ\nhIT1+2OfXC4hE5CTm3DX53ny5J5z9zln7XNvss7e+wyiqhhjjDEADUIdgDHGmLrDkoIxxpgASwrG\nGGMCLCkYY4wJsKRgjDEmwJKCMcaYAEsKpkaJSISIHBKRtjVZNpRE5EIRqfFzt0VkiIhkBU1vEJHL\nq1P2DLb1JxF59EyXr2S9vxKRV2p6vSZ0IkMdgAktETkUNNkIKARKvOm7VfXN01mfqpYA8TVdNhyo\n6sU1sR4RuRO4VVUzg9Z9Z02s25z7LCmEOVUN/FP2jkTvVNV/VVReRCJVtbg2YjPG1D7rPjKV8roH\n3hGRt0UkH7hVRDJE5D8ikiciO0XkORGJ8spHioiKSKo3/Yb3/hwRyReRxSLS/nTLeu9fIyJficgB\nEXleRD4RkXEVxF2dGO8WkU0isl9EngtaNkJEnhGRXBHZDAytZP/8TESmlZk3RUSe9l7fKSLrvfp8\n7R3FV7SubBHJ9F43EpHXvdjWAn3KlP25iGz21rtWRIZ787sB/wdc7nXN7Q3at48HLX+PV/dcEZkh\nIq2qs2+qIiIjvHjyROQjEbk46L1HRWSHiBwUkS+D6tpfRFZ483eLyG+ruz3jA1W1H/tBVQGygCFl\n5v0KKAKG4Q4iGgKXAP1wLc0OwFfAfV75SECBVG/6DWAvkA5EAe8Ab5xB2fOAfOAG770HgWPAuArq\nUp0Y/wY0AVKBfaV1B+4D1gLJQCKw0P2plLudDsAhIC5o3XuAdG96mFdGgCuBo0B3770hQFbQurKB\nTO/1U8ACoBnQDlhXpuzNQCvvM7nFi+F87707gQVl4nwDeNx7fbUXY08gFvg98FF19k059f8V8Ir3\nurMXx5XeZ/QosMF73QXYCrT0yrYHOnivlwKjvdcJQL9Q/y2E84+1FEx1fKyqH6jqcVU9qqpLVXWJ\nqhar6mZgKjCwkuXfVdVlqnoMeBP3z+h0y14PrFLVv3nvPYNLIOWqZoy/UdUDqpqF+wdcuq2bgWdU\nNVtVc4EnKtnOZuALXLICuArYr6rLvPc/UNXN6nwEzAPKHUwu42bgV6q6X1W34o7+g7c7XVV3ep/J\nW7iEnl6N9QKMAf6kqqtUtQCYCAwUkeSgMhXtm8qMAmaq6kfeZ/QELrH0A4pxCaiL1wW5xdt34JJ7\nRxFJVNV8VV1SzXoYH1hSMNWxLXhCRDqJyCwR2SUiB4FJQItKlt8V9PoIlQ8uV1S2dXAcqqq4I+ty\nVTPGam0Ld4RbmbeA0d7rW7zp0jiuF5ElIrJPRPJwR+mV7atSrSqLQUTGicjnXjdNHtCpmusFV7/A\n+lT1ILAfaBNU5nQ+s4rWexz3GbVR1Q3Aj3Gfwx6vO7KlV/QOIA3YICKfici11ayH8YElBVMdZU/H\nfBF3dHyhqjYGHsN1j/hpJ647BwAREU7+J1bW2cS4E0gJmq7qlNnpwBARaYNrMbzlxdgQeBf4Da5r\npynwj2rGsauiGESkA/ACcC+Q6K33y6D1VnX67A5cl1Tp+hJw3VTbqxHX6ay3Ae4z2w6gqm+o6gBc\n11EEbr+gqhtUdRSui/D/Ae+JSOxZxmLOkCUFcyYSgAPAYRHpDNxdC9v8EOgtIsNEJBL4IZDkU4zT\ngR+JSBsRSQQeqaywqu4CPgZeATao6kbvrRggGsgBSkTkemDwacTwqIg0FXcdx31B78Xj/vHn4PLj\nXbiWQqndQHLpwHo53gbGi0h3EYnB/XNepKoVtrxOI+bhIpLpbfsnuHGgJSLSWUQGeds76v0cx1Xg\nNhFp4bUsDnh1O36WsZgzZEnBnIkfA7fj/uBfxA0I+0pVdwPfBZ4GcoELgJW46ypqOsYXcH3/a3CD\noO9WY5m3cAPHga4jVc0DHgDexw3WjsQlt+r4H1yLJQuYA7wWtN7VwPPAZ16Zi4Hgfvh/AhuB3SIS\n3A1Uuvzfcd0473vLt8WNM5wVVV2L2+cv4BLWUGC4N74QAzyJGwfahWuZ/Mxb9Fpgvbiz254Cvquq\nRWcbjzkz4rpmjalfRCQC110xUlUXhToeY84V1lIw9YaIDPW6U2KA/8adtfJZiMMy5pxiScHUJ5cB\nm3FdE98CRqhqRd1HxpgzYN1HxhhjAqylYIwxJqDe3RCvRYsWmpqaGuowjDGmXlm+fPleVa3sNG6g\nHiaF1NRUli1bFuowjDGmXhGRqq7MB6z7yBhjTBBLCsYYYwIsKRhjjAmod2MKxpjadezYMbKzsyko\nKAh1KKYaYmNjSU5OJiqqoltfVc6SgjGmUtnZ2SQkJJCamoq7Oa2pq1SV3NxcsrOzad++fdULlMPX\n7iPvtgQbvMf6TSzn/WdEZJX385V3X3hjTB1SUFBAYmKiJYR6QERITEw8q1adby0F74ZlU3BPosoG\nlorITFVdV1pGVR8IKv8DoJdf8SzetpgFWQvITM0kIyXDr80Yc06yhFB/nO1n5Wf3UV9gU+kj97yH\nm9+Ae9ZseUbjbhdc4xZvW8zg1wZTVFJEdEQ088bOs8RgjDHl8LP7qA0nP04wmwqelCUi7XBPY/qo\ngvcniMgyEVmWk5Nz2oEsyFpAUUkRJVpCUUkRC7IWnPY6jDGhkZubS8+ePenZsyctW7akTZs2gemi\nouo9duGOO+5gw4YNlZaZMmUKb775Zk2EzGWXXcaqVatqZF21ra4MNI/CPbC9pLw3VXUq7sHrpKen\nn/Yd/DJTM4mOiA60FDJTM88qWGNM7UlMTAz8g3388ceJj4/noYceOqmMqqKqNGhQ/nHuyy+/XOV2\nvv/97599sOcAP1sK2zn5GbOBZ7WWYxTuEYG+yEjJYN7Yefxy0C+t68iYWrB422J+s+g3LN622Ldt\nbNq0ibS0NMaMGUOXLl3YuXMnEyZMID09nS5dujBp0qRA2dIj9+LiYpo2bcrEiRPp0aMHGRkZ7Nmz\nB4Cf//znPPvss4HyEydOpG/fvlx88cV8+umnABw+fJibbrqJtLQ0Ro4cSXp6epUtgjfeeINu3brR\ntWtXHn30UQCKi4u57bbbAvOfe+45AJ555hnS0tLo3r07t956a43vs+rws6WwFOgoIu1xyWAUcEvZ\nQiLSCfdoPv++PbjEYMnAGP/V5hjel19+yWuvvUZ6ejoATzzxBM2bN6e4uJhBgwYxcuRI0tLSTlrm\nwIEDDBw4kCeeeIIHH3yQl156iYkTT
zk5ElXls88+Y+bMmUyaNIm///3vPP/887Rs2ZL33nuPzz//\nnN69e1caX3Z2Nj//+c9ZtmwZTZo0YciQIXz44YckJSWxd+9e1qxZA0Benjvx8sknn2Tr1q1ER0cH\n5tU231oKqlqMe9j4XGA9MF1V14rIJBEZHlR0FDBN7cEOxpwTanMM74ILLggkBIC3336b3r1707t3\nb9avX8+6daee19KwYUOuueYaAPr06UNWVla5677xxhtPKfPxxx8zatQoAHr06EGXLl0qjW/JkiVc\neeWVtGjRgqioKG655RYWLlzIhRdeyIYNG7j//vuZO3cuTZo0AaBLly7ceuutvPnmm2d88dnZ8vU6\nBVWdraoXqeoFqvprb95jqjozqMzjqnpqmjbG1EulY3gREuH7GF5cXFzg9caNG/nd737HRx99xOrV\nqxk6dGi55+tHR0cHXkdERFBcXFzuumNiYqosc6YSExNZvXo1l19+OVOmTOHuu+8GYO7cudxzzz0s\nXbqUvn37UlJS7jCrr+zeR8aYGhWqMbyDBw+SkJBA48aN2blzJ3Pnzq3xbQwYMIDp06cDsGbNmnJb\nIsH69evH/Pnzyc3Npbi4mGnTpjFw4EBycnJQVb7zne8wadIkVqxYQUlJCdnZ2Vx55ZU8+eST7N27\nlyNHjtR4HapSV84+MsacQ0Ixhte7d2/S0tLo1KkT7dq1Y8CAATW+jR/84AeMHTuWtLS0wE9p1095\nkpOT+eUvf0lmZiaqyrBhw7juuutYsWIF48ePR1URESZPnkxxcTG33HIL+fn5HD9+nIceeoiEhIQa\nr0NV6t0zmtPT09UesmNM7Vm/fj2dO3cOdRh1QnFxMcXFxcTGxrJx40auvvpqNm7cSGRk3Tq+Lu8z\nE5HlqppewSIBdasmxhhThx06dIjBgwdTXFyMqvLiiy/WuYRwts6t2hhjjI+aNm3K8uXLQx2Gr2yg\n2RhjTIAlBWOMMQGWFIwxxgRYUjDGGBNgScEYU6cNGjTolAvRnn32We69995Kl4uPjwdgx44djBw5\nstwymZmZVHWK+7PPPnvSRWTXXnttjdyX6PHHH+epp5466/XUNEsKxpg6bfTo0UybNu2kedOmTWP0\n6NHVWr5169a8++67Z7z9sklh9uzZNG3a9IzXV9dZUjDG1GkjR45k1qxZgQfqZGVlsWPHDi6//PLA\ndQO9e/emW7du/O1vfztl+aysLLp27QrA0aNHGTVqFJ07d2bEiBEcPXo0UO7ee+8N3Hb7f/7HPQTy\nueeeY8eOHQwaNIhBgwYBkJqayt69ewF4+umn6dq1K127dg3cdjsrK4vOnTtz11130aVLF66++uqT\ntlOeVatW0b9/f7p3786IESPYv39/YPult9IuvRHfv//978BDhnr16kV+fv4Z79vy2HUKxphq+9GP\noKYfKNazJ3j/T8vVvHlz+vbty5w5c7jhhhuYNm0aN998MyJCbGws77//Po0bN2bv3r3079+f4cOH\nV/ic4hdeeIFGjRqxfv16Vq9efdKtr3/961/TvHlzSkpKGDx4MKtXr+b+++/n6aefZv78+bRo0eKk\ndS1fvpyXX36ZJUuWoKr069ePgQMH0qxZMzZu3Mjbb7/NH//4R26++Wbee++9Sp+PMHbsWJ5//nkG\nDhzIY489xi9+8QueffZZnnjiCbZs2UJMTEygy+qpp55iypQpDBgwgEOHDhEbG3sae7tq1lIwxtR5\nwV1IwV1Hqsqjjz5K9+7dGTJkCNu3b2f37t0VrmfhwoWBf87du3ene/fugfemT59O79696dWrF2vX\nrq3yZncff/wxI0aMIC4ujvj4eG688UYWLVoEQPv27enZsydQ+e25wT3fIS8vj4EDBwJw++23s3Dh\nwkCMY8aM4Y033ghcOT1gwAAefPBBnnvuOfLy8mr8imprKRhjqq2yI3o/3XDDDTzwwAOsWLGCI0eO\n0KdPHwDefPNNcnJyWL58OVFRUaSmppZ7u+yqbNmyhaeeeoqlS5fSrFkzxo0bd0brKVV6221wt96u\nqvuoIrNmzWLhwoV88MEH/PrXv2bNmjVMnDiR6667jtmzZzNgwADmzp1Lp06dzjjWsqylYIyp8+Lj\n4xk0aBDf+973ThpgPnDgAOeddx5RUVHMnz+frVu3VrqeK664grfeeguAL774gtWrVwPutttxcXE0\nadKE3bt3M2fOnMAyCQkJ5fbbX3755cyYMYMjR45w+PBh3n//fS6//PLTrluTJk1o1qxZoJXx+uuv\nM3DgQI4fP862bdsYNGgQkydP5sCBAxw6dIivv/6abt268cgjj3DJJZfw5ZdfnvY2K2MtBWNMvTB6\n9GhGjBhx0plIY8aMYdiwYXTr1o309PQqj5jvvfde7rjjDjp37kznzp0DLY4ePXrQq1cvOnXqREpK\nykm33Z4wYQJDhw6ldevWzJ8/PzC/d+/ejBs3jr59+wJw55130qtXr0q7iiry6quvcs8993DkyBE6\ndOjAyy+/TElJCbfeeisHDhxAVbn//vtp2rQp//3f/838+fNp0KABXbp0CTxFrqbYrbONMZWyW2fX\nP2dz62zrPjLGGBNgScEYY0yAr0lBRIaKyAYR2SQiEysoc7OIrBORtSLylp/xGGPOTH3rZg5nZ/tZ\n+TbQLCIRwBTgKiAbWCoiM1V1XVCZjsBPgQGqul9EzvMrHmPMmYmNjSU3N5fExMQKLwozdYOqkpub\ne1YXtPl59lFfYJOqbgYQkWnADUDwFSF3AVNUdT+Aqu7xMR5jzBlITk4mOzubnJycUIdiqiE2Npbk\n5OQzXt7PpNAG2BY0nQ30K1PmIgAR+QSIAB5X1b+XXZGITAAmALRt29aXYI0x5YuKiqJ9+/ahDsPU\nklAPNEcCHYFMYDTwRxE55faDqjpVVdNVNT0pKamWQzTGmPDhZ1LYDqQETSd784JlAzNV9ZiqbgG+\nwiUJY4wxIeBnUlgKdBSR9iISDYwCZpYpMwPXSkBEWuC6kzb7GJMxxphK+JYUVLUYuA+YC6wHpqvq\nWhGZJCLDvWJzgVwRWQfMB36iqrl+xWSMMaZydpsLY4wJA3abC2OMMafNkoIxxpgASwrGGGMCLCkY\nY4wJsKRgjDEmwJKCMcaYAEsKxhhjAiwpGGOMCbCkYIwxJsCSgjHGmABLCsYYYwIsKRhjjAmwpGCM\nMSbAkoIxxpgASwrGGGMCLCkYY4wJsKRgjDEmwJKCMcaYAEsKxhhjAnxNCiIyVEQ2iMgmEZlYzvvj\nRCRHRFZ5P3f6GY8xxpjKRfq1YhGJAKYAVwHZwFIRmamq68oUfUdV7/MrDmOMMdXnZ0uhL7BJVTer\nahEwDbjBx+0ZY4w5S34mhTbAtqDpbG9eWTeJyGoReVdEUspbkYhMEJFlIrIsJyfHj1iNMcYQ+oHm\nD4BUVe0O/BN4tbxCqjpVVdNVNT0pKalWAzTGmHDiZ1LYDgQf+Sd78wJUNVdVC73JPwF9fIzHGG
NM\nFfxMCkuBjiLSXkSigVHAzOACItIqaHI4sN7HeIwxxlTBt7OPVLVYRO4D5gIRwEuqulZEJgHLVHUm\ncL+IDAeKgX3AOL/iMcYYUzVR1VDHcFrS09N12bJloQ7DGGPqFRFZrqrpVZUL9UBzrVm7Fl59FYqL\nQx2JMcbUXWGTFGbPhnHjoKAg1JEYY0zdFTZJISrK/baWgjHGVCxskkKkN6R+7Fho4zDGmLosbJKC\ntRSMMaZqYZMUrKVgjDFVC5ukUNpSsKRgjDEVC5ukUNpSsO4jY4ypWNgkBWspGGNM1cIuKVhLwRhj\nKhY2ScEGmo0xpmphkxSspWCMMVULm6RgLQVjjKla2CQFaykYY0zVwiYpWEvBGGOqFjZJwU5JNcaY\nqoVNUrCL14wxpmphkxSspWCMMVULm6RgLQVjjKla2CQFaykYY0zVfE0KIjJURDaIyCYRmVhJuZtE\nREWkyodKnyk7JdUYY6rmW1IQkQhgCnANkAaMFpG0csolAD8ElvgVC5zoPpq5bg6Lty32c1PGGFNv\n+dlS6AtsUtXNqloETANuKKfcL4HJQIGPsbBqz1IAZq6fzeDXBltiMMaYcviZFNoA24Kms715ASLS\nG0hR1VmVrUhEJojIMhFZlpOTc0bB/GfHxwBoSQRFJUUsyFpwRusxxphzWcgGmkWkAfA08OOqyqrq\nVFVNV9X0pKSkM9rewA6Xuu1qNNER0WSmZp7Reowx5lzmZ1LYDqQETSd780olAF2BBSKSBfQHZvo1\n2DwgtR8AV6Vey7yx88hIyfBjM8YYU69F+rjupUBHEWmPSwajgFtK31TVA0CL0mkRWQA8pKrL/Aim\n9OyjAW0yyUipvKwxxoQr31oKqloM3AfMBdYD01V1rYhMEpHhfm23Ig28mtopqcYYU7FqtRRE5AIg\nW1ULRSQT6A68pqp5lS2nqrOB2WXmPVZB2czqxHKmRFxrwS5eM8aYilW3pfAeUCIiFwJTcWMFb/kW\nlU8sKRhjTOWqmxSOe91BI4DnVfUnQCv/wvKHJQVjjKlcdZPCMREZDdwOfOjNi/InJP/ExEBhYaij\nMMaYuqu6SeEOIAP4tapu8c4oet2/sPwREwNFRaGOwhhj6q5qDTSr6jrgfgARaQYkqOpkPwPzQ3S0\ntRSMMaYy1WopiMgCEWksIs2BFcAfReRpf0OredZ9ZIwxlatu91ETVT0I3Ig7FbUfMMS/sPwRHW3d\nR8YYU5nqJoVIEWkF3MyJgeZ6x1oKxhhTueomhUm4K5O/VtWlItIB2OhfWP6wloIxxlSuugPNfwH+\nEjS9GbjJr6D8EhMDhw+HOgpjjKm7qjvQnCwi74vIHu/nPRFJ9ju4mmanpBpjTOWq2330MjATaO39\nfODNq1fslFRjjKlcdZNCkqq+rKrF3s8rwJk97SaEbKDZGGMqV92kkCsit4pIhPdzK5DrZ2B+sIFm\nY4ypXHWTwvdwp6PuAnYCI4FxPsXkm7xju9mXf4jF2xaHOhRjjKmTqpUUVHWrqg5X1SRVPU9Vv009\nO/to8bbFzNryV/KPFjL4tcGWGIwxphxn8+S1B2ssilqwIGsBx6UAimMoKiliQdaCUIdkjDF1ztkk\nBamxKGpBZmomEVHFUBJDdEQ0mamZoQ7JGGPqnLNJClpjUdSCjJQMxvYeBcej+Oet88hIyQh1SMYY\nU+dUekWziORT/j9/ARr6EpGPLjwvBYA+51tCMMaY8lTaUlDVBFVtXM5PgqpWeYsMERkqIhtEZJOI\nTCzn/XtEZI2IrBKRj0Uk7WwqU5WYGPfbrlUwxpjynU33UaVEJAKYAlwDpAGjy/mn/5aqdlPVnsCT\ngK/PaIiNdb+PHvVzK8YYU3/5lhSAvsAmVd2sqkXANOCG4ALeMxpKxeHzOEWjRu63JQVjjClfte6S\neobaANuCprOBfmULicj3cae3RgNXlrciEZkATABo27btGQfU0BsFsaRgjDHl87OlUC2qOkVVLwAe\nAX5eQZmpqpququlJSWd+yyVLCsYYUzk/k8J2ICVoOtmbV5FpwLd9jMeSgjHGVMHPpLAU6Cgi7UUk\nGhiFu/12gIh0DJq8Dp+f5lY6pnDkiJ9bMcaY+su3MQVVLRaR+3CP8YwAXlLVtSIyCVimqjOB+0Rk\nCHAM2A/c7lc8YC0FY4ypip8DzajqbGB2mXmPBb3+oZ/bL8uSgjHGVC7kA821yZKCMcZULqySQumY\nwow1c+3W2cYYU46wSgqrc5cA8MHaefZMBWOMKUdYJYX/7JoPgB6zZyoYY0x5wiopDL5wIDQ4hhTH\n2TMVjDGmHGGVFDJSMoiLEy5teSXzxtozFYwxpixfT0mti+IbRdKlWV8yUqoua4wx4SasWgrgTku1\nU1KNMaZ8lhSMMcYEWFIwxhgTEHZJoVEjSwrGGFORsEsKDRvaXVKNMaYiYZkUrKVgjDHls6RgjDEm\nIOySgo0pGGNMxcIuKRwo2UXuwSN2MzxjjClHWCWFxdsWM/PraRw5onaXVGOMKUdYJYUFWQsoiTgM\nxxpSWGx3STXGmLLCKilkpmYSEVMINCCaeLtLqjHGlOFrUhCRoSKyQUQ2icjEct5/UETWichqEZkn\nIu38jCcjJYPvZ9wBwIwb/2F3STXGmDJ8SwoiEgFMAa4B0oDRIpJWpthKIF1VuwPvAk/6FU+p7u3a\nA9C5SV+/N2WMMfWOny2FvsAmVd2sqkXANOCG4AKqOl9VS68v/g+Q7GM8ADRt6n7v3+/3lowxpv7x\nMym0AbYFTWd78yoyHpjjYzzAiaSQl+f3lowxpv6pEw/ZEZFbgXRgYAXvTwAmALRt2/asttWsmftt\nScEYY07lZ0thOxD8fLNkb95JRGQI8DNguKoWlrciVZ2qqumqmp6UlHRWQZW2FN5Y8qFdp2CMMWX4\nmRSWAh1FpL2IRAOjgJnBBUSkF/AiLiHs8TGWgK8OfQbAeys/sgvYjDGmDN+SgqoWA/cBc4H1wHRV\nXSsik0RkuFfst0A88BcRWSUiMytYXY1ZlvuRi+9oY4pK7AI2Y4wJ5uuYgqrOBmaXmfdY0Oshfm6/\nPFdeMBBi85CCRKIjou0CNmOMCVInBpprU0ZKBh07HEa4llfGptsFbMYYEySsbnNRKr1HHIW7LrCE\nYIwxZYRlUkhI/oatW2Hel0tCHYoxxtQpYZcUFm9bzMtb/geA6/4wwc4+MsaYIGGXFBZkLaAkzl0u\ncexgop19ZIwxQcIuKWSmZhLVeB8AEUda29lHxhgTJOySQkZKBn/93gsAfL/zL22w2RhjgoRdUgAY\n2v0SIiKUVV/vsDEFY4wJEpZJYcn2xRyP286/P99it7owxpggYZkUFmQtQM/7At3ThcLiQhtsNsYY\nT1gmhcRGiXD+55CTxvGSBm7aGGNM+N3mAiD3SC7SejVaEgM7e7Ny58pQh2SMMXVCWLYUMlMzieyw\nyE1s+hYvr3rZxhWMMYYwTQoZKRmMv2wYtJ8HS35I0
ZEYG1cwxhjCNCkA9GrVCy7/XziaiH7yY/IK\n7fmcxhgTtkkh90guJP/HTSx8jKc+mGFdSMaYsBe2SSEzNZOImELo+CEAx3Mu4rXPXwtxVMYYE1ph\nmxQyUjIYdvEwGHG7m7HvQnYd2hXaoIwxJsTCNikAPHzpw0TF50PsPsi9iFkbZ1kXkjEmrIV1UshI\nyeC6jtdByqewYRjHikusC8kYE9Z8TQoiMlRENojIJhGZWM77V4jIChEpFpGRfsZSkZbxLaH3nyA/\nGVaMZ13OulCEYYwxdYJvSUFEIoApwDVAGjBaRNLKFPsGGAe85VccVRnbYywNOn8IyZ/CJ4+wMGsR\nj/zrkVCFY4wxIeVnS6EvsElVN6tqETANuCG4gKpmqepq4LiPcVQqIyWD9NZ9oP/vYP8F8OlD/PaT\n39rYgjEmLPmZFNoA24Kms715p01EJojIMhFZlpOTUyPBBRvfezx0mQ5p02Heb9BNVzHxX6f0dhlj\nzDmvXgw0q+pUVU1X1fSkpKQaX/+EPhO4IvUK+PYdkLQW3p3GwpXbrRvJGBN2/EwK24GUoOlkb16d\n9MTgJ5DoozD6BpDjMG0GT370gnUjGWPCip9JYSnQUUTai0g0MAqY6eP2zkpGSgY/GfATaJYF37kZ\n9naG91/hkX/8NNShGWNMrfEtKahqMXAfMBdYD0xX1bUiMklEhgOIyCUikg18B3hRRNb6FU91TB4y\nmSvaXQEdPoKrH4Ivb2TR098rhqscAAAU+0lEQVSn75OjrcVgjAkLoqqhjuG0pKen67Jly3xb/+Jt\nixnw0gBUFWa8DJ+Pc29c+hQvPteYCX0m+LZtY4zxi4gsV9X0qsrVi4Hm2hToRhJg+F3umQsAnz7E\n3fcfZOryqSGNzxhj/GRJoRyTh0zm4QEPQ0QxjPq2e54zuMTw+1ctMRhjzlmWFCowechkXrz+RSTm\nMNzTE24aBY1y4K+vc/ePd3Ht91aSZ8/lMcacYywpVGJCnwn84fo/ICLQ7R24+SY4fD78+zHmvNyL\ne3/zcahDNMaYGmVJoQqBxIBA6iIYfyncch1EHmHak5dx71v/G+oQjTGmxlhSqIaTEkPL1XDRbEhx\np6j+4RddufWvt4Y4QmOMqRmWFKrppMQAcNMt0PVt+Go4bz52PT1e6GnXMhhj6j1LCqfhpMQQvweG\n3QVtlsDaUax+59tc+qfL7cwkY0y9ZknhNE3oM4FPvvcJPc/vCTGHYXwGdH0L/v04vDGHu387h3vf\n+k2owzTGmDNiSeEMZKRksPKelYzpNgYaKNw0BobeDzvS4Z33+cPYB+j8y+tYsPE/oQ7VGGNOiyWF\ns/DGjW+4i9wE6P88PHS+Sw4lsXz52CwGXdSfMT9dFOowjTGm2iwpnKXARW4IRB5zyWHYXdDK3Z/p\nrekFDHxlIH/+cDW5uSEO1hhjqmA3xKshi7ct5r9m/Rerdq86MXPuU7D4AWiyDQ60I7FVPru3JRAR\nEbo4jTHhyW6IV8tKxxkeHvDwiZkDJ8Flk6H5RgBydyaQ8sgw4poU8KMfhShQY4yphLUUfDB1+VTu\n+fAelKB9m98S/t/Ok8p9e9oIHr70YTJSMmo5QmNMuLGWQgiVnrZ6RdsrTsxM2AV9Xjyp3IyXOnDp\nPW+QeN9wRrwzol5c/JabC998E+oojDF+sZaCz05pNezpDJ/fDp88cqJQk63QcRZc8nviD/Uhusk+\nototI0abEFdwEd+89TCpV8/i8qvyGNtjbI21LA4fhjffhNtug4YNq7dMq1awaxf49bX57W8hOhp+\n+EN/1h8OJk+GIUOgT5/a2d6WLdC6NcTE1M726pvjx0EE9u2DxMTQxVHdloIlhVqweNtinvzkSRZu\nXci+gn1uZkECLL8b8lvDfx4AKQaNPHnB6INQ1PjE9PcuhWZbaFzQjWPLxiIxh4m/8nkaxJ84ran0\n4xQ5NQ5VyHt9KlGt1xGX+XuOfDqO/Jm/IKbrbJqNG48eiwE5jkQeIzYyluOrbiX/QAMKc1sRP/h5\nGsTtY9dDrgvsvF9eTP6snxF32Z+JbPkVALGRsTSNbcr+o/sREeKPJ1McuZ/8T8dwPHkR2nIlWtSQ\n40ebENFkFwAla79N65aRNOm4mqy8rXzzYBYAPV7oRV7BfgpLCk9ab0FxIQWfDwOg0fk7aJLfj/P6\n/4ucIznERMZw9IhQtOkyihKXU9x4M7rhelq03442ySImMobc1ZeQN+cBuv34IYqj99IxsSMrdq7g\naPFRYvf1pqAQWrc9SmTDw2w9sBURCWy7sKSQmIhYIr8ZTFS7pcQd7E2b9odomZBEn+SerNy5kp0H\ndyMN3Iew7+g+co7kEC2NiImKILN9JgcLDrJqUWuKGmWR22QeIkLPlj255sJrmLNxDhtyN9CiURJ8\ncymdex6kaVw8K7JX0/283uzLK+Sdh35Au9GTGZrZnLVZe8hu8DExkTHsO5SPHm1K1/PTmDPhNSKj\ni8j4/dVsXNOchPNzGXFJf774OIXZv7iP6PiDDH7qftq1aUjj2MYs2LKAouNFRDeIpn+La5g75Wo6\nfGcqh6O3UFBcwJiL72Hhc3dw//3w+sfz+PfOWXS5bAsPX/owuXuiGJaeTmK73SQ9NIiCkqO0bdKW\n5rHNaRnfkl6terH3cC4t4hL556d7WPCHG7jovx5hP+7zKCwu5OIWF/PwpW4s7slPnmRD7oaT5r32\n+WvsOrQrsE/3HM7h4sTOXHvRt8g9ksve/Hy+yF1Oz1Y9aRrTlMzUzMCB0+Jti5nyzw/JI4uU8xoz\ntsfYwDqBwPSCrAUkNkrk/aWfsrXkM5o1aEvTqCSi4w8F/n5axrcMHJQt3rY4sEzukdyTfq/cuZKv\nt5QQk5DP4dXfYv4z47j6rgX844+ZjHrhcZom72Zsj7Gs2bOG99a9F4i7dNnguILrHrz9M2FJoY6a\nunwq/7vof9l6YKubocDBZDgeAYsfhHUj4VBraLgXjrZwZdKmw84+cLANlMSevMLzV0H6i5B7EWT3\ng+xL3fy+z7v17u8Au3tAtzch6gisuKv8wG67Cj76FezpApGFoAIFzU+833AvdPobrBx/6rL9n4H8\nVtBqBXw1DHq+Ap/fBlszT5Q5bw3ccj28Mh8OtHP3jmq5Cv5vg3v/3m7w1fUwz7sa/Jr7oPkm2N4X\n8lLheCTE7YHNQ2B3z5O333opfOsBiD4M778Ke7q7+QnZkJ/sXg95xK1v+ntuuscrkP4Htz8u+43b\nf2/Ndu812wSDHoP5kyBpHWQ8DRuvhc7vwz9+C9sucycP7OvoykfnQ+JXUJgARfHQbiE03g7Nvnaf\nwWc/gB6vQpvPYMtg11Is3edS4qbj9kC/5+BYI/jmMpj5Z7h4BvR6CWb9HjQCDrVyyzXKgfidrp5X\n/QT2XQjbLoU93eDGW+Cvb528f2Ly4Nvj4J0ZJ+YNnggd5sFfX4eOs6HLX+DweW7/fvYDV+audPed\n+f26Uz/z
hnuh819hRdDjaW++ES78OxxMgcijsPUK953428tuH20Z4sp1fx0u+gBarXT1LY5137uL\nZ8Kmoa4eJdHwrQdhVw/YcQkcaAsXfQhNt8CnD7nP7Zr7Yem9sLeTWzayEGLzACXqYGfivv0T8nYk\nwp+9btnxGe5zXPjfkPoR9HseWi13B2eNt0HjbHj7Q+jzB1h/ExxJcn97OWmubMonUBJDTPbVFOYn\nwJ40aPuJi2vFna7OLb6E4oYwawokboScLqfuuwdbu+90k23u7+xIC5g2w33nmm9y38XCJq67+Wii\n+9sqbAIpn9AgppAXrnvhjB4LbEmhjittPfwn+z/sOrzr5DeLo6BBCTQ4DsdiIKoQ9rdz/7S3XAkl\nMZD2F/fPYGcfKGh25oF0fRu+GQAH27rpiIITiafpFshrf+brLtXgGByPOrt1RBS6egNEHoErfgUf\nVXDb8uRPTyRH47/ur0PWwBPfoXNB42/qXn1i98H19xLR7T0W3bHotFsM1U0KkVUVOBsiMhT4HRAB\n/ElVnyjzfgzwGtAHyAW+q6pZfsZUV2SkZPD+qPeBEwli5a6VFJYUBsrERsYS1SCKjfs2QrOtcNNt\np66oMM4dycTtcYmkJNqNUezs4+bt7QRNs0COw44+7ujuWCOIyXdHhg33QUET10I51ggynoEDKW5d\nTb+B3AuhsDEcawgtNsCmb7mj1k4zIPoQLJ/gjjTz2rmjyob7XSvh/DVwJBHid7vHmX4xysWW8qnb\n9j5vvYdauqOj/R28o7Xt7ohs47XuaCrlUxdnu0Xw8UR3RH7RB9B4p9tu1FEX7+rbIH6XazV0eQ+O\nC6y6A9oucvPXjIGYA65ebRe51tP2S9zRYKMcF1OnGRC7H1aNc8u0n+/2y9Hm7kg3LxWSl7gjw6+v\ndkfrm66BqMPuKDOiCDrOca2anM6w+SpXJuqoO4rUBq6+fafAF9+FY3GQsMPVP7817OztYsxLhW5v\nu1g2X+X2QV47d7Tc/U346jq3zibfwKrbodkWt2+lxO3Htp+4I/RmWZB/Pmy8DhoUuyPyojg4fzXs\n6eqOUNstdMvkpbrPszgWYg6613mpgLgWXacZ8PVVcOFc1/o52swdlTf5xh205LeE9SOgoClEHHPf\nu/hd7si5+SbY395NRx2F7P6uPtrAzYsscPWX4xCXAyjkXuz2eaO97sj7gn+4+u+7wMV03hq3ja7T\nXMy7erptXzTLfUf3dHPrS9ju6pOww61n6xUu9sgCF2ezr91ntqera2l3nOPKt14Gy++CtPfcdzW/\ntfspiocO/3Tfm+abYO/Fbv8Vx7rv/+4ebv6lv3UtyT1d3TZ2ea3bfR1dr8C+jt7fwgF34NXiS7f9\nI0lufxc0cduK3+2+87t6us+j4T6O63EWZC3w7axF31oKIhIBfAVcBWQDS4HRqrouqMx/Ad1V9R4R\nGQWMUNXvVrbec6WlcDoWb1vMa5+/xrqcdWw9sPWkxOGX4H786myvqKSI/Uf3n3wabhnNGzYnOiL6\npGX2Hd13SpnGMY0D284vyj+pTOk6ylu2rIToBPKL8quMvSxByq1HQnQCh4oOVVpHY/wWExHD/Nvn\n18uWQl9gk6pu9gKaBtwABHdQ3gA87r1+F/g/ERGtb31aPstIyagX1zKUDr5lpmYCnDQQFzz4V3aZ\n4EG/isqUrjf4/eBle7XqddKAX2nZ8gYESwfzGsc2ZtXOVdyUdhPdzut2SuxlX5dd38qdK9l1aFdg\nQDkpLonmsSfGYUoHWnOP5JJXmMeCLQuIjYoNDMKWDiZWNLi6LmcdBcUFdEzsyMbcjcRGxZLWIi0w\nONy6cWuuufCaQH2D15UUl0Rai7TAgOafV/w5sO1S+47uo6C4gMz2mScNdAYPbDaObcwHGz5gf8F+\nwCXl6y+6nq/2fsXKXSsDA/HRDaIZ33s83c7rFhgcLd0vpQPKpfsneL8kNkoMDLCXxtyrVS/mbJwT\nWH/pwHXwPi1dJiYyhugG0YF9VHS86KQDmdjI2MDy+47uC5w80LZJW1BOmg6OLXg/BO+v0oOy0oOm\nwuLCQP3K1rN0mbL7oHS7cdFx9GrZi5zD7rtT+hmXF1fwd6omz0Asj58thZHAUFW905u+DeinqvcF\nlfnCK5PtTX/tldlbZl0TgAkAbdu27bN161ZfYjbGmHPVOXXxmqpOVdV0VU1PSkoKdTjGGHPO8jMp\nbAdSgqaTvXnllhGRSKAJbsDZGGNMCPiZFJYCHUWkvYhEA6OAmWXKzAS8k7YZCXxk4wnGGBM6vg00\nq2qxiNwHzMWdkvqSqq4VkUnAMlWdCfwZeF1ENgH7cInDGGNMiPh6nYKqzgZml5n3WNDrAuA7fsZg\njDGm+urFQLMxxpjaUe9ucyEiOcCZnpPaAthbZan64Vypy7lSD7C61FVWF6edqlZ5+ma9SwpnQ0SW\nVec83frgXKnLuVIPsLrUVVaX02PdR8YYYwIsKRhjjAkIt6QwNdQB1KBzpS7nSj3A6lJXWV1OQ1iN\nKRhjjKlcuLUUjDHGVMKSgjHGmICwSAoiMlRENojIJhGZGOp4qiIiL4nIHu/W4qXzmovIP0Vko/e7\nmTdfROQ5r26rRaR36CI/lYikiMh8EVknImtF5Ife/HpXHxGJFZHPRORzry6/8Oa3F5ElXszvePf6\nQkRivOlN3vupoYy/LBGJEJGVIvKhN11f65ElImtEZJWILPPm1bvvF4CINBWRd0XkSxFZLyIZtV2X\ncz4piHsC3BTgGiANGC0iaaGNqkqvAEPLzJsIzFPVjsA8bxpcvTp6PxOAF2opxuoqBn6sqmlAf+D7\n3v6vj/UpBK5U1R5AT2CoiPQHJgPPqOqFwH5gvFd+PLDfm/+MV64u+SGwPmi6vtYDYJCq9gw6h78+\nfr/APb7476raCeiB+3xqty6qek7/ABnA3KDpnwI/DXVc1Yg7FfgiaHoD0Mp73QrY4L1+EfeY01PK\n1cUf4G+4R7TW6/oAjYAVQD/cFaaRZb9vuJtBZnivI71yEurYvXiScf9grgQ+BKQ+1sOLKQtoUWZe\nvft+4R4dsKXsvq3tupzzLQWgDbAtaDrbm1ffnK+qO73Xu4Dzvdf1pn5et0MvYAn1tD5el8sqYA/w\nT+BrIE9Vi70iwfEG6uK9fwBIrN2IK/Qs8DBw3JtOpH7WA0CBf4jIcu8pjVA/v1/tgRzgZa9b708i\nEkct1yUcksI5R91hQb06l1hE4oH3gB+p6sHg9+pTfVS1RFV74o60+wKdQhzSaROR64E9qro81LHU\nkMtUtTeuO+X7InJF8Jv16PsVCfQGXlDVXsBhTnQVAbVTl3BICtV5Alx9sFtEWgF4v/d48+t8/UQk\nCpcQ3lTVv3qz6219AFQ1D5iP62ZpKu7JgXByvHX1yYIDgOEikgVMw3Uh/Y76Vw8AVHW793sP8D4u\nWdfH71c2kK2qS7zpd3FJolbrEg5JoTpPgKsPgp9Sdzuub750/ljvT
IT+wIGgpmbIiYjgHqa0XlWf\nDnqr3tVHRJJEpKn3uiFubGQ9LjmM9IqVrUude7Kgqv5UVZNVNRX39/CRqo6hntUDQETiRCSh9DVw\nNfAF9fD7paq7gG0icrE3azCwjtquS6gHV2ppAOda4Ctc/+/PQh1PNeJ9G9gJHMMdPYzH9eHOAzYC\n/wKae2UFd3bV18AaID3U8Zepy2W45u5qYJX3c219rA/QHVjp1eUL4DFvfgfgM2AT8Bcgxpsf601v\n8t7vEOo6lFOnTODD+loPL+bPvZ+1pX/f9fH75cXXE1jmfcdmAM1quy52mwtjjDEB4dB9ZIwxppos\nKRhjjAmwpGCMMSbAkoIxxpgASwrGGGMCLCkY4xGREu9Om6U/NXZHXRFJlaC73hpTV0VWXcSYsHFU\n3S0sjAlb1lIwpgre/fqf9O7Z/5mIXOjNTxWRj7x72c8Tkbbe/PNF5H1xz134XEQu9VYVISJ/FPcs\nhn94V0UjIveLe97EahGZFqJqGgNYUjAmWMMy3UffDXrvgKp2A/4Pd4dRgOeBV1W1O/Am8Jw3/zng\n3+qeu9Abd6UtuPveT1HVLkAecJM3fyLQy1vPPX5VzpjqsCuajfGIyCFVjS9nfhbu4TqbvZv77VLV\nRBHZi7t//TFv/k5VbSEiOUCyqhYGrSMV+Ke6B6UgIo8AUar6KxH5O3AId1uDGap6yOeqGlMhaykY\nUz1awevTURj0uoQTY3rX4e5h0xtYGnSnUmNqnSUFY6rnu0G/F3uvP8XdZRRgDLDIez0PuBcCD+Vp\nUtFKRaQBkKKq84FHcLelPqW1YkxtsSMSY05o6D1VrdTfVbX0tNRmIrIad7Q/2pv3A9xTsn6Ce2LW\nHd78HwJTRWQ8rkVwL+6ut+WJAN7wEocAz6l7VoMxIWFjCsZUwRtTSFfVvaGOxRi/WfeRMcaYAGsp\nGGOMCbCWgjHGmABLCsYYYwIsKRhjjAmwpGCMMSbAkoIxxpiA/w9joh58jOBh/wAAAABJRU5ErkJg\ngg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -790,7 +832,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEWCAYAAABMoxE0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsnXl8VNXZ+L/PvVlARWij1oVAcGeT\nLaIpImHR4q7V1rVBQBHcSm1fK77S8qoVpVqpSy2oUOJS608/UjfckJFtQHapC4IaSEQUUkEQSTJz\nn98fd+5kZjKTTDKZbJwvn3yYuXPuveeee+95zrOc54iqYjAYDAZDQ7GauwIGg8FgaN0YQWIwGAyG\nlDCCxGAwGAwpYQSJwWAwGFLCCBKDwWAwpIQRJAaDwWBICSNIDM2OiNgiskdEujRm2eZERI4VkUaP\nrReRESJSEvF9g4gMTqZsA871hIjc3tD9aznu3SLyj8Y+rqH5yGjuChhaHyKyJ+LrAUAFEAx9v05V\nn6nP8VQ1CBzU2GX3B1T1hMY4johcA1ylqoURx76mMY5taPsYQWKoN6oa7shDI95rVPWdROVFJENV\nA01RN4PB0PQY05ah0QmZLv4lIv8Ukd3AVSJSICLLRGSniHwlIg+JSGaofIaIqIjkhb4/Hfp9nojs\nFhG/iHSrb9nQ72eJyKcisktEHhaRJSJydYJ6J1PH60Rkk4h8KyIPRexri8iDIlIuIp8DI2tpn/8V\nkeditj0qIn8Jfb5GRD4OXc9nIW0h0bHKRKQw9PkAEXkqVLcPgQExZe8Qkc9Dx/1QRM4Pbe8NPAIM\nDpkNd0S07ZSI/ceHrr1cROaKyBHJtE1diMhFofrsFJF3ReSEiN9uF5GtIvKdiHwSca2nisjq0Pav\nReTPyZ7PkAZU1fyZvwb/ASXAiJhtdwOVwHm4g5X2wMnAKbha8NHAp8CNofIZgAJ5oe9PAzuAfCAT\n+BfwdAPKHgbsBi4I/XYLUAVcneBakqnjv4GOQB7wX+/agRuBD4HOQA6w0H294p7naGAPcGDEsb8B\n8kPfzwuVEWAY8ANwUui3EUBJxLHKgMLQ5/sBH/AjoCvwUUzZXwJHhO7JFaE6/CT02zWAL6aeTwNT\nQp/PDNWxL9AO+BvwbjJtE+f67wb+EfrcPVSPYaF7dDuwIfS5J7AZODxUthtwdOjzCuDy0OcOwCnN\n/S7sz39GIzGki8Wq+oqqOqr6g6quUNXlqhpQ1c+BmcCQWvZ/QVVXqmoV8AxuB1bfsucCa1X136Hf\nHsQVOnFJso5TVXWXqpbgdtreuX4JPKiqZapaDtxby3k+B/6DK+AAzgC+VdWVod9fUdXP1eVdYD4Q\n16Eewy+Bu1X1W1XdjKtlRJ73eVX9KnRPnsUdBOQncVyAK4EnVHWtqu4DbgOGiEjniDKJ2qY2LgNe\nVtV3Q/foXlxhdAoQwBVaPUPm0S9CbQfugOA4EclR1d2qujzJ6zCkASNIDOmiNPKLiJwoIq+JyDYR\n+Q64Eziklv23RXzeS+0O9kRlj4ysh6oq7gg+LknWMalz4Y6ka+NZ4PLQ5ytC3716nCsiy0XkvyKy\nE1cbqK2tPI6orQ4icrWIrAuZkHYCJyZ5XHCvL3w8Vf0O+BY4KqJMfe5ZouM6uPfoKFXdAPwW9z58\nEzKVHh4qOhroAWwQkfdF5Owkr8OQBowgMaSL2NDXGbij8GNV9WDgD7imm3TyFa6pCQAREaI7vlhS\nqeNXQG7E97rCk58HRojIUbiaybOhOrYHXgCm4pqdOgFvJVmPbYnqICJHA48BE4Cc0HE/iThuXaHK\nW3HNZd7xOuCa0L5Mol71Oa6Fe8++BFDVp1V1EK5Zy8ZtF1R1g6pehmu+fAB4UUTapVgXQwMxgsTQ\nVHQAdgHfi0h34LomOOerQH8ROU9EMoBfA4emqY7PAxNF5CgRyQF+X1thVd0GLAb+AWxQ1Y2hn7KB\nLGA7EBSRc4Hh9ajD7SLSSdx5NjdG/HYQrrDYjitTr8XVSDy+Bjp7wQVx+CcwVkROEpFs3A59kaom\n1PDqUefzRaQwdO7/wfVrLReR7iIyNHS+H0J/Du4F/EpEDglpMLtC1+akWBdDAzGCxNBU/BYYhdtJ\nzMB1iqcVVf0auBT4C1AOHAOswZ330th1fAzXl7Ee1xH8QhL7PIvrPA+btVR1J/Ab4CVch/UluAIx\nGf6IqxmVAPOA4ojjfgA8DLwfKnMCEOlXeBvYCHwtIpEmKm//N3BNTC+F9u+C6zdJCVX9ELfNH8MV\nciOB80P+kmxgGq5faxuuBvS/oV3PBj4WNyrwfuBSVa1MtT6GhiGu2dhgaPuIiI1rSrlEVRc1d30M\nhraC0UgMbRoRGRky9WQDk3Gjfd5v5moZDG0KI0gMbZ3TgM9xzSY/Ay5S1USmLYPB0ACMactgMBgM\nKWE0EoPBYDCkxH6RtPGQQw7RvLy85q6GwWAwtCpWrVq1Q1VrC5kH9hNBkpeXx8qVK5u7GgaDwdCq\nEJG6MjQAxrRlMBgMhhQxgsRgMBgMKWEEicFgMBhSYr/wkRgMhqalqqqKsrIy9u3b19xVMSRBu3bt\n6Ny5M5mZiVKt1Y4RJAaDodEpKyujQ4cO5OXl4SZdNrRUVJXy8nLKysro1q1b3TvEwZi2DAZDo7Nv\n3z5ycnKMEGkFiAg5OTkpaY9GkBjqxO+HqVPd/w2GZDFCpPWQ6r0ypi1Drfj9MHw4VFZCVhbMnw8F\nBc1dK4PB0JIwGomhVnw+V4gEg+7/Pl9z18hgqJvy8nL69u1L3759OfzwwznqqKPC3ysrk1u2ZPTo\n0WzYsKHWMo8++ijPPPNMY1SZ0047jbVr1zbKsZoao5EYaqWw0NVEPI2ksLC5a2Qw1E1OTk64U54y\nZQoHHXQQv/vd76LKqCqqimXFH0/Pnj27zvPccMMNqVe2DWA0EkOtFBS45qy77jJmLUN68Zf6mbpo\nKv7S9DnjNm3aRI8ePbjyyivp2bMnX331FePGjSM/P5+ePXty5513hst6GkIgEKBTp07cdttt9OnT\nh4KCAr755hsA7rjjDqZPnx4uf9tttzFw4EBOOOEEli5dCsD333/PxRdfTI8ePbjkkkvIz8+vU/N4\n+umn6d27N7169eL2228HIBAI8Ktf/Sq8/aGHHgLgwQcfpEePHpx00klcddVVjd5myWA0EkOdFBQY\nAWJIL/5SP8OLh1MZrCTLzmJ+0XwKctPz0H3yyScUFxeTn58PwL333suPf/xjAoEAQ4cO5ZJLLqFH\njx5R++zatYshQ4Zw7733cssttzBr1ixuu+22GsdWVd5//31efvll7rzzTt544w0efvhhDj/8cF58\n8UXWrVtH//79a61fWVkZd9xxBytXrqRjx46MGDGCV199lUMPPZQdO3awfv16AHbu3AnAtGnT2Lx5\nM1lZWeFtTY3RSAwGQ7PjK/FRGawkq
EEqg5X4SnxpO9cxxxwTFiIA//znP+nfvz/9+/fn448/5qOP\nPqqxT/v27TnrrLMAGDBgACUlJXGP/fOf/7xGmcWLF3PZZZcB0KdPH3r27Flr/ZYvX86wYcM45JBD\nyMzM5IorrmDhwoUce+yxbNiwgZtvvpk333yTjh07AtCzZ0+uuuoqnnnmmQZPKEwVI0gMBkOzU5hX\nSJadhS02WXYWhXmFaTvXgQceGP68ceNG/vrXv/Luu+/ywQcfMHLkyLjzKbKyssKfbdsmEAjEPXZ2\ndnadZRpKTk4OH3zwAYMHD+bRRx/luuuuA+DNN99k/PjxrFixgoEDBxIMBhv1vMlgBInBYGh2CnIL\nmF80n7uG3pVWs1Ys3333HR06dODggw/mq6++4s0332z0cwwaNIjnn38egPXr18fVeCI55ZRTWLBg\nAeXl5QQCAZ577jmGDBnC9u3bUVV+8YtfcOedd7J69WqCwSBlZWUMGzaMadOmsWPHDvbu3dvo11AX\nxkdiMBhaBAW5BU0mQDz69+9Pjx49OPHEE+natSuDBg1q9HPcdNNNFBUV0aNHj/CfZ5aKR+fOnbnr\nrrsoLCxEVTnvvPM455xzWL16NWPHjkVVERHuu+8+AoEAV1xxBbt378ZxHH73u9/RoUOHRr+Gukjr\nmu0iMhL4K2ADT6jqvTG/ZwPFwACgHLhUVUtEZCAw0ysGTFHVl5I5Zjzy8/PVLGxlMDQdH3/8Md27\nd2/uarQIAoEAgUCAdu3asXHjRs4880w2btxIRkbLGsfHu2ciskpV8xPsEiZtVyIiNvAocAZQBqwQ\nkZdVNVKvGwt8q6rHishlwH3ApcB/gHxVDYjIEcA6EXkF0CSOaTAYDC2GPXv2MHz4cAKBAKrKjBkz\nWpwQSZV0Xs1AYJOqfg4gIs8BFwCRnf4FwJTQ5xeAR0REVDXSyNcOV4Ake0yDwWBoMXTq1IlVq1Y1\ndzXSSjqd7UcBpRHfy0Lb4pZR1QCwC8gBEJFTRORDYD0wPvR7Msc0GAwGQxPSYqO2VHW5qvYETgYm\niUi7+uwvIuNEZKWIrNy+fXt6KmkwGAyGtAqSL4HciO+dQ9vilhGRDKAjrtM9jKp+DOwBeiV5TG+/\nmaqar6r5hx56aAqXYTAYDIbaSKcgWQEcJyLdRCQLuAx4OabMy8Co0OdLgHdVVUP7ZACISFfgRKAk\nyWMaDAaDoQlJmyAJ+TRuBN4EPgaeV9UPReROETk/VOxJIEdENgG3AF7ymtNwI7XWAi8B16vqjkTH\nTNc1GAyG1snQoUNrTC6cPn06EyZMqHW/gw46CICtW7dyySWXxC1TWFhIXdMJpk+fHjUx8Oyzz26U\nPFhTpkzh/vvvT/k4jU1aY9BU9XXg9Zhtf4j4vA/4RZz9ngKeSvaYBoPBEMnll1/Oc889x89+9rPw\ntueee45p06Yltf+RRx7JCy+80ODzT58+nauuuooDDjgAgNdfb9tdVot1thsMhv2LxlzS+ZJLLuG1\n114LL2JVUlLC1q1bGTx4cHheR//+/enduzf//ve/a+xfUlJCr169APjhhx+47LLL6N69OxdddBE/\n/PBDuNyECRPCKej/+Mc/AvDQQw+xdetWhg4dytChQwHIy8tjx44dAPzlL3+hV69e9OrVK5yCvqSk\nhO7du3PttdfSs2dPzjzzzKjzxGPt2rWceuqpnHTSSVx00UV8++234fN7aeW9ZJHvvfdeeGGvfv36\nsXv37ga3bVy8xV3a8t+AAQPUYDA0HR999FG9yi9dqtq+vaptu/8vXZp6Hc455xydO3euqqpOnTpV\nf/vb36qqalVVle7atUtVVbdv367HHHOMOo6jqqoHHnigqqp+8cUX2rNnT1VVfeCBB3T06NGqqrpu\n3Tq1bVtXrFihqqrl5eWqqhoIBHTIkCG6bt06VVXt2rWrbt++PVwX7/vKlSu1V69eumfPHt29e7f2\n6NFDV69erV988YXatq1r1qxRVdVf/OIX+tRTT9W4pj/+8Y/65z//WVVVe/furT6fT1VVJ0+erL/+\n9a9VVfWII47Qffv2qarqt99+q6qq5557ri5evFhVVXfv3q1VVVU1jh3vngErNYk+1mgkBoOh2UnH\nks6eeQtcs9bll18OuIPn22+/nZNOOokRI0bw5Zdf8vXXXyc8zsKFC8MLRp100kmcdNJJ4d+ef/55\n+vfvT79+/fjwww/rTMi4ePFiLrroIg488EAOOuggfv7zn7No0SIAunXrRt++fYHaU9WDuz7Kzp07\nGTJkCACjRo1i4cKF4TpeeeWVPP300+EZ9IMGDeKWW27hoYceYufOnY0+s94IEoPB0Ox4SzrbduMt\n6XzBBRcwf/58Vq9ezd69exkwYAAAzzzzDNu3b2fVqlWsXbuWn/zkJ3FTx9fFF198wf3338/8+fP5\n4IMPOOeccxp0HA8vBT2klob+tdde44YbbmD16tWcfPLJBAIBbrvtNp544gl++OEHBg0axCeffNLg\nesbDCBKDwdDspGNJ54MOOoihQ4cyZsyYsDYC7mj+sMMOIzMzkwULFrB58+Zaj3P66afz7LPPAvCf\n//yHDz74AHBT0B944IF07NiRr7/+mnnz5oX36dChQ1w/xODBg5k7dy579+7l+++/56WXXmLw4MH1\nvraOHTvyox/9KKzNPPXUUwwZMgTHcSgtLWXo0KHcd9997Nq1iz179vDZZ5/Ru3dvfv/733PyySc3\nuiBpW5nDDAZDqyUdSzpffvnlXHTRRWETF8CVV17JeeedR+/evcnPz+fEE0+s9RgTJkxg9OjRdO/e\nne7du4c1mz59+tCvXz9OPPFEcnNzo1LQjxs3jpEjR3LkkUeyYMGC8Pb+/ftz9dVXM3DgQACuueYa\n+vXrV6sZKxFz5sxh/Pjx7N27l6OPPprZs2cTDAa56qqr2LVrF6rKzTffTKdOnZg8eTILFizAsix6\n9uwZXu2xsUhrGvmWgkkjbzA0LSaNfOsjlTTyxrRlMBgMhpQwgsRgMBgMKWEEicFgSAv7g9m8rZDq\nvTKCxGAwNDrt2rWjvLzcCJNWgKpSXl5Ou3b1WqkjChO1ZTAYGp3OnTtTVlaGWQuoddCuXTs6d+7c\n4P2NIDEYDI1OZmYm3bp1a+5qGJoIY9oyGAwGQ0oYQWIwGAyGlDCCxGAwGAwpYQSJwWAwGFLCCBKD\nwWAwpIQRJAaDwWBICSNIDAaDwZASRpAYDAaDISWMIDEYDAZDShhBYjAYDIaUMILEYDAYDClhBInB\nYDAYUsIIEoPBYDCkhBEkdeD3w9Sp7v8Gg8FgqIlJI18Lfj8MHw6VlZCVBfPnQ0FBc9fKYDAYWhZG\nI6kFn88VIsGg+7/P19w1MhgMhpaHESS1UFjoaiK27f5fWNjcNTIkwpggDYbmw5i2aqGgwDVn+Xyu\nEDFmrZaJMUEaDM2LESR1UFBgOqWWTjwTpLlnBkPTYUxbhlaPMUEaDM2L0UgMrR5jgjQYmpe0ai
Qi\nMlJENojIJhG5Lc7v2SLyr9Dvy0UkL7T9DBFZJSLrQ/8Pi9jHFzrm2tDfYem8BkProKAAJk0yQsRg\naA7SppGIiA08CpwBlAErRORlVf0oothY4FtVPVZELgPuAy4FdgDnqepWEekFvAkcFbHflaq6Ml11\nNxjSgd9vtCZD2ySdpq2BwCZV/RxARJ4DLgAiBckFwJTQ5xeAR0REVHVNRJkPgfYikq2qFWmsb1KY\nzsDQEExkmaEtk05BchRQGvG9DDglURlVDYjILiAHVyPxuBhYHSNEZotIEHgRuFtVNfbkIjIOGAfQ\npUuXFC/FxXQGhoZiIssMbZkWHbUlIj1xzV3XRWy+UlV7A4NDf7+Kt6+qzlTVfFXNP/TQQxulPmam\nu6GhmMiy5DATS1sn6dRIvgRyI753Dm2LV6ZMRDKAjkA5gIh0Bl4CilT1M28HVf0y9P9uEXkW14RW\nnK6LiMTrDDyNxHQGhmQxkWV1YzT+1ks6BckK4DgR6YYrMC4Drogp8zIwCvADlwDvqqqKSCfgNeA2\nVV3iFQ4Jm06qukNEMoFzgXfSeA1RmM7AkApmcmvtGPNf6yVtgiTk87gRN+LKBmap6ociciewUlVf\nBp4EnhKRTcB/cYUNwI3AscAfROQPoW1nAt8Db4aEiI0rRB5P1zXEw3QGBkN6MBp/60Xi+KnbHPn5\n+bpypYkWNhhaOiYqsmUhIqtUNb+ucmZmu8FgaDEYjb910qKjtgwGg8HQ8jGCxGBoBkyYq6EtYUxb\nBkMTY8JcDW0No5EYDE2MmdhqaGsYQWIwNDFmlruhrWFMWwZDE2MmthraGkaQGAzNgAlzNbQljGnL\nYDAYDClhBInBYDAYUsIIEoPBYDCkhBEkBoPBYEgJI0gMcTEzrw0GQ7KYqC1DDczMa4PBUB+MRmKo\ngZl5bTAY6oMRJIYamJnXBoOhPhjTlqEGZua1wWCoD0aQGOJiZl4bDIZkMaYtg8FgMKSEESQGg8HQ\nBmjOkH1j2jIYDIZWTnOH7BuNxGAw1BszYbVl0dwh+0YjMRgM9aK5R7+Gmngh+949aeqQfaORGAyt\ngJakATT36NdQEy9k/667mkewG43EYGjhNLUG4PfXPoeouUe/hvg0Z8i+ESQGQyNTV0dcX+JpAOnq\nMJIRWmbCqiEWI0gMhkYkHdpDU2oAyQotM2G1cWjsQUdzYQSJwdCIpEN7aEoNwJitmo62FLRgBInB\n0IikqyNuKg3AmK2ajqY0WaYbI0gMhkakLXTExmzVNLQl7c8IklZGW7GptmVMR2xIhrYw6PBISpCI\nyDFAmapWiEghcBJQrKo701k5QzRtyaZqMBjazqAj2QmJLwJBETkWmAnkAs+mrVaGuJiJYAaDoSWS\nrCBxVDUAXAQ8rKr/AxyRvmoZ4mFWLjQYWi4tKftAU5OsIKkSkcuBUcCroW2Zde0kIiNFZIOIbBKR\n2+L8ni0i/wr9vlxE8kLbzxCRVSKyPvT/sIh9BoS2bxKRh0REkryGVk9zp0EwGAw18fthwgQYOhQm\nT3bNz/ubMEnW2T4aGA/8SVW/EJFuwFO17SAiNvAocAZQBqwQkZdV9aOIYmOBb1X1WBG5DLgPuBTY\nAZynqltFpBfwJnBUaJ/HgGuB5cDrwEhgXpLX0eppKzbV5sQELBgaC89vuW8fqLrbWnsob0NISpCE\nOv+bAUTkR0AHVb2vjt0GAptU9fPQfs8BFwCRguQCYEro8wvAIyIiqromosyHQHsRyQZ+DBysqstC\nxywGLmQ/EiSG1DABC4bGxPNbekJEZP80Oydl2hIRn4gcLCI/BlYDj4vIX+rY7SigNOJ7GdVaRY0y\nIR/MLiAnpszFwGpVrQiVL6vjmF6dx4nIShFZuX379jqqmhh/qZ8JjxUz4feb9zt1tS1iAhYMjUms\n3/K66/bPwUmypq2OqvqdiFyDG/b7RxH5IJ0VAxCRnrjmrjPru6+qzsSNMCM/P18bcn5/qZ/CuydR\nOet1CGYx+6EgC96197uHpC3RliaBGZqftjQXJBWSFSQZInIE8Evgf5Pc50vcMGGPzqFt8cqUiUgG\n0BEoBxCRzsBLQJGqfhZRvnMdx2w0fCU+qj4bBMEs0AwqKoJMmQJTpjTNA2Ns+Y2PefENjY3xWyYv\nSO7EdXgvUdUVInI0sLGOfVYAx4Uc818ClwFXxJR5GTcSzA9cAryrqioinYDXgNtUdYlXWFW/EpHv\nRORUXGd7EfBwktdQb3IOyMHq9jpBuxICgNq88w4sWtQ0a0IYW356aIsvvhl0GJqTpHwkqvr/VPUk\nVZ0Q+v65ql5cxz4B4EZcAfQx8Lyqfigid4rI+aFiTwI5IrIJuAXwQoRvBI4F/iAia0N/h4V+ux54\nAtgEfEaaHO3+Uj8T35iIdl6KffXP6HHqV1iW4DhNY1s3tvyWTUuaM+ANOvbX0FND85NsipTOuCP/\nQaFNi4Bfq2pZ4r1AVV/HDdGN3PaHiM/7gF/E2e9u4O4Ex1wJ9Eqm3qngK/FRGazEwcHO9XN6/wV8\nsa6oyWzrxpbfcmlp2mJbyiJraJ0ka9qajZsSxev0rwptOyMdlWoJFOYVkmVnURmsxLZsyPUz/dl+\nrPEfDHnvQefjgPS9rcaW33JpaR23GXQYmhtRrTugSUTWqmrfura1VPLz83XlypX13s9f6mfakmm8\n8ukrKEqGlYEgBJwAWXYW84vmU5Brevj9jZamkXh1aqpBh/HH7D+IyCpVza+rXLIaSbmIXAX8M/T9\nckLRVW2dVze+SlCDAFQFqwBQlMpgJb4SnxEk+yEtQVuM7cybKoCgJQpRQ/OTrCAZg+sjeRBQYClw\ndZrq1GLwlfgIOsHwd0XJtDJx1CHLzqIwr7D5KmdoVpoz8qs5O/OWYNYz2lfLI9kUKZuB8yO3ichE\nYHo6KtVSKMwrxLZsAk4AAEEY228sXTp2oTCv0GgjhmahOTvz5vbHNKUQNdpX8iSb/TcetzRaLVoo\nBbkFPHr2o2RamUjo38ItC8k5IKdZhUhLCj1tbvbHtmjO5QSaOwN1U4bFmxD85Ellqd39In37uAHj\nAJjw6gQcHD7a/hHXvXpd1G9NrWo3xiipLajs++uIsbl9NM1p1mtKjai5ta/WRCqCpEH5q1oj5XvL\ncXCitr340YuMGzCuyTuzxjBrtJUOuCXY65uLtjg7PxmaUog2t8BuTdQqSERkN/EFhgDt01KjFkhh\nXiEZVkbYVwJwcQ93Yn9Td2aNMUpqKx2wGTHunzSlEN1fBXZ9qVWQqGqHpqpIS6Ygt4CFVy9k2pJp\nbN29lbH9x4bNWk3dmTXGKKmtdMBmxGgwtAySmpDY2mnohMRE+Ev9+Ep84cit1uhvaI11Nhjqi3nO\nU6OxJyQaQvhL/QwvHs6+wD4ABncdzL3D72XSpNb1lBqV3dDWaSu+wNZAKuG/+yXF64r5IfADGvq3\ncPNChvxjCP7S/Sj+1LDf0JrDq034btNhNJJ64C/1M
2vtrBrbq5wqky7F0OZo7SP6+voCjRms4RhB\nUg9iU6Z4WGKRc0DsUvMGQ+umtUf31ScYoyUJzdYo0IwgqQeRqeVFhKM6HEXpd6WoKje8fgNQPUnR\nYEgHTdnJtIXovmR9gS1FaLYkgVYfjCCpBwW5Bcwvmh+O2PKV+Ljj3TtQlIAT4PrXrmfNV2so6lPU\nYDNXaxyN1Jf94RpTJV4bNXUnsz+FV7cUodlSBFp9MYKknhTkFoSFxPpv1rtTM0MR1EENMmPVDOas\nm9OgtUpmzoQbb3Qfouzs1jMaqQ+tdcTVlCRqo+boZPaX6L6WIjRbikCrLyZqq4GE13SPmYejKBXB\nCnwlvvodzw833ABVVeA4UFHRNqNMTCRN3SRqo+ZM1rg/UFAAkyY1r+Bs7qSYDcVoJA3EW9NdUQRB\nPbWk9FTYPJyck86t3/F8rgDxsO222VG01hFXU5KojVrKqNmQXlqjFmgESQOJXdNdECpLBqBz3kad\ndkxcYtG7HiOKwkLXnFVRAZYFjzzS+h6mZDCdYd3U1katsZMxtH1MipQUiEyVAjDl7greeXIITlCw\nbbj2t5vpcu6z5JSfS/nHvZMKQTQdrCFZzPNiSDfJpkgxgqQR8ATKzk3deXDCOQQDGWRmOmjRcAJO\nAGfOW1hOe7KzpFXZPeNhOq+WQbqCFhJFizXHPTfPWnKks51Mrq0mwsu9VRGowMFBfvVT7M3DOWVw\nJYucheii30MgC0clLZE2rXFJ2+7WAAAgAElEQVRRrbZCc3Z0jbUuTWT9491faJ57bp615Ggp7WQE\nSR3U1ln4/TDlHxVUOP1xOi8BQDsvxem8jCUacsDn+cCuxFKbrCxpVOdya1xUq63Q3C9wqkEL8eqf\nKFqsOe65edaSo6W0kxEktVBbZ+H9VlE5BMd6Cyk6A81diiUWllg46oZgSe5yLpj6CIdvvxTy3oPO\nxwGNc6db46JabYXmfoFTCVrw+2HKFDeww3Gq65/o/jbHPTfPWnK0lHYygqQWaussvN+coGDRnhH2\nn7j43E9Z89Uatu3ZxrxN8wg4AWzLhs5+Zu/7A4HtAeYUZzVosmI8WuOiWm2FlvACNySCKzwACgkR\ny6quf6L72xz33DxrydFS2sk422shGY0kyp7c2c/QOUOpDFaSYWVwznHnMG/TvPB8EwBbbO4aeheT\nBk9qlGszDsnmozW2/dSpMHmyOziyLBgxwtVOWkv9DU2LcbY3EqNGuf8XFdWM548dCUx4tZiKYAXg\nppZfuXUlVU5VWIgIQpadFQ4X9ohdcbE+mHkFzUdrbPtYTaqlCJHWKJQN1RhBkoBIjcO2q7fXZ3JY\n2e6yqO8iwvSR06OEhRf1VRmsJMtuPLPX/ojpjOqmpZhCImnuwAVD6phcWwmI9Y/MmOE+7LWtFFfU\np4gsOyvh7446zNs4j6mLpoZXVPRSrQQ1SGWwst45uloSzbmantcZTZ5c932q7RitdTXA+tASckpF\nYvKvtX6MRpIAzwSwbx+oun+RDvd4o9+C3AIePuthrn/teoLqLoAVlYcLeOXTV3h5w8tYlsWjZz8a\nlWolntmrJVFXKHRzjipTjaJq7vrvz7SEwAVDahhBkgDPBFBcDLNnQyBQ/ZDX1umU7y2POk6kf0RE\nwgLGcRxufP1G3rv6vag1TlqqWauujra5w2FT7Yyau/77M8mY24zZsmWTVkEiIiOBvwI28ISq3hvz\nezZQDAwAyoFLVbVERHKAF4CTgX+o6o0R+/iAI4AfQpvOVNVv0lF/zwdSVBT9EE+dmrjT8TSMfYF9\nYSFiYZF/ZD5rtq0Jzy8Bd/0SX4mPSYMntVgB4lFXR5uoI2+qDiBV239jjIpNZ9dwavM37g/aYmt/\ndtImSETEBh4FzgDKgBUi8rKqfhRRbCzwraoeKyKXAfcBlwL7gMlAr9BfLFeqavqSZ8UQ+5DX1ul4\nqygWrytm9trZBJwAWXYW/Y/oz6qvVoXLCUKGlcGWXVvwl/pbrCDxHvCcnNo72ngdeVMv1JVKFFWq\nE/xiNde22Nk1F21dW2wLgjKdGslAYJOqfg4gIs8BFwCRguQCYEro8wvAIyIiqvo9sFhEjk1j/RpM\nXZ2Ot4piUZ+iqOzAc9bNoSLghgd36dSFsu/KmLFqBrPXzmbBqAUU5BakFArc2MQ+4NOnQ3l54o42\nsiP3FuoKBNzv3kJdqb4g6Ry51SWIalv+1vOlQfo7u9Y+eq0vbd2HUlxc/fy0VkGZTkFyFFAa8b0M\nOCVRGVUNiMguIAfYUcexZ4tIEHgRuFtb6KzKyGV5AaaPnO464recTMmiQjcPV+4yKoIVFK8rBuD0\nu35P4PPTyDj69yycfF/KwiSVTid2JOgJES+qprbj+XyNv1BXU43c6rNeus/nCsnIJ1DE1eDSQaJ6\ntGXh0hJDlhsLvx9mzap+fjIyWqegbI3O9itV9UsR6YArSH6F62eJQkTGAeMAunTp0qgVSKZDi/di\nl+8tJ7hlIMx5B4JZYFfCqOGQuwyAaf9aRGD2GxDMIvBeJdOOf4SXftfwtybVjjd2JJiTk/zx0rFQ\nV1OYOOq7XnpOTrTAtCz3+8SJ0Lt349cvUahsazeN1EVrnPyZDD6fey/BHYCMHt06rzOd80i+BHIj\nvncObYtbRkQygI64TveEqOqXof93A8/imtDilZupqvmqmn/ooYc26AISUVfce6I5DYV5hVgfjIJA\nNmgGBDOhpBBbbIr6FLF1/fGugAn9tnZZp6g5J7H4S/21/l48dzP7KpwGx+d7I0Fv/ejy8uTj/b19\n774bFi6EceNSn6eRzjXLvboVF9dvvfTycld4gNsRqEYnQmxs4tXDzMNovUTez3bt3MCe1kg6NZIV\nwHEi0g1XYFwGXBFT5mVgFOAHLgHerc1MFRI2nVR1h4hkAucC76Sj8rVRl8027ovd2U/xqxuRtWNx\n5beCFcTqtohBXQYxbck02h1zAthnQlDBrqKk02zuWPA+2XZ2eMa750PJOSCHiW9MTDgj3l/qZ9bO\nSaj1OmgmGZkWhYU29SV2JFgfW3WszyTVUXO6TByxWQwyQm9F5DUmOreneXn7ikSHijc2ierRkn0I\nbdnslioNfaZbWpumTZCEfB43Am/ihv/OUtUPReROYKWqvgw8CTwlIpuA/+IKGwBEpAQ4GMgSkQuB\nM4HNwJshIWLjCpHH03UNiajr5tcwCXVfz/Di4exb8Bs0ACCIKCeeuZxNXd5n4eaq6p1HLYSSQshb\nALnLcBT2BfaFfSheOhURwVEHR53wjPhIQeIr8RE8arFrOls3ij5HDgT6p/W6a6OxzFLpMHFE1g3g\n2muhS5fk1kuPbRPveOl8wWPr0ZI7o7YQkZRu6vtMt8Q2TauPRFVfB16P2faHiM/7gF8k2DcvwWEH\nNFb9UqG2mx/7YvsCr7oZgPPeBWsyOBaZmRZDztvMJ98EonfO9WN1WY5q9Xx4RXl89eNs+35bOJ2K\npRZS9lPk
i9Oxj1lSY0a8N5+lQmycdUWsXNOe4W+n/tA1tCNvyZE3sXXzzAvJBBXEzXBQUG0qa6oR\nY0vtjNp66G5z0BLbtDU621sMtY3ool7s0ohOHW+WOxy8axDW4tsJdp0fdrgDqGqN1CpBDfLKhlfI\nsDLQoLpC5Kn5UJWBLFHWn/URvpKp4bBhbz7LlLsreMdpjxNMz1K/ydKSI2/iaRXJdLK1RVC1tBGj\nV9/w4MbXNJ1RSx5ANCWNqf21xDY1gqQeRD4MkHxnEdWpk42jQlUVPPiHY1DnLix7Ms6ZN8EPOaGQ\n4OU4ODWO46hD90O688E3HxD8YjBUWqAWVVXKDX/7f+hp95BlZzF95HTK95ZTmFfIlKsLWfRU3Q9d\nU8xfiR01R052rG1+SlMQWbfaMhdEkqgzTuT8bk4hGm9OUFN0Ri15AFEbjdnxN/bAoiW2qREkSRL7\nMIwaVb8RXUFuAVOuJtypi7j7Oo5gaTbWG4/hBNVd3/3qn7lrv8cIE0VZ+/Va90veArArEUewMxyC\nXd/F0SA/BH7g+teuBwg74efPL4g7L6J47ma2OR/CD4fwetUkgkctbrJU9vFW6ktl9nsqL37svsmO\n+BKVSyVsOl3ECrfy8upccukm3aG7je3raWh4fyLSof21tHBoI0iSJPZhgPqP6CJHEjk57lwDT6g4\njg0K4ojrbO+8FFtsBnUZxJadWyjZVRJ9sNxlriO9ZCi/PP8ont/1fng+g5cYsjJYSfG6Yrp09LHz\n8O5M/DscueRTzjr2LG6+vAcV+44CuoAEwX4dRg2nssuKGo77dOC1p1fnyJDZhgiChnbUifZNZsSX\nqFzs9pYwczmR0Jszx902Z056BFy6Hfr1uffJ1qWujj82ym/MmJoL30XSEk1RjY0RJEmSk+OOmlWr\nF7qqK2VIPCJHEr171xQqVoZDMG8BDg6WWvhL/VQ5VfEPlrsMzV3GM9+6fpdYghrkiTVPENw8EJ3z\n6/AkyH/3fQqt6I57+zU0b0WRkqFkdVuXVCr7hnYQsbm74q0dnnDfBOa3VEZ8ifZNdsSXqJy3vaXM\nXI4n9JI14TWUpvAVJXvv61OX+oT3B4PuWkW1CeJkByYtLaS3PhhBkgR+v9vRB4PVk84efzz+Ou71\neRC8Mj5ftVDK6f4JEz9cTWXQjko7XxeRjvlIAk4ASk6PmOioKI47qz4AbhS1A1aQk3+6l+l1mLX8\npe58mNm3XEmgyiYjM8jovzxD0bnH1anFJMrdlYyPpLaVJBO9+Mn4fdI1WvTOveXVKwgGu4a39+nT\nOMdvCLFCryHX3twmnVi8a6ioqJmapqHBBcmG9ydaqyjRMeuK/mtu82cqGEGSBJFmGM+3AdGO1Mjs\nr8mouxDtJ7AsePRRGHdhb3oPmF9j0qFt2Zx97NkcftDh9DuiH/M2zmPuhrnJXUCezxUcoYmO9Cl2\n/9YVweox4LiPQWG3wvAKjfESSHqd+b4Fv0ErFBSCjsOMFz/h8e1jGNRlED0O6UFRn6IakyO9TrWy\nsmuUnX7SpCTvQYmvxkqS3jkKCmD6s+t58qXPOLL3p9B5MP5SogRPZACCt5+/1I8v4GP6s+dS/nHv\nxrOxRwg9e+ebZGTOR9XGcWDlSveez58PdE4+wCHZYIh6BU109jPqgY1QMoSiC7vWee317ezSbdLx\nBMVNN8GDD7rv3vXXw7x5cNZZ7uDPe7duuaXhk2nj/RZvraKcnIaHfDeF0E0nRpAkQeSoJzKvUkZG\ntSM1Mvurp+7OmlW7QPH5qo/pOG7KdTc/U3Wyx96H9Y7bMYwbMI7fv/N7pi2ZFt4mCIO7DGbxlsVR\njvrDTvycb0YND0109FWHGpcUgtqADao88OwqGDwV27I59ahTWVK6BEXDM+u9zlzz3gX7f8OCSfMW\nENQgCzcvZOHmheFsxkBUOn2vUwU7qZc5chb/ll1byLAywAHbssPp971zPLn+SaqOroLvYd6cbEb3\nHR0WPBWBCm58/UaCTjC8MmXvw3ozvHg4FSX9sTb/wKPXQ0FB77oehaSIFHoctZhr//IMn88t4p13\nqn1BxXM3M+fgmhpWPEFQmzYW217JlKtR9uAsijrPByKEf4zm4ffDlCnVz2tkZ5eozo0ppGsMaiKE\nmje4U3X/nzsXXnnFraeXsubBB918b40VHegJGm+tokjzdJ2+mjjtlVCrbiXmLiNIksAbgUyZQrgz\n8BKsefmnYhO7eOpubfbTwsLqJH/gvgSxI5HYDMKR3DfiPgDuX3o/KGTYGfQ4tAdXnnQl8zbOY0P5\nBjb9dxPbv98Oud9EzVUBOLzXBnYsdghUVoFVRbDdNlj4PwTzfCwMLgyX2xfYxxTfFC7ucTG2ZRMM\nO/oLowVTCM/JP2fdnKgFvrxOtcvOIvdF6exn6qL4o2evo6sIVODghNdvOe/485i3aR6Pr36cWWtn\nIYgr3CJMexXBCl759BUssVBVEMJ+Jm9lyrH9xlJR0h/nH2/hBLO4/j2HeX+axuEnfhHWqBoaEh27\nfHLRucdBX1i0qLqjIO89KrdHa1jrv1nvCjwNRqXFqU0biyTZcrFlvcwJYSEQxwTpje5j/VmR9ylW\nSLvXfxfTR07Ht7ccShO3Y21tHaXhWTZj+o6BRbeFtVvLqjY5e3jbIwd39dGAIwcxsZpsJJ5ASdbf\nlEjYxzOntSZzlxEkSVJQ4AqSyM7AmwHtjSRsG84+21WtPeGi6morxcU1H4KCAtecFbn4U33V//tG\n3MeFJ1wYHvk/vvrx8APqK/Fxx7t3RPtPSk8NC4BtuS8hvxoKXwyBfR3g9UdBrRpZiRXl7c/fZv4X\n8xEJOfVzlyG5y0O/R5NhZbD6q9VUBCuilhrOsrPoN3Af5XunMvf7nTww+wEcdci0M/GN8kWNZqf4\nplARrAhrVopS5VTxafmnYcERDCb2H325280PGhYmEXh+J2vzMJyQ7yhYVcXcN76F7//O7LWzeeis\nh6LMimP6joky2c1cNZMXP3qRvkf0pVN2p6iOJry42auu2YiyrtXmkLmbIe89+g3cR9Yb1cJmZ8VO\n7lhwR3gFzYpgRVgQxAqmRMEQyZbzytqWTTAYRFFmr50dvj432Wcu6lhUVsKLL1abdi0LRoxw34WC\nApjw2Eb2LZiI5i3AyV3G9a9dz4AjBrj3Tp244ehRWktJ3XnjIoVeMBhkxqoZZO78OEq7vekmeOAB\nCAbde52ZpfxmosWDDyb/bsXWxxvEWGKR8eVgxnSak9AEGKlRZGQG2dLpGfylNf2GdZloI4/dmsxd\n0kKX8mhU8vPzdeXKxllQMdFaFbEjieJiePJJqAoFXGVnw4IFiSNKEqmvyaq2UxdNZfKCyQQ1iC02\ndw29i8K8Qk7/x+muwx1cITJnfs0U9qWnwuz3wMkEBCQAwybD4HujBE+s5hGJhcVpXU/jx+1+zLxN\n86gKVkWZ17zfl5ctr/EbwPgB43ns3MeYuWomN75+IwEn4Aqh2s4f8
ZvdZQXnHX8eS0uX8s3e+Csv\nW1gghEf761cdxI2XnUigykKtinB7CMIZR5/B/C/mRwU72GJz3gnncXzO8TVMiu0y2kV3kp7/q1Kx\nMwI88twn9B6wJ2pk7fm8Dm53MPcvvT9qGeZMK5P3rn6vRqdbHx8JEHefmXPX8+K8cvZ1foNFzjQU\njXpmCu+eROWs1yGYSXa2xUN/teOabfx+GDosSEWF1hh8xMMWm2v7X0uXjl2ihEdk3rjYMuV7y8Nl\nI7VbW2yuPXRWWLstKHCv6/p7lhHcfQh2h+387fYCev+kd+J3K6at4uWxA8LvjTjtaJdt1ZrpoHju\nZmbtHJVwTlZd5seGTnpOFyKySlXz6ypnNJJ6Es8JF7st8vuMGa5WEggkHlEkcuzVK2Qxzmi0ILeA\nR89+NGwusbacQdDJRtV2/Rslhe6LX1IIjgUIockskOfjoG1nsGfO3Lhrp8SiKCOPGcn7W9+nIlhR\n43cHh4WbF8bZ02X1V6uZuWomN7x+Q92CL85vwVEjOP6nx/PqxlejjmuLHSUMzj/hfG796a0hkyH0\nXuC+/E98+ysCR7nHzrAyuLjHxfg2+6K0nqAGmfvJ3Bqh1opSEahgim8KUwqnuOYonytEnKDgODDh\n0ec4/5qPokbW/97wbzLtTIJOMEqIWGLxm4LfRAU+JDJxxg40vHLxTE7jBoxj5tz1XPfLYyDQHeyB\nZI7243ReEn5mopJ9lhRyeJ8S1hzZienPXh/l6/CX+pn496+prDwvHA1IydDw/YlN8SMItmWH/WXg\nZmpQ3KANW2z3L1TGG2xYYpFtZzN95HTWfLUm/JuI0G/gPsYN8JZT8LElYwvaZz384y2CwSwmXBrg\nsX+tZ9Kkmr6v2PY59/hzo/LY2ZY7r8vBQUqGosEsVK2wf8sXeLaGgC4oAF/gWYILFsfVODzB5V1L\nvHsZ+77XMHeFoiaTDZBoKowgSZF42oj3vaioesJXQ6JW6hWyGDKnxI5Axw0YF3bY55x0LhMX21RU\nKmIrg05X/JJJVZ4PMkLhwJYDZ98IucvYs+i2qLDhsOCJpfRUKBnG3ODXvG8nGUkWw4qtK1ixdUW0\nGa6kMPH5a/w2hD8vuS/qmBeecCGHH3Q4f1/1d8DtFF779DVu/emt1e1WAAUFXTn4nQL+vGQxiuKo\nw2fffkbfn/Tl/a3v16hrvFBrB4e3P38b32YfY/qOoV/367EzTsQJCojitPsmnCvNCbodqKJUBaui\njmeJxe9++jseXv5wXLOav9QfzgTdL3A9E6/oHbqfVZz3p+mcNbQT5XvL2bJrS9g04zgOE16bAMCL\n8453hUio3Y777hquGjoy6pkJ+8Fyl7EZ+PsqyLajl4MunFNIZWZ/sM4EzQS7CqvbQmwr09UsLBtB\n3CALz68BPL768bgh7ZZYjO03lm3fb+Pfn/w73CZeduvyveU8du5j9DuiX3hgNPGNiQBRJkgpuTX8\nXDih1EG9B+yJa2KKbB/v3njBHJERkmsOa8fsJUKgyjVbzdo5iuCCmhqHv9QfDgrRoCIi5ByQE/7N\nE1wigiUWjjrMWTcnfAzXpNgZddx31OcTJk2C9VkzmfLRi/T9vi/T/9/ykLaYxeyHgix4146eLNlM\nS3UbQZICiZyStY0o6kN9wycTjVojt/eeDz6fUFiYRUHBvUx4dRcznBlohPPc6vI+qoLGhg3n+QB3\n9AjuS05pATrnbTSYxfvvVcKo5bWaNwA6ZXfiu8rvcNQJj1zjdc7SbSHqzXcRhfbVKzBLXui3iLpF\nHiPbzubWQa7AeGLNE+FRcFCDFK8rjjJpFK8r5vHVj4f3D2owynQVD1tsBhwxgONyjuPZ9c+Gr6Ey\nWMnfV/2dLHsWv/ztWzxzb4Hrd3rjrwR/8iEDTglyZIcjeW3ja1Q5VTWu+3c//R2dsjuFOznPJzBn\n3RxuOuUmHlj6QLWPZ3EOTkUPcGxwhLlvfMvc73+PIGTama4/K3R4R90gg9+c+jxvzaput40HP0Fh\n3r0ATHjVFTZnH3c2cz+JHhBELgc9xTeFqmBVdXaFddWrMY3tNzb8ud8R/cIj76I+RczdMDdK84rE\n2/76xtdraDIAcz+ZGzZ1eWanfYF9/HXZX8MmLw0qJwzYyob3AjgB9/qCXeYzxbecKYVTAKKiAGPb\nZ3Tf0Wzbs41XPn2Flz99mWw7m6I+RYybUEC/I9zw8m8OeZ7NBy9EVdkX2Bd+TrzAlqAT9CqOow4T\n35gYHsiFTXManX1i2pJpbN29lZX/zUStt0AzcaSKnO6fMXOVn+tevQ6Atz5/Cz6rHtxVVgajBpf+\nUj+Fd0+i6rNBZB4zCd8dU5tMmBgfSQpMnequghgMuo52b36A9/2uu6qjRFKdCZ6u8L/w3JAI+7OF\nq9o76uCUnoJ+cXqUj8IWOzxyZ9EkePdOd4QrVTDsD65vpbFYeU3cIABB0NJTXWd2rP+ktIAeeyZw\nfP5XHH7iFxzc7mAe9D/omi1CI0FVJcPKwBKrRtRXPAThqA5H8eXuL1Hc7MwXnHABW3dvjau1AHRe\n9yhf/vs61LHDbWOdPo0MK4OgE4w7Mr9n2D3srNgZV5DFmouqzXuuRhBp+hOELh27sGXXlur7Khbj\n+o/jvcWVfLzyJ5C3IOxbennDy2G/lS02llg1MirYYpNhZUS3V4yJse+tv2N99kwUt31V3SCJGnWP\nwMIiOyObUX1GRWks8fa5ddCtTF82ncpgZfxjiYVVNgj9Ykgoq7YbIp5hZWCLHa674GoFkffg1kG3\n8hf/X8KDDq+9AJ5c82TiDBO1YGGRf2Q+lcHK6jx5tRHh9xt4isPW3Vsp210W/XvEPb9w6iPceulg\nCnILmPBYMX+/+ZLwvbhw6iMMPCWYknaSrI/ECJIUiNVIvIlRXpRIY6YVT6cg8swlnv06cgJfPEen\nJ2iCWwbC2l/hrBnljopDnZnV5X3yj8hn5Vcrw2HJZx97Nlt3b61pvqqLRbfBu3e5gsoKYA/7Pxg8\nFREJrdnidlhnH3u26+TfnI/zj7ei/Cp2lxX89qe/5bt93zFz9cyQJpVcEIF3vdkZrp3+pnk3JezE\nauC99E4WYlehRcOqhWCcNsi2s3norIei/UTJnCOJ65DQP4Rwu1likWFlEHACNTQFzyz4xqY3auZ5\niyTFgYQgnHzkyRzZ4UiAuIEakZx59Jkc/aOjmbFqRsLnyBab844/L/GE3ThtJgjH/OgYPvv2syiH\nfg3B2UhEPQP1eBZjy0vucjLtTMb0HcNHL53PwtlnRNyLPyKD760RFVmvehpne/qJl4TRi1+fPj06\nBUoqYXyJUovUJVSSFWCe6auoT1Hc2Pneh/WuIWhuOvJZHvzTOQQDGWTaDv3OWUfhhVvodOz5FOb9\nJeHM+MI5heGOOFLz8SZBLlxSFa1lRJjXsrMsHrrhl6zJ2OE6XZ0qLMvi4bMeZtyAcW7Y8N0VvB0T\nUBDMXcaD/gcZ229stRCJ
48S/8MQLOTDzQJ5Z/0xU+4w4ekTYiT5v07waZp+ERMy30YhOwouSCpuo\nQhFtPQ7pwbxN85IXIt45akSzFcTV1BycsClHEPKPyGdP5R4+2vFRaL/qDurTQz7l1kG3sm3PttoF\nSSgLdaz5Mxk8QbZm25qwVmdh0aVTF0p2xj9n3yP6cuEJF9acoxRDwtF/gnuvKJu+3RQuJgiH/vd8\nvv7PiWhotdLGQhC6durqXmNMfTqMu4g9h71du+CKuOcKYXOqWB+APSTiXiwIm1sj5wmlAyNIUiR2\nQpI3WXHNmup0CammiogURBUV7rwTx6kWDl6Z2JxV9RVg4Vm2ceyskYKmMK8Q39MFOAFwgqCOTf8j\n+3Pf6P7AhdUHLCuAxQXuU5brHt83yhe2tRf1cW3rxeuK2bZnG5QVkPH0RAKVrhnLvvpn/Payn/Ld\ngBciolR6M3VRF3cUjYOoUL63PFz/KVfDe8VBKiqqojo2r9POsrOojOPEl9zlHH7g4Xz+7edRbWKJ\n5UZwlfiYu2EuL7+zHb64LdxJe53hmL5jOLjdwbyy4RU+2fFJdUcQp6MXhGv7Xxv+3u+Ifkx8YyKL\nNy9OOBJPmtICrOJ3cQKuBke/2dDnKTRk4om8rjXb1lSba2I6tI8Yzmk7TqueN5SIOianxqPHIT34\n9am/DgcFzFg1I/ybg8PmnZsT7vvw8oc55kfH8LNjfsYrn74S1zwY1GBCQVRrAEcI12x6CtvmPA2B\nrFAAyg2Q/0S4zOldTmdJ6ZLEufDq0DLKviuLW5/dG/rDYW8lvP5ILKwo/6J2Xlrve9FYGEHSSEQK\nC9uOzsHjOd0buvZD5LGr1zEJpdoodiPD4q3r0RABVvzqxnBUSOV7lRT3fYGCCdUT7cKjmkL3Or3U\nFLNnR6eCiacNebmlIif2+Uv9zFo7y9VSFp2Iu1hXBuII1/74ae4b0RVGxLRHLRPvCgpgwbs2xXPL\n+OjAx1jCChQ3hLTfEf0Ywxjm7tjAtvdqOupnr53Nr0/9tevUDHF5r8urJ6eVDozqbE+ffCcjh3aM\nskF3yu4Uns+TiAzLfe28dpi6aCqVwcooIeL5YA4/6PCwfd7T4MJzbGIQBGvzMIKBjFDHZMPKcbB2\nVI3Q7UFdBrFo86LqneN0sE7uspqzTT28jrL9DvjhUKxuC9HOy8PFvYCMeOl2njj/iaiJnfGuJVJj\nizQDeRMcw6HDsfWpqwONE0Di+c08lJCACWQBGeCo66f7yX8gdxmZViY79u6oXYjE0Xq8wAFFcRx3\nkbq9fUrYsrAKDdSh0fxLaigAAB5fSURBVMWYs9zUq3EGHXEGLplWZnjQli6MIGkkIs1cW7a42YGD\nwepZ7ZGhwMmu/RDp34i3jklWllsu0boekyY1IGqsZEiNsNpE1ztmTOJ5MrHaUKLcUr4SnxsBBOGX\nXByhXbZN0YVd4587QahzZN0KCroC9+IvvaBmAsxDbOyrf0bwi8FIt/cg1AEGnACdsjsx49wZvPjR\ni1zc42LK95bz3H+ec1/amM62x/cTmDQ4uo6RQk5E6H94fwq7FfLdvu/Ytmcb//3hvywpXcLM1TPD\noZ/ePpEzqb2os1hNEKrzl0Xa7jOtTDd89sCDmeurhIAAFmC7jtmYkXePQ3qwrGxZ2MyYefRSWAxV\nlVW1dmiWWIzInMz8p24nWGWDWogodpaDjBoRnogXmyQzdvLf1EVTyTkghxc/erGGz8i2bG4puIVO\n2Z3IOSCHNV+tiXJ21+jAa5lvdHrX06OXYojRoCR3OadxK0sWZxJstw354VCk20KcPJ+riTiK61iy\nkJJhDD4tC3+pv9ocGAcpGYZGPCd5O0cz8sK+Yc3Tu88byjeQ/eMS/mfGmzzw7MoaS24nur5DJlzO\n9pyXE54/koFHDmT6yOlpj94ygqQRiV2DInK0DvUzM8Ub0XsRYN46Jp6GEamRiESn0052XQ2Pogu7\nMvuhIJWVQbKyrISdOUQLR9t2Bajf754vVhuKl1vKS/+RaWe6HVruMjJHn8XYHxXXOdmqthxk8cp5\no/6gBsGBay/oRZeOHcg5YBQT31hTYyLnuAFutI6/1F/dyUeMZjOzJG7bxBNyfj/4VkC/7uu5YWP/\ncEe4r6QfU+6uYMrVheF94uV2ir3W2vxZM4+YyVwvJHfNaAhm1AidzrQyAXj4rIerw3PHFMHoDIrn\nbo7S5DKsDPr+pC8rv1qJs2UgWjKMnQeeHxKo7vFUBSdgcW2nOXQZWnOiXuQ1hKMEv+iHlpwOebuB\nU5CSoVjdFkGuH0cdHl7+cI1Z34kc7FIyFHWy3QSkDmGhaWHRzm4XpW0IgoZG7YKQufV0lhf/iWCF\ngAoqQTQUfbbunJvQ1x4GtbAzHf5242WU5xwUrcnhdtZj+48Nt2W/Ppdx8xIJv0PP/nZc+FnufVhv\nJj7+L1YsPQAnbwGVXVbQ6diPWTTrHG6bHWDRwmEhn0y1KfLQb37J9gjBdOg3l0QLkhhtLDM0l8cT\n6E0RAmwESSMRGx0VO1qH+pmZaozoi+P7QaBa69i5szpq7KabXD9NXansY/FMQ8loMQUFruP/ySfd\ncz3+eLS2FalFrflsCPbO0yA0Yi3MKwy1WQEP91rJmoy/AW6HVpCbWHg1lBqJFCPMa4kyLEO0YMg5\nIIc1Uf6a+BPACnILoKwA39OwPmrRshMJXjUQcpdC6anonLd5R9uz6CmYP7+ASYOTv1GJBGn53nKs\n3Pdds9Tha7Dm/Q11Msh4+zEKug5hxw5l48FP8Lg+XjNFR25NTc7TIArvnkTlnNfRYBZrMoWMjOrM\nul4SR7dNJuH3w9Sn4z8/vhIfFSX90TmhyDrLfTlUM2FREC0ahtN5SY1Z4UV9iqpNoMQEahyzFFlC\naMKgoMf4CYpNlp3FxT0uZtGWRdUTFmMnSe69jcer7JBQ1HBnvX3ZWSDL4OwbkB8O49qLT2DchUX4\nS/dUD3wgPOs+Ns3J6KvdzzXev7IC1v15oLsMg12JPeZst43LClhxbwFUKCJBrHNuhgEzybKzuHvM\nCG5+u1ow/fqyvtyw3o22q6mNjUDyVnFtv2trLOeQVlS1zf8NGDBA08nSpart26vatvv/0qWJt91z\nj/t/fY6ZlaWana1qWW4aSMuqPmYk99zjlvfSRYrEL9fY1y1SfU7bdusR7zqy2wV0/N/m6NItS+O2\nT23nqavdlm5ZqvcsvEeXbklcKJky9Tn30i1Ltf3d7dX+P1vb390+fNzIa8vIiLhvtqMZZ0xWa4ql\n1ojbVaxgjTarzzOS6Bq9OmWcMVkt2wk/M5mZ6p4z43tl7Klq/5+t9yysvlm1nXv8rSVR9R0/3i07\nY0b0PnXd16VblmrGGZMVqQo9M4HQX3X7xLZnuA6vjFeZIsoU1P4/W8e/Mj58PyPrHnufI7/X+G2p\n+1wiAXXtWFWKVKqdEXDrmPG9Zo0bElWXpVuW6vhXxuv4V8bXqGNd1x/5jooV0PG3l
oS3e88JqNoZ\nwfC7Eu/eeHXocWmxYrnth1Qqw28L39dUnyVVVWClJtHHNnsn3xR/6RYkkQ9Ho3YKof3Hj48WEPE6\nbK98oo69MR6qWJIRXJEviGVV1zlRm8Vrg7oETqIOPVVmzHA734SCe+E9av+fHe7YvE458tq8Dtyr\n/4yXPtB7Ft6jM176IKnBh9cG9bl3XmcZeY5IgQYBJf+xhMIv9lqXLnWfwezsugV/Mvd1xksfaGZ2\npVp2UO3MKs3MCtZon6iOO3T9M176oNHv84yXPlD75Jkq+Y+pddp9atlVijjVz3REZx+P2HtT2/XX\n1o5Ll7r3yDtv5LtSG959s2xHyfxerWsGafu728d9vhqCESRNKEjqM7pO5fh1aSRe2QsvdOvilZsx\no3HqV2NUFKM1jR9f89gzZkQLwBkzau6baseUqENPhWRe7GQ0Eq/94wmCGTNUzzyzuk3iXWuqz1a4\nE57h3qOQQUrtzCqd8dIH4XK1DYbqusex50umvlEaRC2CskZbxhE0DWXpUtXM7Mqw5iH5j4W1rmQ0\n+mQtEbFla3tXahu4JGqnSEHrtU2yA7W6SFaQGB9JI1DXGs91Udfs81h/Q12TEd98030VvImR3uJb\nqaxrkGhyY13XXf7/2zv3WDuK84D/vvuwoaQCYiJABdegoEZUTgxxKW5pZdJgQagiSyARGhWKrKAL\nlFKpqgOKVKVVFbf80RTHNDWkvJSoiQIlINLyMtwKyVcG8zA4cdJA6xIQLuAGIqricn2nf8yOz5y5\ns7Ozu+dx77nfTzo65+zZszvf7O58M99j5mBn8a6xMfs9lCklS04Ic+46HDlLADimp7tXwxwfn3/u\nsuixHNlmZjq+k6eesgEUMVnbJrP6ASBr1sAzz1jnOHMTHNy3+kjaT1k9++cHWLmy2m8Wk90trXDg\nAJx0kvUd+ItMlS2f8Oqr3fIf3Lc6OptvE6anKUKlBQ4bxmSciWWG2Q/ylsuOXZuySMmcerz66u5A\nmq7JGBPJxZ2AmtXFC1jf32WO55GjbRb7q98jkia4Ye7GjXkmg1z60astO24OvTh3ro9k6u/uNlOb\n92f1bFO9R3//sTE7MnGjhl6Raw7tVf3ljGhj9dyr83dGQ/a1fHleT9/5B/sx2vfNQpPL/89sv//F\nxn7MHD9fm3ps8vypj2TEFEl4Qbdvn+/zaNJAxx76lA22rb+m6YPQD/9M3fLFHsTQl7Fhw/z6jDmU\nB1HecN8256+SM6esbc/v++2c2ajsXg+v1caN3SbAXuGelypzXdUx6iiepvVYdb/06xlTRbJAFEnY\ns928udv2nmOPzeml5thge6FM+q0Q6uLK5AckpAIRYnWW6qn30/81qOvRTxlyyrIQRySDqBP/XL14\nblI+kn7JoopkgSiSMKxvbGx+72xycn7D7/eGw5ukqned25BWMUjFUXWuKrNLToNTdowNGzrXKKy7\nrnBNsddpWPjlr3s9Q8d+nfNUmhUzyuJ6/xs35o0AcjsITemVM9ova5nc/VZY/YoaNUYVyYJRJDt3\ndo9ARDqRGW7YHl5oP3qjKw9hrNMY1LH3G1PvwUmZyPpB1WiqTLZQJpfbUHcklqq7sDed6kn3gtxe\nZ24DW/daNhkN9LJRTpXHv9fbNpC9auB7+dzFjt10xNkr+XIViUZt9Zl16+DWW+2MvW6dktQ08DMz\ncN11nWx4Y2CiuEpzc/D44zbKJzzGzAxceaXdLxZpkhP95CJr7rzT7meM3d400isXP6Ll8GE7I4Cf\nIT893ZkC5tChTllCmWJy50yln4qyqppTLEXdNWRSZQ0jhMDuc+hQ95Q4seO9/37+tfTP46LWjEn/\nL3Vv5dZB2X7htYH2a/v4x60zkWqsjFVRdU1n/q6zdEQY1RmLduvn8wvoiGRQ5PYuQlPY5GTHLBEz\nv9TpceYMwWNO0UGNSMoy5MtyUapkMqZ9tEvT0VmTHmHKRBErQ2XCpHe83Gvp+43Gx7uTKav+1zTi\nq05dtYkebBORVrZv1Wg2FayRundTSa2xc4SjkF75llDT1sJSJLn4D7IfdpoavqamKMkl1ug4M1Ps\nQei1/yTVYPvKtcpPEZYr1QCUJQm6xtmfmqbMDFl23qYKrE4QRR2/WE4yYVkdNI1qyq2DumbXJr6+\nHNNo6rypfZsoqTq/j493nvGwg5Eyd5aZeuuQq0j6atoSkQuBW4Bx4BvGmL8Kfl8O3AN8EjgIXGaM\n2S8iK4B7gV8D7jLG/KH3n08CdwFHA/8M3FAIPBK4iRDvuw8uucQmKbntofnlmmu6zRYizZOPwvVU\nXDIWRNYViWxrOmx25jSw57viivnmg/XrrXnPmdvCtU/8Y+UkTZbtF5oVP/igU7cA3/8+bN4clyE8\nXpVJI2YmqTJRQHciW9U5mibKHjxozVpzc/a8VUmIvizQKf+rr3bMsq58ZfvmmF3d/2LXsyyJL2Xm\nqWN2Su0bm2G7yuRV9btverv99s59ODERTxb1zZ0pU2/fyNE2TV5Y5fEKcDqwDNgDnBnscy3w98Xn\nzwHfKT4fA5wHTAHbgv88DZwLCPAvwEVVZVkMIxJ/GOz3MlLDYt8JHIv8aloG/xhNIsRyzzU1Zcud\n48iemor3ynxy8yXKyh+aFV1vsGr+o7Lz5jrOU7/HTBShOaNOhFUOTU0+sclF/RFNSq5Urk5qVJnK\nm8ox85SNJqpMUbGRbx1zaG4dT0117j+wo+LUMXptKWDYpi1gHfCI9/0m4KZgn0eAdcXnCeBtQLzf\n/8BXJMDJwI+875cD26vKstAViX9DhFFaZbbRqrDUOg9ITtl8U1sT80LsmHWS1HIeGr+sVRncKXu3\nL2uVH6LOeR1VijgVjdbEVt+E3HslvA/Da+qX3ze9+PvWMSu5e73KrFtVh23CdcN9/M7fxIR9LzML\n1lX8oSIpe877kTRrzMJQJJdizVnu++9HRhd7gVO8768AJ3jfQ0WyFnjc+/5bwEMl578a2A3sXrly\nZW9rt8eUOdZ8pRKzy6acfL1s/GONadXDmMoYDv0xuaG1/nE3b47b8XfuNOacc+Y3UmEeRU6vM7Ut\nVrZUPkq4b1P7ednIrJ8huClyRiSuZx76nMJOUuqahCHY4YzYYSBBU4Wb4wsJfREbNsTv57CD1+QZ\n3LnTyitS/nz0uhPhs+QVif9aTCMSf5hfNWtv2YMXi/zyb/6602TkNlI7d9qht3/u2M0fNj51ktT8\nnn8suqx7llt7/s2bu/ft9VQbMblyo5xSpstQGcca1F6MSKoUf87/w162kytsdDduLO8ApMrvK1AR\ne5yqQILY81FlIi0b+fqmqphZLjbCDq9RLyPOfPrZiVgIikRNWzWo00POOVY4BfrUVD3zS3i8nOF+\nOA2Gb64K5aiTae0oG8n4pg2/d+h6hBs2dO+3YUOezE1MBXX+FyrUUGmEo8oyc2Yb80ZKObUlbIBj\nkUWpEVWooMJy1g2rzZU1PG/MhBZGRPmy
lpnbUte7bT33y1eSq0j6GbX1DHCGiJwGvI51pv9esM+D\nwJXADHYE80RR+CjGmDdE5Ocici6wC7gC+Fo/Cj9oytZWr7vmuvtPmATpIqK+/GWb1Dg314kWgerp\n1auif1wESciyZTYqJ0yuCqdQz5Fx/XobUeYimMBOTT8+buVZtsxGuj31VCf6DOz06Y8+2vnPJZek\nz5OK6vIjzMqmPk9FSPn7pBIx/STMuTl7Lbdtmx+Vk5NwmWJ62kanOXKS16qipMLEwfFx+MIXOlGA\nd9/dHf00MzM/wiu8Z3bsmJ8Y6hL03D2cishzsrqIPBG46KLOf93vYeSWuw5+qzQx0ZHFP/e6dXa7\nS+qdne3IsmVLJ+LM/R4uTV2H8F7sR9JmLXK0TdMX8Bng37Amqy8V2/4C+Gzx+Sjgu8DL2Gis073/\n7gf+G3gPeI0i4gtr3tpbHHMb3gim7LUYRiS9JNVDdb0ylyfiTEHue46Zoeyc4WjhzDM7ZSmzKVeZ\nylKmCd/xWtY7dDJs3pw/AirrHfu92bGx7ryS3FFbzEnr92B9mWILa4Wy1s1vCH+LmQKrTHI5vofU\nFC6xHn/YSy+7zmVObleWVH2Eia3ORxPz4/jnCKc4ipWlbFRUt4xVuLpJRTv20tTFsE1bC+m1lBRJ\nzg3uO+82bux+uNyU3TlO4xA3Pb6bT6ws7LNqDRb/YfEVXI58jrJInxxfQOwcX/lK2gae8/D6viun\nGLZvn+/zmZy0x8xJDMxt2MPORNgg5/pIUqHTYZRU3etUprT9evaVUNl/Q5NgrP79qLGyz36ghh9s\nkpppIiTmk2nSUfOvW1W0Y9Pjx1BFskQVSd3Q0nPO6b4pXehiXT+KIzYaKHNYphrGsMEOo19yoqj8\nXtvkZH7OSuwcZT4g53PJWc44NtVLTEH5DUNZfabKakz6PmjbI46NIJoqpzKlHY5ucx3jrp5jIdth\n2WOjkLKck3CEkbvsdSo4InUPV13TmJL1/9t2nRVHriLRSRtHjKps3fD3TZtgzx77fWzM3ppuWdxP\nf9r6VOr4Atwki7Oz85cg9bdDd8a0n4V86FBaxly/kb9U7uxst527yhcQnsPJdfPN8OCD3ccum0gz\nJLbssMva9/0UExO2HmZmyuszLFvoq4hllTtyM7pT2fcxO7+fle98YM6Xkzp2zP/mJqR0dVUnc9/P\nzPfrKzYBZNXncDlbfzaJ1DPijuHudRG46qrY8rjxujn//E79PflkJxN/YsLKNTFht73/vn2G/efI\n94/E6r4fqCIZEaoeTEc4DcfBg7B1q30PG4AyJVLlyCtrqMq2+8cU6W6koRMsUIfp6W7FIdLdYIdO\n0BzFtG4d3H9/x9H53HOwe3enwTp4sHsd8pD1660sofwi9n1sDM47D3bt6m6g60zlcdttnSCLiQnr\n5D7rrPlO4XAqFvebo2qN8Jhyc+V6/vn09B+xY4frt4f3aNk1ijXIYX2tWGEVAHTWig+V5MxM+piu\n3Hfc0bmvJifLlUisHLn38D33dDpThw7Z7+4c7tzG2PtkdhZeeqkTsFI19Uq/UEUyAlQ9mCHuxoo1\nFKtX50Vn+Teq2+7+Uza/1fR0vNfuH9M1qmA/X3DB/Ic1JzrKn58LbOTQtm22kQPbuPpK080hVnVc\nV3+xCKGqOc5i9bJlS3ev9aij4qO5nDmzZmbg2ms7x3MRSqGcfkPpR1a5+dXKGiS/fmKNtX+sstGQ\nO0aVoqkT/VbVWbr++s59cOedtuPk14kfRZiKckqNMGI0ness5MCB7vMb04n0M6b5/GE9Jcf+tdhf\no+4jaWLzbpMcVRU5U/WflMO4avrrOo7EVPJZU+dwTLY28fqp+qybaxBOp+HkyvGVON+M79Oqus5l\njm+XMJiKGKvKn6kKxMidOyv0QYnMjxqsE0XY1omd69/zI8X8QIOc56RJjlYZqLN96SiSJjd4m4ei\nKnImJGcf30GYctTWUYCpiKbcRLlBEHPsN1kDJTbBX070ViqBLvc6pxzLMTnLZKwKZ64qb0zZxRIZ\n63aGyq5VFaHMuZ2Esk5QeLxYeH+vIraMUUWypBSJMc16xm170+4YbUck4T5Vs7XWeVBijXRO1FHb\nB7BOmWI0GTG6RjMWMp0aHeQorZz6SY0Aq6KzykJtU1FL4fxaZXUWi2KK3Re9yARP1VnV/GCp/+aW\nq5c5JMaoIllyimSY5A7XU/uEppGq+ZCaPvRNE/h6RW4D0bQhcTLUnS6lF9cwVeYcpeGH1oY5IOHx\ny2bWHURnIPc+KTOfxhJQ25wn/I+OSFSRLFlyRyS9PE+/Rx4x6prmmii2YcpYVuawTDGlkWsCrZt/\n0Uvq1G1M5qmpzsSVuTlNTcrYqzrIVSQataUsCFIx/r0MX+xVJE1T6kTVNJlnDYYXAgrpOeP8endl\nnJuzEVAukq+qbqrqpGmd5VKnbsPoMT8y7OKL4YEHrCqZne3tNep3HcRQRaIsGGJJgIM4zyAZhCIb\nWghoBWG9h2UctpLPoW7dOpm3bOlWQCedZEO9F9o1aorY0ctos3btWrN79+5hF0NRBkZOLsawWQxl\njNGk3LFcL1j48ovIs8aYtZX7qSJRFEXpP4tRceYqEjVtKYqiDIBhmlT7zdiwC6AoiqIsblSRKIqi\nKK1QRaIoiqK0QhWJoiiK0gpVJIqiKEorVJEoiqIorVgSeSQi8hbwnw3+egLwdo+Ls9BRmZcGKvPS\noK3Mv2yM+UjVTktCkTRFRHbnJOOMEirz0kBlXhoMSmY1bSmKoiitUEWiKIqitEIVSZrbhl2AIaAy\nLw1U5qXBQGRWH4miKIrSCh2RKIqiKK1QRaIoiqK0YkkrEhG5Q0TeFJG93rYPi8hjIvKT4v34YruI\nyFYReVlEXhSRs4dX8uaIyKki8qSI/FBEfiAiNxTbR1ZuETlKRJ4WkT2FzH9ebD9NRHYVsn1HRJYV\n25cX318ufl81zPI3RUTGReR5EXmo+D7S8gKIyH4ReUlEXhCR3cW2kb23AUTkOBG5V0R+JCL7RGTd\noGVe0ooEuAu4MNh2I7DDGHMGsKP4DnARcEbxuhr4+oDK2GtmgT8xxpwJnAtcJyJnMtpyHwI+ZYz5\nBLAGuFBEzgX+GviqMeajwM+ATcX+m4CfFdu/Wuy3GLkB2Od9H3V5HecbY9Z4+ROjfG8D3AI8bIz5\nGPAJ7DUfrMzGmCX9AlYBe73vPwZOLj6fDPy4+LwduDy232J+AQ8AFywVuYFfAJ4Dfh2b8TtRbF8H\nPFJ8fgRYV3yeKPaTYZe9ppynFA3Ip4CHABlleT259wMnBNtG9t4GjgX+I7xeg5Z5qY9IYpxojHmj\n+HwAOLH4/EvAT739Xiu2LVoKE8ZZwC5GXO7CzPMC8CbwGPAK8I4xZrbYxZfriMzF7+8CKwZb4tb8\nLbAZmCu+r2C05XUY4FEReVZEri62jfK9fRrwFnBnYcb8hogcw4BlVkWSwFiVPZLx0SLyIeA+4I+N\
nMT/3fxtFuY0xh40xa7A99XOAjw25SH1DRH4XeNMY8+ywyzIEzjPGnI014VwnIr/t/ziC9/YEcDbw\ndWPMWcD/0DFjAYORWRXJfP5LRE4GKN7fLLa/Dpzq7XdKsW3RISKTWCXyLWPMPxWbR15uAGPMO8CT\nWNPOcSIyUfzky3VE5uL3Y4GDAy5qG34T+KyI7Ae+jTVv3cLoynsEY8zrxfubwP3YTsMo39uvAa8Z\nY3YV3+/FKpaByqyKZD4PAlcWn6/E+hDc9iuKqIdzgXe9oeOiQUQE+AdgnzHmb7yfRlZuEfmIiBxX\nfD4a6xPah1Uolxa7hTK7urgUeKLo1S0KjDE3GWNOMcasAj6HLf/nGVF5HSJyjIj8ovsMbAD2MsL3\ntjHmAPBTEfmVYtPvAD9k0DIP21k0ZEfVPwJvAB9gNfsmrG14B/AT4HHgw8W+AtyKta2/BKwddvkb\nynwedpj7IvBC8frMKMsNfBx4vpB5L/BnxfbTgaeBl4HvAsuL7UcV318ufj992DK0kH098NBSkLeQ\nb0/x+gHwpWL7yN7bhRxrgN3F/f094PhBy6xTpCiKoiitUNOWoiiK0gpVJIqiKEorVJEoiqIorVBF\noiiKorRCFYmiKIrSClUkitIQETlczDLrXjdW/yv72KvEm5VaURYyE9W7KIpSwv8aO+2KoixpdESi\nKD2mWBPj5mJdjKdF5KPF9lUi8kSxDsQOEVlZbD9RRO4Xu17KHhH5jeJQ4yJyu9g1VB4tsvIRkT8S\nu57MiyLy7SGJqShHUEWiKM05OjBtXeb99q4xZjWwDTsTL8DXgLuNMR8HvgVsLbZvBf7V2PVSzsZm\nZYNdM+JWY8yvAu8AlxTbbwTOKo4z1S/hFCUXzWxXlIaIyHvGmA9Ftu/HLqT178UEmQeMMStE5G3s\n2g8fFNvfMMacICJvAacYYw55x1gFPGbswkSIyBeBSWPMX4rIw8B72OkwvmeMea/PoipKEh2RKEp/\nMCWf63DI+3yYjk/zYux8SWcDz3gz+irKUFBFoij94TLvfab4vBM7Gy/A54Gnis87gGvgyAJcx5Yd\nVETGgFONMU8CX8RO+T5vVKQog0R7MorSnKOLVRcdDxtjXAjw8SLyInZUcXmx7XrsSnZ/il3V7qpi\n+w3AbSKyCTvyuAY7K3WMceCbhbIRYKuxa6woytBQH4mi9JjCR7LWGPP2sMuiKINATVuKoihKK3RE\noiiKorRCRySKoihKK1SRKIqiKK1QRaIoiqK0QhWJoiiK0gpVJIqiKEor/h+mPrdO7d3H3QAAAABJ\nRU5ErkJggg==\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZIAAAEWCAYAAABMoxE0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsvXl8VNX5wP19ZrKgotBGW0QCQW2V\nJcoSlxSRQSzFhapFW7cGcYmIqGj7WrBqqfqKWq0UtAouSFyKVt4iCqg/A1GQkR2ksghKBAQUUkUR\nyGRmnvePOzOZTGYmM5k14Xw/n3wyd+bce8+5y3nOs5zniKpiMBgMBkNzsWW6AgaDwWBo2RhBYjAY\nDIaEMILEYDAYDAlhBInBYDAYEsIIEoPBYDAkhBEkBoPBYEgII0gMGUdE7CKyT0Q6J7NsJhGRE0Uk\n6bH1InKuiFQHbW8Ukf6xlG3GuZ4Vkbuau3+U4z4gIi8k+7iGzJGT6QoYWh4isi9o83CgFvD4tm9U\n1ZfjOZ6qeoC2yS57KKCqJyXjOCJyPXC1qjqCjn19Mo5taP0YQWKIG1UNdOS+Ee/1qvpepPIikqOq\n7nTUzWAwpB9j2jIkHZ/p4lUR+ZeIfA9cLSKlIvKRiHwrIjtFZJKI5PrK54iIikiRb/sl3+/zROR7\nEXGKSNd4y/p+P09EPhWRvSIyWUQ+FJFrItQ7ljreKCKbReQbEZkUtK9dRB4XkRoR+RwYEuX6/FlE\nZoR896SI/N33+XoRWe9rz2c+bSHSsbaLiMP3+XARedFXt0+AviFl7xaRz33H/UREfu37vhh4Aujv\nMxvuCbq244P2H+lre42IzBKRY2O5Nk0hIpf46vOtiMwXkZOCfrtLRHaIyHcisiGorWeKyErf91+J\nyN9iPZ8hBaiq+TN/zf4DqoFzQ757AHABQ7EGK4cBpwFnYGnBxwOfAqN95XMABYp82y8Be4ASIBd4\nFXipGWV/AnwPXOT77Q6gDrgmQltiqeMbQDugCPifv+3AaOAToBNQAHxgvV5hz3M8sA84IujYXwMl\nvu2hvjICnAMcAE7x/XYuUB10rO2Aw/f5UaAK+BHQBVgXUva3wLG+e3Klrw4/9f12PVAVUs+XgPG+\nz4N9dewFtAH+CcyP5dqEaf8DwAu+z9189TjHd4/uAjb6PvcAvgA6+Mp2BY73fV4GXOH7fCRwRqbf\nhUP5z2gkhlSxSFXfVFWvqh5Q1WWqukRV3ar6OTAVGBBl/9dVdbmq1gEvY3Vg8Za9EFitqm/4fnsc\nS+iEJcY6TlDVvapajdVp+8/1W+BxVd2uqjXAQ1HO8znwXywBB/BL4BtVXe77/U1V/Vwt5gOVQFiH\negi/BR5Q1W9U9QssLSP4vK+p6k7fPXkFaxBQEsNxAa4CnlXV1ap6EBgLDBCRTkFlIl2baFwOzFbV\n+b579BCWMDoDcGMJrR4+8+gW37UDa0DwMxEpUNXvVXVJjO0wpAAjSAypYlvwhoicLCJzRGSXiHwH\n3AccHWX/XUGf9xPdwR6pbMfgeqiqYo3gwxJjHWM6F9ZIOhqvAFf4Pl/p2/bX40IRWSIi/xORb7G0\ngWjXys+x0eogIteIyBqfCelb4OQYjwtW+wLHU9XvgG+A44LKxHPPIh3Xi3WPjlPVjcAfsO7D1z5T\naQdf0RFAd2CjiCwVkfNjbIchBRhBYkgVoaGvU7BG4Seq6lHAvVimm1SyE8vUBICICA07vlASqeNO\noDBou6nw5NeAc0XkOCzN5BVfHQ8DXgcmYJmd2gPvxliPXZHqICLHA08BNwEFvuNuCDpuU6HKO7DM\nZf7jHYllQvsyhnrFc1wb1j37EkBVX1LVflhmLTvWdUFVN6rq5Vjmy8eAmSLSJsG6GJqJESSGdHEk\nsBf4QUS6ATem4ZxvAX1EZKiI5AC3AcekqI6vAWNE5DgRKQD+FK2wqu4CFgEvABtVdZPvp3wgD9gN\neETkQmBQHHW4S0TaizXPZnTQb22xhMVuLJl6A5ZG4ucroJM/uCAM/wKuE5FTRCQfq0NfqKoRNbw4\n6vxrEXH4zv3/YPm1lohINxEZ6DvfAd+fF6sBvxeRo30azF5f27wJ1sXQTIwgMaSLPwDDsTqJKVhO\n8ZSiql8BvwP+DtQAJwCrsOa9JLuOT2H5MtZiOYJfj2GfV7Cc5wGzlqp+C9wO/AfLYX0plkCMhb9g\naUbVwDygIui4HwOTgaW+MicBwX6F/wM2AV+JSLCJyr//21gmpv/49u+M5TdJCFX9BOuaP4Ul5IYA\nv/b5S/KBR7D8WruwNKA/+3Y9H1gvVlTgo8DvVNWVaH0MzUMss7HB0PoRETuWKeVSVV2Y6foYDK0F\no5EYWjUiMsRn6skH7sGK9lma4WoZDK0KI0gMrZ2zgM+xzCa/Ai5R1UimLYPB0AyMactgMBgMCWE0\nEoPBYDAkxCGRtPHoo4/WoqKiTFfDYDAYWhQrVqzYo6rRQuaBQ0SQFBUVsXz58kxXw2AwGFoUItJU\nhgbAmLYMBoPBkCBGkBgMBoMhIYwgMRgMBkNCHBI+EoPBkF7q6urYvn07Bw8ezHRVDDHQpk0bOnXq\nRG5upFRr0TGCxGAwJJ3t27dz5JFHUlRUhJV02ZCtqCo1NTVs376drl27Nr1DGIxpy2AwJJ2DBw9S\nUFBghEgLQEQoKChISHs0gqSV4nTChAnWf4MhExgh0nJI9F4Z01YrxOmEQYPA5YK8PKishNLSTNfK\nYDC0VoxG0gqpqrKEiMdj/a+qynSNDIb0UlNTQ69evejVqxcdOnTguOOOC2y7XLEtWzJixAg2btwY\ntcyTTz7Jyy+/nIwqc9ZZZ7F69eqkHCvdGI2kFeJwWJqIXyNxODJdI4MhvRQUFAQ65fHjx9O2bVv+\n+Mc/NiijqqgqNlv48fS0adOaPM/NN9+ceGVbAUYjaYWUllrmrPvvN2YtQ8vBuc3JhIUTcG5LnWNv\n8+bNdO/enauuuooePXqwc+dOysvLKSkpoUePHtx3332Bsn4Nwe120759e8aOHcupp55KaWkpX3/9\nNQB33303EydODJQfO3Ysp59+OieddBKLFy8G4IcffmDYsGF0796dSy+9lJKSkiY1j5deeoni4mJ6\n9uzJXXfdBYDb7eb3v/994PtJkyYB8Pjjj9O9e3dOOeUUrr766qRfs1gwGkkrpbTUCBBDy8G5zcmg\nikG4PC7y7HlUllVSWpiaB3jDhg1UVFRQUlICwEMPPcSPf/xj3G43AwcO5NJLL6V79+4N9tm7dy8D\nBgzgoYce4o477uD5559n7NixjY6tqixdupTZs2dz33338fbbbzN58mQ6dOjAzJkzWbNmDX369Ila\nv+3bt3P33XezfPly2rVrx7nnnstbb73FMcccw549e1i7di0A3377LQCPPPIIX3zxBXl5eYHv0o3R\nSAwGQ8apqq7C5XHhUQ8uj4uq6qqUn
euEE04ICBGAf/3rX/Tp04c+ffqwfv161q1b12ifww47jPPO\nOw+Avn37Ul1dHfbYv/nNbxqVWbRoEZdffjkAp556Kj169IhavyVLlnDOOedw9NFHk5uby5VXXskH\nH3zAiSeeyMaNG7n11lt55513aNeuHQA9evTg6quv5uWXX272hMJESakg8S1zulFENotII/EtIvki\n8qrv9yUiUuT7/pciskJE1vr+n+P7/nARmSMiG0TkExF5KJX1NxgM6cFR5CDPnodd7OTZ83AUOVJ2\nriOOOCLwedOmTfzjH/9g/vz5fPzxxwwZMiTsfIq8vLzAZ7vdjtvtDnvs/Pz8Jss0l4KCAj7++GP6\n9+/Pk08+yY033gjAO++8w8iRI1m2bBmnn346Ho8nqeeNhZQJEhGxA08C5wHdgStEpHtIseuAb1T1\nROBx4GHf93uAoapaDAwHXgza51FVPRnoDfQTkfNS1QaDwZAeSgtLqSyr5P6B96fUrBXKd999x5FH\nHslRRx3Fzp07eeedd5J+jn79+vHaa68BsHbt2rAaTzBnnHEGCxYsoKamBrfbzYwZMxgwYAC7d+9G\nVbnsssu47777WLlyJR6Ph+3bt3POOefwyCOPsGfPHvbv35/0NjRFKn0kpwObVfVzABGZAVwEBF/F\ni4Dxvs+vA0+IiKjqqqAynwCHiUi+qu4HFgCoqktEVgKdUtgGg8GQJkoLS9MmQPz06dOH7t27c/LJ\nJ9OlSxf69euX9HPccsstlJWV0b1798Cf3ywVjk6dOnH//ffjcDhQVYYOHcoFF1zAypUrue6661BV\nRISHH34Yt9vNlVdeyffff4/X6+WPf/wjRx55ZNLb0BQpW7NdRC4Fhqjq9b7t3wNnqOrooDL/9ZXZ\n7tv+zFdmT8hxRqrquSHHbw+sBM71C6uQ38uBcoDOnTv3/eKLmNZnMRgMSWD9+vV069Yt09XICtxu\nN263mzZt2rBp0yYGDx7Mpk2byMnJrlincPdMRFaoakmEXQJkV0tCEJEeWOauwSHf5wD/AiaFEyIA\nqjoVmApQUlKSGmlpMBgMTbBv3z4GDRqE2+1GVZkyZUrWCZFESWVrvgQKg7Y7+b4LV2a7Tzi0A2oA\nRKQT8B+gTFU/C9lvKrBJVSemouIGg8GQLNq3b8+KFSsyXY2UksqorWXAz0Skq4jkAZcDs0PKzMZy\npgNcCsxXVfWZreYAY1X1w+AdROQBLIEzJoV1NxgMBkOMpEyQqKobGA28A6wHXlPVT0TkPhH5ta/Y\nc0CBiGwG7gD8IcKjgROBe0Vkte/vJz4t5c9YUWArfd9fn6o2GAwGg6FpUmqoU9W5wNyQ7+4N+nwQ\nuCzMfg8AD0Q4rMlNbTAYDFmEmdluMBgMhoQwgsRgMLQ6Bg4c2Ghy4cSJE7npppui7te2bVsAduzY\nwaWXXhq2jMPhYPny5VGPM3HixAYTA88///yk5MEaP348jz76aMLHSTZGkBgMhlbHFVdcwYwZMxp8\nN2PGDK644oqY9u/YsSOvv/56s88fKkjmzp1L+/btm328bMcIEoPBkBUkc3noSy+9lDlz5gQWsaqu\nrmbHjh30798/MK+jT58+FBcX88YbbzTav7q6mp49ewJw4MABLr/8crp168Yll1zCgQMHAuVuuumm\nQAr6v/zlLwBMmjSJHTt2MHDgQAYOHAhAUVERe/ZY86z//ve/07NnT3r27BlIQV9dXU23bt244YYb\n6NGjB4MHD25wnnCsXr2aM888k1NOOYVLLrmEb775JnB+f1p5f7LI999/P7CwV+/evfn++++bfW3D\n4l/cpTX/9e3bVw0GQ/pYt25dXOUXL1Y97DBVu936v3hx4nW44IILdNasWaqqOmHCBP3DH/6gqqp1\ndXW6d+9eVVXdvXu3nnDCCer1elVV9YgjjlBV1S1btmiPHj1UVfWxxx7TESNGqKrqmjVr1G6367Jl\ny1RVtaamRlVV3W63DhgwQNesWaOqql26dNHdu3cH6uLfXr58ufbs2VP37dun33//vXbv3l1Xrlyp\nW7ZsUbvdrqtWrVJV1csuu0xffPHFRm36y1/+on/7299UVbW4uFirqqpUVfWee+7R2267TVVVjz32\nWD148KCqqn7zzTeqqnrhhRfqokWLVFX1+++/17q6ukbHDnfPgOUaQx9rNBKDwZBxUrE8dLB5K9is\nparcddddnHLKKZx77rl8+eWXfPXVVxGP88EHHwQWjDrllFM45ZRTAr+99tpr9OnTh969e/PJJ580\nmZBx0aJFXHLJJRxxxBG0bduW3/zmNyxcuBCArl270qtXLyB6qnqw1kf59ttvGTBgAADDhw/ngw8+\nCNTxqquu4qWXXgrMoO/Xrx933HEHkyZN4ttvv036zHojSAwGQ8bxLw9ttydveeiLLrqIyspKVq5c\nyf79++nbty8AL7/8Mrt372bFihWsXr2an/70p2FTxzfFli1bePTRR6msrOTjjz/mggsuaNZx/PhT\n0ENiaejnzJnDzTffzMqVKznttNNwu92MHTuWZ599lgMHDtCvXz82bNjQ7HqGwwgSg8GQcVKxPHTb\ntm0ZOHAg1157bQMn+969e/nJT35Cbm4uCxYsoKmErmeffTavvPIKAP/973/5+OOPASsF/RFHHEG7\ndu346quvmDdvXmCfI488Mqwfon///syaNYv9+/fzww8/8J///If+/fvH3bZ27drxox/9KKDNvPji\niwwYMACv18u2bdsYOHAgDz/8MHv37mXfvn189tlnFBcX86c//YnTTjst6YKkdWUOMxgMLZZULA99\nxRVXcMkllzSI4LrqqqsYOnQoxcXFlJSUcPLJJ0c9xk033cSIESPo1q0b3bp1C2g2p556Kr179+bk\nk0+msLCwQQr68vJyhgwZQseOHVmwYEHg+z59+nDNNddw+umnA3D99dfTu3fvqGasSEyfPp2RI0ey\nf/9+jj/+eKZNm4bH4+Hqq69m7969qCq33nor7du355577mHBggXYbDZ69OgRWO0xWaQsjXw2UVJS\nok3FfRsMhuRh0si3PBJJI29MWwaDwWBICCNImkEy490NBoOhpWN8JHHidMKgQVaIYl5e8hyDLR2n\n0wrZdDjM9TBYqG9JWEP2k6iLwwiSOAkX736od5xGuBpCadOmDTU1NRQUFBhhkuWoKjU1NbRp06bZ\nxzCCJE788e7+TjMZ8e4tHSNcDaF06tSJ7du3s3v37kxXxRADbdq0oVOnTs3e3wiSOPHHuxszTj1G\nuBpCyc3NpWvXrpmuhiFNGEHSDFIR796SMcLVYDi0MYLEkBSMcDUYDl1M+K/BYDAYEsIIEoPBYDAk\nhBEkBkMLxUyMNWQLxkdiMLRAzNwdQzZhNBKDoQWSioWgDIbmYgSJwdACScVCUAZDc0mpIBGRISKy\nUUQ2i8jYML/ni8irvt+XiEiR7/tfisgKEVnr+39O0D59fd9vFpFJYvIvGA5BUrEQlMHQXFLmIx
<base64-encoded PNG image data omitted>",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -802,7 +844,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsnXl8VNXZ+L/P3CQssmnUgiQQ6goY\nEYhoqmAQtWBdsNiK2gbceKvCW2zVV21VXFqs2hb3AgolVUGrPyMWECwSQAhCICCCCwiBhE2M4gZk\nMnPP74+75M5kkpksk0zC+X4++WTu/txzzz3PeZZzriil0Gg0Go2mNnzNLYBGo9FoEh+tLDQajUYT\nFa0sNBqNRhMVrSw0Go1GExWtLDQajUYTFa0sNBqNRhMVrSwSHBExROR7EenRmPs2JyJykog0es62\niFwoIiWe5U9FZHAs+9bjWi+IyL31Pb61ISJlIpLTyOd8SUQmNeY5NfUnqbkFaG2IyPeexfZABRC0\nl/9HKfVyXc6nlAoCHRp73yMBpdSpjXEeEbkJ+JVSKsdz7psa49yaxkFEXgK2KqUmNbcsrRWtLBoZ\npZTbWNs915uUUv+taX8RSVJKBZpCNo1G03AivbN1fY9b4nuv3VBNjIg8IiKvishsEfkO+JWIZIvI\nKhE5ICJ7ROQpEUm2908SESUiGfbyS/b2BSLynYgUikivuu5rbx8hIp+JyDci8rSIrBCRsTXIHYuM\n/yMiW0XkaxF5ynOsISJ/F5FyEdkGDK+lfP4gInPC1j0rIn+zf98kIh/b9/O53euv6Vyua0RE2ovI\nv2zZNgEDw/b9o4hss8+7SUQut9dnAs8Ag20X35eesp3kOf439r2Xi0i+iHSLpWwiyPyIiMyx68f3\nIrJBRE605dsvIjtF5ELP/l1EZKb9TMpE5CER8dnbThaRJSLylYh8ad9/57Dy+Z2IbLTrwGwRaVOD\nXLWey+Zs+9l8LSIvOucSkeNFZL5dd74SkWWe8/YVkaX2to0i8rMarn+TiBR4lt26LiK3AlcD99pl\n9qa9T5qIvGmX23YRua2Wcm8rIn8TkVIR2Sciz4lIW3vbhSJSIiL3isheYHqkdfa+0erBrSKyFfik\nJlkSFqWU/ovTH1ACXBi27hHAD1yGpazbAWcBZ2NZej8GPgPG2/snAQrIsJdfAr4EsoBk4FXgpXrs\nezzwHXCFve13QCUwtoZ7iUXGt4DOQAbwlXPvwHhgE5AGpALLrKoX8To/Br4HjvKc+wsgy16+zN5H\ngAuAQ8AZ9rYLgRLPucqAHPv3E0ABcDTQE9gctu8vgW72M7nWluFH9rabgIIwOV8CJtm/L7ZlPBNo\nCzwHvBdL2US4/0fse7rQPvYVYDtwt718C7DFs//b9vXaAz8C1gI32ttOAYYBKfbzXgE8EVY+q4Cu\n9nP5DMsSjiRXLOf60H7Gx9rndcrncSyFm2wfP8Ren2Lf2132tgvtcj8pQhmHPAMi1/VJnu0+YD1w\nr32dk7Dex2E13N/TwJt2/egEzAce9tSrAPBn+1ztalgXSz14x75Gu+Zun+rcnjW3AK35j5qVxXtR\njrsD+Lf9O9JL8Q/PvpcDH9Vj3xuA5Z5tAuyhBmURo4zneLb/P+AO+/cyPI0QcAk1KAt7+yrgWvv3\nCODTWvb9D3Cb/bs2ZbHT+yyAW737RjjvR8DP7N/RlMUs4M+ebZ2w4lRp0comwnUfARZ4lq8EvgF8\n9vLR9vk6AN2xFEsbz/6/Bt6t4dxXAWvCyme0Z/lvwDMxPv9I5/I+48ud54bVoP4/4MSwcwwFdgHi\nWfdv4I8RyriuyuJcYFvY9e4Dpke4Fx9wGOjpWTcYWynb9eowkOLZHmldLPVgSCzlm4h/OmbRPJR6\nF0TkNOCvWK6R9lgV64Najt/r+X2Q2oPaNe17glcOpZQSkbKaThKjjDFdC9hRi7xg9aavsf9fa/93\n5LgU66U/Geslbw+siXI+sKyGGmUQy/12O5bVgS37sTGcF6z7W+ksKKW+FZGvsRpzp0zq8sz2eX4f\nAvYrpUzPsiNfT6ANsE9EnP19WJ0URKQr8BRWw9nR3rY/7Frhch0TSaAYzxVevifYvx8FHgQWi0gQ\nqwPzuL19p7JbVs9x3SPJUEd6Aj1E5IBnnYFlXYbTFascN3jKUcL22aeU8kdZF0s9CHn3WxI6ZtE8\nhKeNTsXqyZ6klOoE3E/1ytrY7MHq8QAg1ltS20vaEBn3AOme5Wipva8BF4pIdyw32Su2jO2A14HJ\nWC6iLsCiGOXYW5MMIvJj4HksF0+qfd5PPOeNlua7myolg4h0xLIAdsUgV0MoxW7glVJd7L9OSqkz\n7O1/wcrGy7Sf2VjqX69iOVd4+e4Gq9FUSt2ulMoARgL/JyLn29vTxdNC28dFKrcfsDoGDl3Dtoc/\no1Isy6CL56+jUuqyCOfeh+UaPtWzb2ellDcmE6kOhK+LpR602Gm+tbJIDDpiuRp+EJHewP80wTX/\nAwwQkctEJAn4LXBcnGR8DZgoIt1FJBX4v9p2VkrtBd4H/onlythib2qD5R/eDwRtK2NYHWS41w4I\n98CKozh0wHqJ92PpzZuB0zzb9wFpYgf0IzAbuFFEzrCDupOxXHw1WmqNgVKqFFgKPCEinUTEJ9YY\nliH2Lh2xGtlvRCQdy3VYX2I513jPM74HK0aGXcdOtJXCN1iuGROrFx4Afi8iySJyAZaL8tUI594A\nnCEimXan4YGw7fuwYlkOhYBfRH5vB68N+9iBYcehrJTzF4ApInKcWKSJyMUxlo1Ds9SDpkIri8Tg\n98AYrIDzVCK/LI2KUmofVgbJ34By4ESgGKv32NgyPg8sBjZiuYxej+GYV7D8wq4LSil1AMtV9CZW\nkPgqLKUXCw9gWTglwAIgz3PeD7ECnKvtfU4l1MX2LrAFy93jdds4x78DPGTLtQerd3xdjHI1lF8B\nR2EF7L/G8vk7ve4HgEFYDfRc4I0GXCeWc80G/gt8DnyKFasAqzzfwwperwCeVEotV0pVYCUsXIGV\niPEUVqxqS/iJlVKb7fMV2OdeFrbLC0A/OxPrdWWlpV5iy1xin38qVhwhEr/HcoGttu9xEZarM2aa\nuR7EHQl1F2qOVETEwDKjr1JKLW9ueTQaTWKhLYsjGBEZbrtl2mAFjSuxelYajUYTglYWRzbnAduw\nfPU/Ba60XQMajUYTgnZDaTQajSYq2rLQaDQaTVRazaC8Y489VmVkZDS3GBqNRtOiWLt27ZdKqdrS\n5oFWpCwyMjIoKipqbjE0Go2mRSEi0WZUALQbSqPRaDQxoJWFRqPRaKKilYVGo9FootJqYhYajaZp\nqKyspKysjMOHDze3KJo60LZtW9LS0khOrmmKs9rRykKj0dSJsrIyOnbsSEZGBqETxmoSFaUU5eXl\nlJWV0atXr+gHRCCubih7OolP7c8M3
h1h+xARWSciARG5KmxbUETW239z4ymnRqOJncOHD5OamqoV\nRQtCREhNTW2QNRg3y8KemO5Z4CKsr2itEZG59uyRDjux5sWPNN3xIaXUmfGST1OdwkIoKICcHMjO\nbm5pNImMVhQtj4Y+s3i6oQYBW5VS2wBEZA7WVMSuslBKldjbzEgn0DQdhYUwbBj4/ZCSAosXa4Wh\n0WiqiKcbqjuhnxAso26fS2wrIkUiskpERkbaQUTG2fsU7d8f/oVHTV0oKLAURTBo/S8oaG6JNJrI\nlJeXc+aZZ3LmmWfStWtXunfv7i77/eFfPo3M9ddfz6efflrrPs8++ywvv/xyY4jMeeedVy1WcOml\nl9KlS5eQdU888QTt27fnu+++c9f997//pXPnzu49nnnmmSxZsqRR5KoLiRzg7qmU2mV/8vI9Edmo\nlPrcu4NSahowDSArK0vPiNgAcnIsi8KxLHJymlsijSYyqamprF+/HoBJkybRoUMH7rgj1JOtlEIp\nhc8XuT88c+bMqNe57bbbGi6sh44dO7Jq1SrOOeccvvrqK/bt21dtn9mzZzNw4EDy8/P59a9/7a4f\nOnQo+fn5jSpPXYmnZbGL0G/yplGHbxIrpXbZ/7dhfR2rf2MKpwklO9tyPT38sHZBaRqfwtJCJi+f\nTGFpYdyusXXrVvr06cN1111H37592bNnD+PGjSMrK4u+ffvy0EMPufued955rF+/nkAgQJcuXbj7\n7rvp168f2dnZfPHFFwD88Y9/ZMqUKe7+d999N4MGDeLUU09l5cqVAPzwww+MGjWKPn36cNVVV5GV\nleUqsnBGjx7NnDlzAHj99de56qqQnB4+++wzAoEAkyZNYvbs2Y1ePg0lnspiDXCyiPQSkRRgNNbn\nGKMiIkfbH+RBRI4FzsUT69DEh+xsuOcerSg0jUthaSHD8oZx35L7GJY3LK4K45NPPuH2229n8+bN\ndO/enUcffZSioiI2bNjAu+++y+bN1ZuRb775hvPPP58NGzaQnZ3NjBkzIp5bKcXq1at5/PHHXcXz\n9NNP07VrVzZv3sx9991HcXFxjbJddNFFvPfee5imyauvvsrVV18dsn327NmMHj2anJwcPvroI778\n8kt325IlS0LcUCUlJfUonYYRN2VhfwN3PLAQ+Bh4TSm1SUQeEpHLAUTkLBEpA34BTBWRTfbhvYEi\nEdkALAEeDcui0mg0LYSCkgL8QT9BFcQf9FNQUhC3a5144olkZWW5y7Nnz2bAgAEMGDCAjz/+OKKy\naNeuHSNGjABg4MCBNTbEP//5z6vt8/777zN69GgA+vXrR9++fWuULTk5mXPOOYc5c+YQDAZJS0sL\n2T5nzhxGjx6NYRiMHDmS11+v+lT90KFDWb9+vfvXHDNsxzVmoZSaD8wPW3e/5/caLPdU+HErgcx4\nyqbRaJqGnIwcUowU/EE/KUYKORk5cbvWUUcd5f7esmULTz75JKtXr6ZLly786le/ijjOICUlxf1t\nGAaBQCDiudu0aRN1n2iMHj2aX/ziFzzyyCMh64uLi9m2bRtDhw4FoKKiglNOOYXf/OY39bpOPNBz\nQ2k0mriSnZ7N4tzFPDz0YRbnLiY7vWn8nN9++y0dO3akU6dO7Nmzh4ULFzb6Nc4991xee+01ADZu\n3BjRcvGSk5PD3XffHdEF9cgjj1BSUkJJSQm7d+9m+/btlJWVNbrM9SWRs6E0Gk0rITs9u8mUhMOA\nAQPo06cPp512Gj179uTcc89t9GtMmDCB3Nxc+vTp4/517ty5xv19Ph933nkngGudKKV49dVXWbx4\nsbufiDBy5EheffVV+vXr58YsHB544AGuvPLKRr+f2mg13+DOyspS+uNHGk38+fjjj+ndu3dzi5EQ\nBAIBAoEAbdu2ZcuWLVx88cVs2bKFpKTE7IdHenYislYplVXDIS6JeUcajUbTAvj+++8ZNmwYgUAA\npRRTp05NWEXRUFrnXWk0Gk0T0KVLF9auXdvcYjQJOsCt0Wg0mqhoZaHRaDSaqGhlodFoNJqoaGWh\n0Wg0mqhoZaHRaFoUQ4cOrTbAbsqUKdxyyy21HtehQwcAdu/eXW0SP4ecnByipeBPmTKFgwcPusuX\nXHIJBw4ciEX0Wpk0aRIiwtatW0OuJSIhMq1fvx4R4Z133gk53jCMkPmjHn300QbL5EUrC41G06K4\n5ppr3NlbHebMmcM111wT0/EnnHBCyLxLdSVcWcyfP7/adynqS2ZmZsi9/fvf/64239Ts2bM577zz\nqs1M265du5D5o+6+u9qXrBuEVhYajSbuFBbC5MnW/4Zy1VVXMW/ePPdDR870GIMHD3bHPQwYMIDM\nzEzeeuutaseXlJRw+umnA3Do0CFGjx5N7969ufLKKzl06JC73y233OJOb/7AAw8A8NRTT7F7926G\nDh3qzuOUkZHhzhD7t7/9jdNPP53TTz/dnd68pKSE3r17c/PNN9O3b18uvvjikOt4GTlypCvz559/\nTufOnTn22GPd7Uop/v3vf/PPf/6Td999t0Hf1K4rWlloNJq44nyy9777rP8NVRjHHHMMgwYNYsGC\nBYBlVfzyl79ERGjbti1vvvkm69atY8mSJfz+97+ntlkqnn/+edq3b8/HH3/Mgw8+GDJm4k9/+hNF\nRUV8+OGHLF26lA8//JD//d//5YQTTmDJkiXVvla3du1aZs6cyQcffMCqVauYPn26O2X5li1buO22\n29i0aRNdunThjTfeiChPp06dSE9P56OPPmLOnDnV5pBauXIlvXr14sQTTyQnJ4d58+a52w4dOhTi\nhnr11VfrVrBR0MpCo9HElXh8stfrivK6oJRS3HvvvZxxxhlceOGF7Nq1K+IX6RyWLVvGr371KwDO\nOOMMzjjjDHfba6+9xoABA+jfvz+bNm2KOkng+++/z5VXXslRRx1Fhw4d+PnPf87y5csB6NWrlzu3\nU23ToEPVR5Ly8/Orzf/kfPPC2c/rigp3Q4UrmoaiR3BrNJq4Eo9P9l5xxRXcfvvtrFu3joMHDzJw\n4EAAXn75Zfbv38/atWtJTk4mIyOjXq6a7du388QTT7BmzRqOPvpoxo4d2yCXjzO9OViB6JrcUGB9\nm/vOO+8kKyuLTp06ueuDwSBvvPEGb731Fn/6059QSlFeXs53331Hx44d6y1brGjLQqPRxJV4fLK3\nQ4cODB06lBtuuCEksP3NN99w/PHHk5yczJIlS9ixY0et5xkyZAivvPIKAB999BEffvghYE1vftRR\nR9G5c2f27dvnurzA+pb2d999V+1cgwcPJj8/n4MHD/LDDz/w5ptvMnjw4DrfW/v27fnLX/7CH/7w\nh5D1ixcv5owzzqC0tJSSkhJ27NjBqFGjePPNN+t8jfoQV2UhIsNF5FMR2Soi1ULzIjJERNaJSEBE\nquWyiUgnESkTkWfiKadGo4kv8fhk7zXXXMOGDRtClMV1111HUVERmZmZ5OXlcdppp9V6jltuuY
Xv\nv/+e3r17c//997sWSr9+/ejfvz+nnXYa1157bcj05uPGjWP48OFugNthwIABjB07lkGDBnH22Wdz\n00030b9//3rd2+jRoxkwYEDIutmzZ1dzS40aNcp1RYXHLBo7GypuU5SLiAF8BlwElGF9k/sa7+dR\nRSQD6ATcAcxVSr0edo4ngeOAr5RS42u7np6iXKNpGvQU5S2XhkxRHk/LYhCwVSm1TSnlB+YAV3h3\nUEqVKKU+BMzwg0VkIPAjYFEcZdRoNBpNDMRTWXQHSj3LZfa6qIiID/grlsVR237jRKRIRIr2799f\nb0E1Go1GUzuJGuC+FZivlKr1A7RKqWlKqSylVNZxxx3XRKJpNJrW8oXNI4mGPrN4ps7uAtI9y2n2\nuljIBgaLyK1AByBFRL5XSjVuxEaj0dSZtm3bUl5eTmpqKiLS3OJoYsBJs23btm29zxFPZbEGOFlE\nemEpidHAtbEcqJS6zvktImOBLK0oNJrEIC0tjbKyMrTrt2XRtm1b0tLS6n183JSFUiogIuOBhYAB\nzFBKbRKRh4AipdRcETkLeBM4GrhMRB5USvWt5bQajaaZSU5OplevXs0thqaJiVvqbFOjU2c1Go2m\n7iRC6qxGo9FoWglaWWhaFI051bVGo4kdPZGgpsXgTHXtTEjXWPMMaTSa6GjLQtNiiMdU1xqNJja0\nstC0GJyprg2j8aa61mg0saHdUJoWgzPVdUGBpSi0C0qjaTq0stC0KLKztZLQaJoD7YbSaDQaTVS0\nstBoNBpNVLSy0Gg0Gk1UtLLQaDQaTVS0sqgBPVJYo9FoqtDZUBHQI4U1Go0mFG1ZRECPFNZoNJpQ\ntLKIgB4prNFoNKHEVVmIyHAR+VREtopItS/dicgQEVknIgERucqzvqe9fr2IbBKR38RTznCckcIP\nP6xdUBqNRgNxjFmIiAE8C1wElAFrRGSuUmqzZ7edwFjgjrDD9wDZSqkKEekAfGQfuzte8oajRwpr\nNBpNFfEMcA8CtiqltgGIyBzgCsBVFkqpEnub6T1QKeX3LLZBu8s0LYTCQj13laZ1Ek9l0R0o9SyX\nAWfHerCIpAPzgJOAO5vSqtBo6oPOoosNrVBbJgmbOquUKgXOEJETgHwReV0ptc+7j4iMA8YB9OjR\noxmk1GiqiJRFpxvDULRCbbnE072zC0j3LKfZ6+qEbVF8BAyOsG2aUipLKZV13HHH1VtQjaYx0Fl0\n0dFp6S2XeCqLNcDJItJLRFKA0cDcWA4UkTQRaWf/Pho4D/g0bpJqNI2AzqKLjlaoLZe4uaGUUgER\nGQ8sBAxghlJqk4g8BBQppeaKyFnAm8DRwGUi8qBSqi/QG/iriChAgCeUUhvjJatG01joLLra0R+w\narmIUqq5ZWgUsrKyVFFRUXOLodFoNC0KEVmrlMqKtp9OSdVoNBpNVLSy0Gg0Gk1UtLLQaDQaTVS0\nskB/u0Kj0WiikbCD8poKPUhIo9FoonPEWxZ6kJAmnmirVdNaOOItC2eQkGNZ6EFCmsZCW62a1sQR\nryz0ICFNvNBzRWlaE0e8sgA96lYTH7TVqmlNaGWh0cQJbbVqWhNaWWg0caSlW6362xMaB60sNJoj\nkFiUgA7QJybNpcC1stBojjBiVQI6QJ94NKcCP+LHWRzJ6DEARyaxji3S355IPJpzXJi2LI5QtIvh\nyCXWLC0doE88mjPDTiuLIxTtYjhyqYsSaOkB+tZGcyrwuCoLERkOPIn1pbwXlFKPhm0fAkwBzgBG\nK6Vet9efCTwPdAKCwJ+UUq/GU9YjDT0G4MhGK4GWS3M9u7gpCxExgGeBi4AyYI2IzFVKbfbsthMY\nC9wRdvhBIFcptUVETgDWishCpdSBeMl7pKFdDA1Hp5VqjiTiaVkMArYqpbYBiMgc4ArAVRZKqRJ7\nm+k9UCn1mef3bhH5AjgO0MqiEdG9y/qjYz6aI414ZkN1B0o9y2X2ujohIoOAFODzCNvGiUiRiBTt\n37+/3oJqNHVFz1asOdJI6NRZEekG/Au4Xillhm9XSk1TSmUppbKOO+64phdQc8Si00o1RxrxdEPt\nAtI9y2n2upgQkU7APOAPSqlVjSybRtMgdMznyOVIjVXFU1msAU4WkV5YSmI0cG0sB4pICvAmkOdk\nSGk0iYaO+Rx5HMmxqri5oZRSAWA8sBD4GHhNKbVJRB4SkcsBROQsESkDfgFMFZFN9uG/BIYAY0Vk\nvf13Zrxk1bRO9Ah1TWNT31hVa6iLcR1noZSaD8wPW3e/5/caLPdU+HEvAS/FU7ZE50g1dRuLI7kH\nqIkPhYWwcyck2a1mrLGq1lIX9QjuBKS1VK7mRI9Q1zQm3nfSMODmmyE3N7Y61VrqYkJnQx2p6LTM\nhqOzlTSNifedDAahR4/YG/zWUhe1ZeEhUVw/eiqOhtNSs5USpQ5qQmnIO9lS62I4opRqbhkahays\nLFVUVFTv4xPN9aMbjSOPRKuDmlBa6zspImuVUlnR9tOWhU2i+RV1WuaRR6LVQU0oR/o7qWMWNq3F\nr6hpueg6qElktGVh01r8ipqWi66DmkRGxyw0mhZEa/Wba5oPHbPQaFoZOgAeP7QSjo5WFhpNC0EH\nwOODVsKxoQPcGk0LQQfA44MeBBsbtVoWItJJKfVtDdt6KKV2xkcsjUYTjg6Axwc9CDY2ormhCoAB\nACKyWCk1zLMt39mm0WiahiM91z8eaCUcG9GUhXh+H1PLNk0D0ME1jaZ50Uo4OtGUharhd6RlTT2o\nKbimFUhioJ+DRmMRTVkcLyK/w7IinN/Yy/qj141ATcE1nZ3R/BwpWTKJqhATVa4jlWjZUNOBjkAH\nz29n+YVoJxeR4SLyqYhsFZG7I2wfIiLrRCQgIleFbXtHRA6IyH9ivZmWSKQMF52dkRgkwnOI9xfW\nHIV4333W/0T5kluiynUkU6tloZR6sKZtInJWbceKiAE8C1wElAFrRGSuUmqzZ7edwFjgjgineBxo\nD/xPbddp6dQUXNPZGc1Pc2fJNIVlk6hjNxJVriOZOg3KE5E+wDX23wGgtiHig4CtSqlt9rFzgCsA\nV1kopUrsbWb4wUqpxSKSUxf5GkJhaSEFJQXkZOSQnd60tTI8uKazMxKD5n4OTdFg1qYQm9MNFE9F\nrd1b9SOqshCRDKoURCXQE8hyGvpa6A6UepbLgLPrI2Qtso0DxgH06NGj3ucpLC1kWN4w/EE/KUYK\ni3MXN7nCCEdnZ4TSXC94cz6HprBsalKIzR2viZeibu77aslEG5RXCHQC5gCjlFJbRGR7DIqiSVBK\nTQOmgTWRYH3PU1BSgD/oJ6iCHA4cJm9Dnru+OSwNTShH6gveVJZNJIWYCG6geCjqRLivlko0y2If\nloXwI6zspy3EnjK7C0j3LKfZ6xKO1Pap7m+FYvpbHzH9yfmojCW0yXg4ISyNI5nW+oLHYi01l2XT\n3PGaeNFa76spiBbgHikinYGfA5NE5GSgi4gMUkqtj
nLuNcDJItILS0mMBq5tDKEbk8LSQia+MxFT\n2WGT0nMIzloIwRQw7qFi7MUUlBRoZdGMtMYXPNGtpeaO18SL1npfTUHUmIVS6htgJjBTRH4E/BL4\nuz03VHotxwVEZDywEDCAGUqpTSLyEFCklJprZ1S9CRwNXCYiDyql+gKIyHLgNKCDiJQBNyqlFjbs\ndqvjuKCUYzCVDLUUhUqCoMK34wJyMnIa+7KaOtAaX/CWYC211rhZotxXSwu01ykbSim1D3gaeFpE\nesaw/3xgfti6+z2/12C5pyIdO7gustWXnIwcUowU/EE/hs/gnCEmy5b6IajAqOR31wxo0VZFS6uQ\nNZEoL3hj0RqtJU3sJLplGYloAe65UY6/vBFlaRay07OZMnwKb2x+g1F9RlF+sJz3Sy/G3D4YX6/l\ndDnpZ8DI5hazXjR3hWwtiioetEZrSRM7LcGyDCeaZZGNlf46G/iAVjh5oBOz8Af9LN+5nCnDp9Am\nYx3+9FWkGCnkZDze3CLWm+askM2tqFoCsVpLWulGp6WVUUu0LKMpi65YI7CvwQpOzwNmK6U2xVuw\npsKbNusP+ineU8yYfmMAyO2X26JdUM1ZIVtizykR0Uo3Oi2xjFqiZRktGyoIvAO8IyJtsJRGgR2I\nfqYpBIw3TsyiIlABwIvFL2IqkxQjhdx+uc0sXcNozgrZEntOiYhWutFpqWXU0uJwsYzgbgP8DEtR\nZABPYWUwtQqcmMX4+eMJmAFOJlLKAAAgAElEQVSCKghARbCiVaTMNleFbIk9p0REK93o6DJqGqIF\nuPOA07Eymh5USn3UJFI1MeUHyzGVWZU+C5jKDBmsp6k7La3nlIhopRsdXUZNgyhV84Bse4K/H+xF\n744CKKVUpzjKVieysrJUUVFRvY515oY6HDjsKgwfPsYNHEePzj3iMuVHSwvIaTSa1omIrFVK1TYp\nrLVfbcqiJdEQZQGWwsjbkMfM9TMJmAEMn4EgVAYrEREuO/Uy7vrJXUDD54xqiQE5TdOiOxOapiJW\nZVGnQXmtmez0bLLTs8ntl0tBSQE7v9nJtLXTMDFBQf4n+cz7bB4+8REwAw2anbalBuQ0TUNzdSa0\ngkpcEuHZaGURhqM0pq2dZjvbqrZVmpUIgkLhD/rrHQBPTQWfD5TSATlNdZqiMxHe+GhrN3FJlGej\nlUUEnIF64S46o+w8KMlBZSwhJWNdveaMKiyEiROthsDngylT9MCspqQllGNjZvdEut9IjY+2dhOX\nRHk2WllEIHxywd7H9ubUQ2NZ8NLv8fsFSfoDE/7xTjWrIpaGyHnwpgkiUF4eXZ5E6Vm0dFpKOTZW\ndk9N9xup8dHpp4lLojwbrSwikJORg+EzCAatMRdbv9qKbOpGhR8wfSh/Ek/8+ShOPHoj40ZmArE3\nRPV58M3Rs2gJPfC6kig9tFhojLTjmu43Uh3U6aeJS6I8G60sbEIbx2xuOPMGpq6dikJRaVayucNz\n4BsFZgpgYH4+lFuvNsksANIKmfTPCir852MGpdaGqD4Pvql7Fi2lB15XEqWH1lTUdL811UE9LiZx\nSYRno5UFkRvH3H65zNowq2rsRfoqGDMMCh6AbReCSiJYWcljL69mYbdhVJgDMH2L8NEWX1KQ1N6f\nAJkRr1fXB9/UPYuW1AOvC/Utx5ZqZdV2v4nQ+Di01PI90tDKAsjLg8OHrewkp3G8555sFucuJm9D\nHi8Wv0ilWWkpjJwHYccQ93sXu495BX/Qj5m2Asm9CHYMJZhRwMRN68gc2HifY23Kl7s1Z2vVtRxb\nupWVSEohEi29fI8kfPE8uYgMF5FPRWSriNwdYfsQEVknIgERuSps2xgR2WL/jYmXjIWFMGOG1TAC\nJCV5zPX0bJ6/9HmeueQZfE5RORbGBQ+QdP1wbryiDylGCoYY+Hp8gDrvz5hpK9zU2pZGfbO1mpLC\nQpg82fofbyJZWZrGQ5dvyyFuykJEDOBZYATQB7hGRPqE7bYTGAu8EnbsMcADwNnAIOABETk6HnIW\nFFgV1bouXH999cax/GA5IlWf8vD1WI0MfhSjx2oyj89kce5iLjvlMpRSbgZVki+pRX6O1ZutpVT1\nbK2mbKgj4fRE77vP+h9vORy/v2FYfzt3Nt+9twbC64+3fFubFdvaiKdlMQjYqpTappTyA3OAK7w7\nKKVKlFIfAmbYsT8F3lVKfaWU+hp4FxgeDyGdyurzWRW2f/8I+9jTmBtikOSzPHcKRcAMuNbD25+9\nbY32BgTh+jOvb5Ez1tb28jZ1Qx2Jpu6JOn7/m2+2OhPTpzfdvTe3Ym5sItUfp3wfftj6D63rnlsT\n8VQW3bG+sudQZq9rtGNFZJyIFIlI0f79++slZHa25WoxDKs3PXFiVUV1XlbKrPjFw0Mf5tlLnqWN\n0QZDDPtLejnkbchzpzYH8Ikv4rcwCksLmbx8MoWlifsmhL+8XisrEVwGzdETzc6GHj0gEGi6e28M\nxZxoyqam+pOdDffcY/1u7s6IpmZadIBbKTUNmAbWRIL1PU95uaUoTDO0EocG3rK5Z7DVcmYen0lB\nSQEHtvZm0iMVHE7rHHK+y065rPqAPXtmW3/Q36B5peJFeEZKY40RaWyaK+c8J6eqQ2EY8b/3mhrW\nWO87EQPH0epPa83C89KSM7/iqSx2Aeme5TR7XazH5oQdW9AoUkUgUiWureJmp2ezcW0H7v2fEyGQ\nAsYgjLErMNNWkGKkMOLkEUxePpnU8ksp/zjTOl8g9POtifRhpVgbltoa6qZ8CRojw6c+8jphK6Ws\nDDpHlngQXidTU+vW+Dd1wxtLeUZT9InQGYkn9VHgCaVclFJx+cNSRNuAXkAKsAHoW8O+/wSu8iwf\nA2wHjrb/tgPH1Ha9gQMHqoawcqVSf/6z9d9ZbtdOKcOw/jvrHS4et0QhlQqUQvzquMv+pn7z9m/U\n1KKpqt0j7ZTvpnMVyT8on2Gqdu2Umvrmh6rdI+2U8aCh2j3STq3cubK6EM3En/9s3SdY///857od\nH62sEo36yOstI1BKJP736q2TdX1GTflMGvNa4e9hayJRnyFQpGJo0+NmWSilAiIyHlgIGMAMpdQm\nEXnIFm6uiJyF9YnWo4HL7G9791VKfSUiDwNr7NM9pJT6Kl6yQvXearRe0JnnHGDRDL873mL/8a8x\nc30xgDXuYvtgCKRgKmtEd/nHVtZUQ7+FEQ8i9ejq0qNpae6D+sjrlJEzHsc7Jide9xpeJ+vS625K\nd11jPv9EHxfSEOpqOSXaexXXmIVSaj7WJ1m96+73/F6D5WKKdOwMYEY85YuGU3GdQKE7nXNpIVN2\nXQ1jBkBJDmQUQPoq/EHLT5FipFDRazlmkh+faZCSItax9vTnhYUw+aXGe4kLSwsbpITCGxaom7kc\nPogvNTW0vBKN+ro7xoyBvXthwQIr2N2UrpK6NP5eRe8EjuNJa3cfNRZ1VeCJVq4tOsDdFESczjlQ\nQGXQHtGd
vsrdN8VIIbdfLv279eeNzW9w5k8W8e0n/SFjKaSdDGS756vwK4ykAM/M+cSdjDDkujEq\ngMYKnHt7dJMnx96jCR/EN2GCtZxIgdVw6vrShteBp56ykiIiHRtPH3Msve7mCGwnykR3LYG6WE6J\nVq5aWUQhkimY86scko1k/EE/YH2v+/LTLnc/uzrxnYn4g34WsxjVXmF+YfLCP5N49pJnKS8YR4Vf\nYQYF04Rbn32V4qTnyO2X6zbyXgVg7DqPS5Ifo2vfT8i99ORqisCZTr0xA+d16dF4B/GJwPr1iWU6\n10RdXtrwOlBebvXYq1mcCZCB1Fyui9bsPmpOEqlctbKIQsTpnNOzKRhTQN4GKyXG29Df8p9bqiYf\n9BAwA4yfP57bk4cBGSBWrCPYczFT137ArA2zXKvAVQA7zyI4az75wRQwTmfG+kso+OPkEGWQk5GD\nses8zM/PxThxRaOMGq9Ljya8fEaNguXLG246N1UWSE0fB/Kuqymmk4gfEGpo/OlIoznrWUtDK4so\n1Didsx1/8FJYWsiM9TNQpWeHxDIcKndk8dd/paOUD6QShk+E9FUooCJQwaSCSUzKmeSOGD9cMhQV\nTAGVBEFF5efnVrccyrKRvMXgF9RyRZ7PgNyGV8hYezSRyiczs2EvRlP10CNdByJfO/weI7nqEsHH\n3ND4U31JpMYwVlniUc9i/TJhc5dRfdDKIgZibTgLSgoI7DgLZi2CYAoYfmvSQUdhlJxPsNIHShCf\nAYeOc+0PE5N3t73L8p3LWZy7mCnDp3Bryb8IGlUZV8knriAnY3LoNQsgUGmgTKj0w9SpMGtW01bI\nSJlkDbl2U/XQI10HIl87/J4izcybKD7m+saf6ksiNYZ1kaWx61ldvkyolUUrJdaeSk5GDr4dhzA9\n1gAlOVXKIqPAUiBBhfJVQsaSkOMVisOBwzy24jEOVh5Epa+EMcOQkqGc9ZODTLl5cjVrpq4pnXUJ\nnDdWmq9TfqmpNQeGI91TvHvoNV0n2rVrm5k3kXzM0DRlmUiNYV1kaeyyqenaiWBxNgZaWYQRrhjq\n0lPJTs/m2Vs7MH6ZIhgwEUPh+/FKgvgwSwdZimP4b+HQsdVcVA4KRf6n+QiCQuHrsZo2vTYwJdfy\nkdzyn1sA6N+tP+UHy8nJyGHx4mzy8mDmzNpTOmPNnGrMqUnc7K8KKwju80GbNlHKMY499PDnG+k6\n0a4dHtSP5TvqzUVTWDtN2RhG67jVRZbGLpuarh3JNZjIqeU1oZWFh8YIWo4bmUnmEmc6iBT6//QZ\nFmxdQP6s8RFdU45SCEeh8OEjq1sWA7oNYOMXG5mwYIKbgeUc2zapLYtzF/P889nk5to9+N4bKQj8\nB0pz2Li2A28sKGfUiFTKU2PLnGrMDCtvwwqh82/VVo6ReugR/cF1sIBqUvzh14lmHbS0nmK8rZ14\nKySvZRopLTuWDkBtsjeWvLVd27lOIrns6opWFh4iKYbaGobaejmzZtnHzMok86JKS1GEuaZ84nOm\nNwGsFFzTM1u7iFC8t5g1u9fgEx+mCp3JXaGoCFa4jXl2NpBWZRVI2U8IzHwHAr1ZNF1xUjYYpy+E\n7u+7M+ZGwgmwO5ZFLBlWNTXaTvl5LYv6NLARg9FpsVtKBSUF7PzPtfj9PRvsLomr5dMMbsKQ89Yz\nUB0vheR97iKxTPjZvK7AaNdOJJddXdHKwkNOjvWlPNOs+mKe0zA4E8c51NZDCK8QJ3TsBkl+CFiB\n6iFDFMecMpJ5W+ZRqSoBaGO0YcTJI8j/JN+9RlAF3anPvVOgezHECGnMvWm3FNxrTXRIEijF1pWn\nk7Tmv9z85CsRx2w4ZKdnV5uapLbGqTa3lbdhDY9Z1KXBixiMPi+6BRQyZuXAQpKSFwNGiMKqT8Mb\nq+VTF+LlJoxZAdWx1xsPxRa+r/e5+3zWjL9gKY7U1NB6cfiw9Z7G023ZUFqaVepFK4swnI5+MBiq\nIBxLwck0qq2HEF4h7rqtGyOu2ui6gzIHXsGkgkmuAhCEESeNAFWzWyoSgnB79u1uY563IY+93++F\n0myYtdBWFAagAGsqkkClj235uXAmoXMCU/1FjThI0Gdww5k3hIwtiea2itiw1tLgRWpcUntvBONU\nRBkkJUNOjgFp0S2ggpICKkoGYG4fjOq1nHF/e5keB3JDFFZjxGfq0tDW1HjG6v6ri5vQub+KQAU+\nn49nL3mWcQPHRdy3Lr3eeCi2SPWsf+9bSUnJdMt1wgT4+98tGSdOrPoWTTBovbszZ0JuHVLHa+0E\nxcFlVJNVmkipxzWhlYWHgoKqShcIVKWhjhlTN/dUxApRmEl5BsBG9+U1MfGJjyRfEm8v/pLg9vMg\nY2/EwHckFIonVjzBqrJVFJYWUmlaVopsv8dye5EEBLAUhc8+Slj0bpCly2DJe0aI79/7Uk8ZPsUN\noHsbp2AwyNS1U0MGEaa2T7VcaqiY3Vbec1YEKpj4zkRO6HgCAAu2LiBgBqoajG79mfDRBIK/tubi\nMn+8EtIeDbGAUtunUlBSwMYvNrpyZ6dnk1p+Keas31qTOvoCbP76cxiW506/UlvDW5cecV7+Dg5X\npKNMH4crFLm/3c6d9/5QbSqX2hrPWN1/seznut6+2cnh7f1RJUMwMwoYP388mcdnRryfuvR646HY\nItWztkmzmPLKBxQvtMrx229DXVHl5XDDDda76ry3XiVXX4sYGu4yquna4Z2nlhLH0MrCQ01pqFD1\nEjnfYYba/dbegNYtt1RlKvmSTiP46wGYaSvw4ePCXhfSft8w8v8ZOQDuw8epx57Kx19+HFFmE5Nl\nO5ZVrSg9B/VNOvgCYFpur2N+/hBfbesJe/rD7ixQSVRUVJKXX0Z2dk8KSwuZVDCJimAFpjKpCFQw\nfv54TGW6iiPFSHFHpjspvs4I9onvTCRoBvH5fEwZPiXiSxn+0jgNnqM0V3/gg5LT7CyxCgC3wfCJ\nz7LC7Lm4KsEdwOicz6uABcHwGdb0Kh+Pw2cqTCUQNFj2xmkse6uXOxo+JyMHw2cQDAZRSrF692r3\nS4ax9oin5W9k+pJVKPk1kIQyDbYW9eB/fumH1zaSOfB79/4LSgqqytkTb4LI7r9IhO8HMHn55BCX\noTdupWa969atwNiLQhViHYLDjvUKVjaeV2Gltk+1vuHSPjVEWdcl/uUORg2rZwu2LmDhrEz3/fMZ\nJqZSVRYmnhhhmHuxVmXgUU5OfY4Ub/O+99OmWQoqtfdGylP/U+NzqotF1VLiGFpZePDGJ5zG3fGR\nTpkCxcXW+unTq9xRtc3q6fQYHOUDoEjCt+MCJH0VKUYKk3ImkffMCSEBcN+OYSRnFBMwA6QYKZzf\n83w+Lf/UDXCHB8JdSs+BWYutc/kCMPAF6JfHV+mroI93u6VEyFhKYenJ5MzKcbOsBAGxpidRKPxB\nPwu2LCCjSwaHAoco/aaUoAqiUMxcPxPAmpIdE1FC+cFy997z8new9
7hXWXD4fvdenJfGafAmFUxi\n0dJvq+QOU5YKFTFes2jbIt7d9i6Dew6mz7F9XBmcY5zpVZ7JzKZNSqbnGRgQTHZHw+dk5LhJBiYm\n+Z/ks2DLAq4/8/paGxKHafkbueXqkzEre1tl3r3IVcgEFA/nLWf/R79z7//nvX/uPkdTmaS2T42o\nTCNZSd7GOrdfLvcMvidio+RtBGXbYDDbgDIgqDB2DHMb7Gn5Gxk/+jQClQZJyUF3Usuaxud460my\nL5mfnfwzunboSv9u/Zn4zsQQa7mN0cZ91rUpNi/Ovnkb8nix+EUqzUoUircXfodZYaJMHwoTGfAi\nSgUJiI+N+7IZNzIzopKryapxyvtAxQH32grFtLc2sved1dx13SC3s+dtD6ZNs9OlfQplnIhvzDyS\nek6q5pat7dpueXqUdCSLLl4JDA1BK4swnEqSm1tVSaZPtx7imDHVv8McS+aDoyhEoE2KMOXWX1Ce\n2q6qIoyEmU8F8fuD+JIUz912NZkDfxbygs3aMCvERbRgywLyP80PuV7bshEcdpSOqaDzzlCXVvoq\nqyHekItPDPp3yyZvw3PV0nGVsnp1UvYTVEkO+Tvfg/SP3e1OXKUyWMm6PetI8iWhggoRsRq/Qhh6\nQZCKiu5gjIcxb9pTuFd/YUf1GcW7s3ZUTWsSENiQW6Mr7vj2x/PFwS8A6wVftmMZK3auIMmXhBk0\nQ+I9ATPAG9/dwZRXnqB4YSYvzlBUVgZCRsPnbchz3XcOFcEK1u1Z51o0CsX0ddPp361/iL+/sLSQ\n255bgFl5f1WZdyuGfWe4CrnsmJcgaFlKhwKHeGXjKyFlXbyn2J140nm2kRreKcOnhKROT183ned+\n9hzlB8urNUre3rxx4gpkBVRWKowkeObWX5CdnunKHvDfD8pHpT8YMqklZdnk5e+AjKXkXnoyBSX2\nTMs2lWYlb336Fm2T2rL3h70h86GZyuRw4DAT35nIgG4DalVskZInnMZx6tqpKBRmz/dQvntAJWNK\nJXRdAwumEAymcOvVJsVT8si99GTuuSf0ZYxk1XhjOCEdrtJzMGctIj+YwoIXg66L1hmBHQg46d8K\nZYoly/bB+NNWVHPL1nRtt95EcDuFjMOIIcuvOZSJVhY14K0kjnKAumUyhJuxN9zgBN8ygSpfdna2\nFT+wKothbyekEoS7J8YNHMe0tdO45T+3uJXen74IjDs8lkNBZMHWj0GZbbj16iDp47+BTlWbnHNJ\n6U+Qfy3GrEwC4w+WkgFUSQ5Gr/dRaSsxMVmze41rjQTNILfNv41L952J35/l9mYpyUHSP6j2wjov\nw7WXP87LBQEIGoAPiq+HfnkRFcbXh7+ulgQQVEEuO+ky3v7s7ZCkAYXiv9v/y3LjbBbfu5jc3Gwe\ne7mI3ce8Qs6Qs90ebCSK9hRZ9+W5xvj54wEo3lNcVV49N4Jxtx0aUtB1HTLmJY7edyVfd30TlVYY\ncl6v3Ek+6/XzWjAvrnsxxEoylYk/6OeNzW+ENNaOPM9c8ozrznOUNcCYfmMAyL0+F8Y6dSvZrVsF\nJQWYPd+zZA9WTWr5j7WreGHuJtSsdwlWdgfjKl4oHs6lF6SS5EsKUayOm+jtT9+ulpShsFx6q3ev\nZub6mSwZsyRibxuIGIDP7ZfrdpCk52qCYy9CbR+CZCxDdgx1Z0kI+oP8Y8ZBZpTnuD18wLXAvLG3\n7PRsJi+fHFK+LiU5rnXv9wd57OXVHNx6H6P6jCInZxxJyUGCQQUYIIGQ98uxwJ37cWJoY/qNYe/3\ne+naoSsbv9hY5YosyK7mdrrnHiDNjjNt2OlmNB4qGcpjbZbz5h0eq6QRB83WhbgqCxEZDjyJlZLz\nglLq0bDtbYA8YCBQDlytlCoRkRRgKpAFmMBvlVIF8ZQ1EuHmYW4uVQPfUqvyvBtjJHK0/OxIExeO\nGziO4j3FVT2wtBVWo14y1JpKxBP3cHzAzkuhlEGw0qRkQ08YHOGCJTmYgeSqBn9DLqwfA8EUfCnQ\n/647WG08WXVeu60ImAHyD09EjPesY+2Xqvdxvbn0lEvdoKu3gfyAKZw07FS2LhoKGGAa1j3YY1EE\nwVSW1RA0q7ukkn3JfHX4qxB3VfeO3dn9/W63sbVeVFjYbRiHA4dZvaJ6xlnvY3sjCB9/+TGmMvHh\nCxnfEjAD3DrvVvc6PnwYPQzM4RNh/jOgfPDOkyTfcAmTJ3Vi4jvF+IOWHzPclWaIwTOXPEPm8ZnM\nWD/DipmgKN5b7FpqTvwF4HDwcLXGOqiCFO8p5qcn/tRSlGaQW+fdiogQNIOkGCnWSP9AATm/CnVl\n7f1+L8k9i6gYcyGUnB8yo0Bg27lQabhu0UDxNeRv24lkfEGXkzdz4HCV+0YkevZeRbCCvA155PbL\ndS1An/hYvWs1+Z/ku1aJaZrcMs+aoWDcwHEhyQsTFkygMq2QZCOZiefeyV+XQrBSYXUuxuLvN4up\nwanWRJ5KueXUxmjDkjFLaoyV+cR+xr2WY9pT8fiSTPIP/xa2rWLRtkXcde7n9LuzgtUr20O7/dVm\nYBAEEWHT/k08UPBANUvV6bgIQoqRwm+7zwHjkpDMvmlrpzF+/niCKmh1IpyMxmAK+Uv9/F9qPn+5\nfiTQuINm60LclIWIGMCzwEVAGbBGROYqpTZ7drsR+FopdZKIjAb+AlwN3AyglMoUkeOBBSJyllIq\ngqM+ftTW2NeUvRAeMIz3ACGnB+a6AdJX4UtfjeEzMJXhujaK9xQzc/1M/BnLUJ7JCckosGIZYQpG\nZbwHvj+ASrb2A7fnFagMUrEtG05+MrJQ6YWo3KEhM+9u3g+b92923VhOp12h2Pr1Vki/j6Q2SwlW\nQlKy8LPhXSBtJJRlQ0kO8yvvInDCcus4T9vU59g+/Pac33LrvFtDRPjx0T9m3w/7ADB8Bqt3real\nD1/iUOBQNXGdkfATz5nIhAUT3MbP8Blkp2Xzfun7KGW52byNvokJJsih46yZhFUSYgo3dJnFuIE9\nyTw+k7wNeawqW8X6fetDrnfzgJvJPD6Tx1Y8Ruc2ndl/cL91TmVyY/8b6dG5BwcqDvDXlX8lqIIs\ne9+Pb8ddnDnoKza2mYZCkeRLsp5p0O/KHFRBt3wqghVuuaQYKUw4e4J7PrCU7KCzg6xJ/0tog59R\n4M5hhi9oWXpmEsrwc8COJzlJBL/L/h1TVk0JcWVG4sXiF9n7/V5X6VealdXcqM79ezO2HAXnKE1B\nOPGMLxg44kNWv90flNidixxU+ioqg5Uh9+IP+snbkMdjKx5j93e7uXHAjW5cxOn1u9bIgNfZXHQc\ny+ThEKv28RWPowwVuVNFVYzs5Y0v17jd+V8RrOCxnVfCr89xM/s2plzHbfNvI2AGXJmPKvspP3ji\nmE+8UsTIC39ULWnA8Bns/GYnhaWFcVcY8bQsBgFblVLbAERkDnAF4FUWVwCT7N+vA8+IiGCFY98D\nUEp9ISIHsKyM1XGUNyLe
rCZnPpeashea5StldlDwsVeX8/bC71AZS2iTsa6a+Q2WYplUMIlFeHqT\nEDm47MQ3nAYfsS0LaxLEDW2nhMgRYr1Ata8IOoRbIi7pqxhw112MbDeF1N4bmbhpEhWLB2D+czxi\ntsFIXoT8ehhm2gprKhTbl//C5S+QtyEvpBH34eODXR8QNINuLztSw+RlwtkTKD9YHmK5mMrk/Z3v\nVwXOVfUetIlpKVnjDxC0MnT6Z3/rZgbNWD+jWkMqInRq24nBMwdXk9vwWZaIkzllKtNNTDCDKXy4\nLMA1j/dl/9FzaZ/SPqILKEQ+u3GuCFTwxMonQmYBCJgBTuh4Akm+JDehwXkW7rP/pgesvdmOJwEF\nD0DOg5x1tskJHU/gsy8/CykXR4lc3fdqlpYspey7MoAalYP3OK/C8/aWC0oKXPkqg5WMnz+ewHFn\ngfEuYrZBkkxUxjJAqjLnnDIVH9PXTXfXrd69mrvOvSskBugGpy+FIV8OAbvRdoh13FOd8GT2Pbnq\nS0wztB/8Q/d5YPzO7dSpjCUUlBwVkhzy2IrHePuzt5m2blq1mEk8iKey6A6UepbLgLNr2kcpFRCR\nb4BUYANwuYjMxho6NtD+H6IsRGQcMA6gR48ecbgFi3AlMGVK5NiFV4nEYzRpjZRls/D+bJT9qdYp\ncz5h3MDqn2rNTs9mUs4klu8cxuH0VdZLsPzuiFORACEN/pCeQ1hGdZcFWC/685c+HzHoDpbLxWm0\nauPGK/qQebwnjXf7YNdlFvCbyPbBqLT33ZTjSTmT2PjFRqavmx5yrctOvYy5n8x1M7SiXVeh+Hvh\n313/v9O4xyKzVU6FyNiLMHYM4/ZrBjBx07WWr91WVNWup6zxMeF+824du/HlwS+Zvm46szbMYsLZ\nE6xG1ONPNysVL88twzfkv/jKzoXt9+DLWIKvh6UcXXlLz0FKhuLrtRwzbQUiUqUoSs+xOwHLmOeb\nR9AMug38nI/mhKQpG2XnEVw/xlIUGLDtQtgxhLXyU1anVX/WIsLVfa/mtU2vuT3lWAaa+sTnlrlP\nfG7spbC0kJ3f7LRcM6Z1/oAZcGdkViU5qIylkG7FhryKQhAGdhvI6t2hfcx/rPmHa4kfDhx207AL\nSgoiPq+Q+7MV0nFHHWcNgK2FJF8SF2RcwKJti2rdb/OXm6379xaRrbClZCgqo4CUnuvIyXgi5Dhv\njK62jL3GIlED3DOA3kARsANYCVR7ikqpacA0gKysrDiof4twS6K42MqMgtDRojk5DRtNGk6sozod\n+cygICRT/nEmjIy8rww1L44AACAASURBVDeV8UDFAf5a9r77zQxfsolhz5Lr8/m4uu/V7P9hP6P6\njCLz+ExyynLwpxdWO+fgHoMpP1jOiJNHMH/r/JA03GQjmXO6n8OK0hU1TlniEx93/OQOMo/PDM1U\n8bpDjEqk1zJ8YrgpxwC3zb8tJKh984Cb6d+tvzttikJhiFHt2pGC5OUHy0NSN2uSNxIn9N7BWRdu\n5DM2ug2RT/lCerqOKyXEAvPgfKpXoTgUOGS5P1DVyoGMAsydgzBnvQNmG3xJf+Dqx1/ktW9ut/zl\nToq02Ybg0goYcyG+nkX4xEfljoGuJakMP5WOW0kJfY/ry80DbnZjYIYYXDbsWN7mpwSX/BG2DXNd\nbcHtgyHt/Wr3oJRi9kezXcUkCGedcBbFe4upNCtxPkF8SuopVfdnl3/Pzj0p+7YMU5lMfGcin3/9\nOX8v/Lvrx7/slMsATyNZg/XqkGKkkNMrh6LdRSGK+Vv/t1Xyoli09Dve+9cCLhjqi3SaapjK5MuD\nX5LsS64Wn3AQhEtPuZTd3+6O+ZxASF01eqxBpa+2pUwO2T/cmnZS2cNTeBuTeCqLXYROKJFmr4u0\nT5mIJAGdgXJl2bW3OzuJyErgszjKGhHvbJferCbvVOC5uVX7Z2fXPpq0rteuzaUVLU+7NhxTdvLy\nyVaPbMwwpOQCxo06ldxLH62WkldYCAUvwdOnF1Gc9Bx7v9/LvC3zCJgBknxJfLDrA1aUriDFSGHi\nORP5e+HfCZgBd6LE5TuX19qzFIQubbq4gTsTK7jcrU8puzyusMuHdWVQ94dDMlu85nuSL4ncfrkU\nlBS4gWmf+Lh5wM2s27PO7WF6s5yc5TZGG1Lbp5K3IY91e9aFNPBXnHoFXTt0DXFnhLPru13s+iS0\neicbyTw14ik3e8oZjxDps7uGGJQcKAlZV90tNNS16mT5PXa6sYFZafLK3N0w2HaflAwFsw3KNKyY\nU8n5BNM/QERCsn68mWqO77t/t/60TWrrumi6dugK6W9DziTYMRgxBV9SEHotr9Z7c1xQ3t55ki+J\nGwfc6GaSmZjM+2weu7tWb0R3fLPD/R3uNvMH/cz9dK4b9I/FWjn6y0t4/NEkVMbZiGNJh1N6Dsz6\nL4FgCouW+GHMwloVkHOOgBlg5KkjGdR9EKt3rw6Z083Z761P3nLdirFiKpOenXsiIuz8Zqer5AJm\nwM22yvvPFpYt/Ql0WB/6JU6zMq7B7ngqizXAySLSC0spjAauDdtnLjAGKASuAt5TSikRaQ+IUuoH\nEbkICIQFxuNOJNdTebk1inP69OpfV3Ma7tzcyKNJ60pNcZFIsoXnaceinELM+x5rSOm1gdxLF1fL\nugq9ViaLFz9P9qWh00k4jag/6Gf9nvWu+8ZUZmRXjusGKQhJqQVCctPvP/9+JhycQGX6ByQbydx1\nbuiLkJORQ5ukNm7a5TOXPONub2O0cQOAADcOuJGNX2x01wlSfUqRsCngnbjIXefeRXZ6Nrn9cl1l\nsmb3mqgKcMRJIyg/WB7S23OC3jPXz6QyWInP5+PSUy7lrU/eqv2BeXrR12VeR6mYLFvq9/i038Px\nYxi9llsWhZOckLEUxO69RrBSjm1/LAcOH2D6uukYPoNLTrokJPA7a8Ms/D3WINcPR20/H5WxhKSe\nazi3+xBWlK5wg+1OOTrjRJxnUn6w3HVJgdWohbuGIhRgtVmWvYMuaypzrxtu76yX7FjcH1BjLsTo\nsdpS9m79W2q5VSO4YY9pewwHKg7UPhC2LBu238WI3htZmLTQ7QQ4cigUSilGnjqS3d/tpnhvMaYy\n3frnTUxwUKgQpem9t9W7VvPAv96hcuYC+75GhQxgDZ9UtLGJm7KwYxDjgYVYqbMzlFKbROQhoEgp\nNRd4EfiXiGwFvsJSKADHAwtFxMRSNL+Ol5w1Ed5Yl5dbudCFhaHKIDW14Q13JGqzFvLyqkaFe/O0\n6zJ5mnfCtpsH3Fyj+RpeDnl5zr1lc89gK1PFGywc1WcUy3cuD2mUQ14K7yhzw88Vk5/hrqsHu9cO\nH0+SeXxmzYOPyrIZ8+3H7sAx73Ynx33elnlMXTuVZCOZp0c87Qb9gZDzTl4+OWQcA8CPu/yYO8+9\nM2Q6Dic7xzuaORIKxbwt83j7s7erjVx3FI930OX8LfMjns+Hj2OPOpYvfvjCXbf/h
/207bXfHWDp\nRRAGDvJTxMVWzCdjKb4eH1QNtEz/AOVNXEhfxZcHPcHlkrPIL+hNyokrye0HG9d2IPOzVzgh8zO6\nXr6d6esmY6ogQdNg+EnDefTC6pao88ycqT9S26e6LrZweh/bm61fbXXdOYYYKKWqKYracKw/xwUa\nHuexZkW4gN+PHszb//2ST//1HCqQTFJykF/+fiWvva8IVAbtr1cWAHDTwJt4+oOn3diTqcyQmELy\nriEs+NfvebvS6kRNeeUDylP/Q2r7VDfzsDJYiYgw4uQRjBs4LmQgHRAyUt3F05EKT33P/zQftoXG\nGKXkArAtQ29nKR7ENWahlJoPzA9bd7/n92HgFxGOKwFOjads0aipsQ5Pp41kAdSl4Q7H614KVzqF\nhVZj/eKLVaPCfT7L2iksjP2a4TOx9ji/R42VLHxg4f9v79qj5CrK/O/r7plJlF2EwQcKIaCsGg9I\nIDs6iybR4CwqSHbDCugxEQLjCHHJHg8jkaMnKE509Wh4yU4WwjKrKz4wLnJ4GUiA3c4BA4GExypJ\nDCFKNjBr8ICazOPbP+re7uqaqlt1X909PfU7p0/fvl236qu6VfXV96ivZBWcUI1NjGckT/AAKivp\n0fFR4LkFGAvCTxS4iK6RfnRLykpVstHtLwnbSTDpY9DevhiLgyi6MiMEIEK175qPgzM3YssLW3DD\nGTfU5F2p58z5Eya0nft3Yvndy3HCG6ob2Sqhs5dsrImTtOWFLRMkjnAS0EbiVeqlyw8QHmzb9m3D\nZ+74TCXtolmLAAD3PnBzZe8LHl+Cwqd70DHzsUCKWo4DR20CCBVGUUABpx13GhadsSigdxybXyjU\nGr4DRn7wgYO4vO2neHDN3wOjIpTJ3IU7UOx8puY8FN37qexpuGoFRnacira33oNre6/FXRv2o/xg\nG/a94UeViXDeMfNw08duqtR97yt7a5wkTOqmtkIbClSohFF50yFvqqi/CASS9k2gOILzzjwS1z58\nGf68+Z/AI0WACxgfLeBdh8zDAxvEONv/pnvxeOkvsWjWIHpP6cXCty+sML1wl30oieKPl2PNwSLG\nx8VZLcPPnIAVK6pOJbOPnF3ZN1HpQ3u6gf/qBkrBpt/Q2y3AMX84F8/dclNlIVX89N/izAVH4PZf\n315Np0iGZ51+KLre87W67ORuVgN3w+Fy6lWIrOLTr1kDLFsmGE949GgYe0oXZwoQIQjkWFUuDKMm\nEmvpIDo/usOYVm4HnQquuxs1gwBH6yf8cCXdeeIZWP7fxaC9KHM1nbxhSWxsWl8ZfHvfcR1wxsS8\nBIPurthkHnvhMfzykRLGfzMXB459CENPDNVIT7KUIOeBh57D4/uXYPTND9WoLAp7TsXuOz6BTaWI\nDZwGphj+BwC3PX0bFs1aVAk5cte0k/AzifGeVrgKKxd3VNIvu3NZxeU0VKnJwRdVCfNtr1yMp6VV\n687ybBHmnksiCONPZ6Gj4z5c9O3vY/EZxwN7urHqe3oJeuiOZ3Fw7Z0VxrN6ZA12/uDzGBkpAPQZ\nYMlp6Jj5WEWiDWkKjw0OEdqLgIlMFEDNoqQmJE7fYtz19uvwu21/haV/91YMd/4Bt244KNR1xStA\n44T29iI6O6tHESyevxDf6K56hsh0qRLumt/Vnv7Y2Vlb/+E/DlfUsAfHDmLojmdxy+e7azQQakiQ\nD5e+jjU8HeNMoHHCRYd/DzPe8h+1HoaKS/uHP3C+MeR85mDmlviccsop3CiUy8wDA+I7TR6lUhjr\nlpmIua+v+v/AAHOxWP0fYC4URDpA/Dcw4FbWwABzoTgu8iiOW58rlwUtCxcyd3SIsqZPF/fLZXFd\nLDK3t4t0tnbIqr3CckNamJnLu8s8/arpXLyyyMXTrmDQiGgvOsh9/buc8hlct5XR9qp4tu1VXvjN\nb3DxyiJjJbh4ZZEHHhww5tExbZS7vnQpF64sMFaCsbSbi+0HJtCZBUxtwMw88OBAhebCygL3DPVw\neffEwsu7yzzw4ACXd5e5XBb0U2GUO6aNcv+q7aIdMMoi8FW1n0WVzczc17+rpu3x1rsrvwvFce7p\n3WCkp+OrHUwriTu+2qFNY2wPqS4qBtdt5dKHvsSFC0/l0kXv567F67h/1XZub+egbuPc1j7m/H4G\nBsT4C8ehOobKu8vc3juPacEXub13Hvf176qMX3lsq+2vtml5d5nbv9ou+pLmo/bFJIAwC1jn2IZP\n8ll9Gs0s+vrcJkoT5M4Xfjo6pElQ6kilkvgOGUWhEG8isg10Na0YUOLT1lZbT5WJESWbFF0ZiDq4\ndM+EaQbXba2Z/HR56wa9ykz7+ndVGND0q6ZPmIzkNigWuSZ96UNfquQlM/QsGKYpn3JZ0NDeO89I\ns2t+g+u2ctdZj3Jb+1hNf1HrPGGyDBgPCiOM0quMMy5klF5lKoza+5xh0k/aZmF/LxTHudh+gNsu\nmlt5N6AxacyNahcUUXmaxlCV8Y5xx7RRHhysHUfy2LbVsby7zH0/7+O5a+dyYWWhwijiMlMTPLOo\nE9TJ1NQJop4fGGAeHBSdLmQAukEYpu3rqw7UQoG5pye7yVm9PzBQSxPRRJpc6TbR6Mq8ZKnBdQK0\nlT04WMugBwf19EStWqPSD67bOvE/k1SUscTVMW2U+757S2JGEfWfyztTmVa4wk5SvzgLHBUyY6PC\nKNOCLwqJ68JTmUp/rkgWKP6J+757SyyaTO2lY6Z9fck0ATVlBoyj7+d9mTAKZs8sckPcydSWlzwA\nBgdFh1JVPbbnslJtaCc9B2YYSlY6ul1ota1SK+kk1YpOHZQEJnVC3Ik7Kv3goGDog4PVMtX6pn2n\nuoWEqS1N0kjc8k356FbGJkbrCtc+YqJz+nTxfoulMS6d9dnKgqN/7Tou/vUapjk3cHvvvMwmYNNY\niquyrQc8s8gBWv32YK0aRp1M5cGjYzQ16ou+qpRhekaXb1YwDUhXNZuOJpdBnrVkEadtspqk40hN\nuntZTIayitKkmjTVN035urxdJsOosRGVd5L3NDgoVKiFwkSJy8bM0qi/tCpChwVhHDrSzgWeWeQA\n3eReWbEUhQFYt1oL/29ri15pRBmPszaO6pBHec6MwLHDuwxsU3lZD7Y0UpOrWseFNp3zQ6lUlWRc\n6Un77uPYr2x9X4c0k6JpYWbtbzmMiSSMOap/pKXPlVl419kYUPdeAEFMpnGxB6GrS9yTo9MeOFB1\nsRsbqz4T7seIcksN07qezOcaS8qEOOdvuJbtmqdrKPcoF1MgWUTgpGHko3bZh4jar2NrJ9coxmEZ\nNcf3sthIakpr2z+UpD3mzwdKpWo/Zza3i9x24fiISh/SmDR0zu7dgjbAtF9I/6zLO1bLsrWhLTyP\nLg+jq3hM+lLBhaNMhk8jbBY6m4P6W3aHjVptuaorouiqpxRS77KdJY8c1SyuZSWlXUUcmlX1RpQa\nKA8VZpivbN+K8tLTSRaFglkiSkOTqhpz
sevons9yDEZJuXEkiHpKFg2f5LP6NNIbKrQz9PRUjaVh\nJ5R1pboBrNPbutgsVMgTS1IPqaTIYyKWEXdA6NpMfg9RE1jcSTSviTfMO4nROYlOPIt6xO2Dcpku\n7yctTUmdCXTj0rWsLOiV6dDZDr3NYpIwC+Za24RuRRVnFeHSkXX52WjIE1lKFrq6pR2EctuYVq71\nlMziDO4kE0Hc9sqq7mnyyWvBkcYW5JKPjCwYXlQ5efVRV2bhbRYZINQbjo+LWE2nnQasXGnXiev0\njYBZ575xowgrsHz5RD12qHNeuRJYv17QkrsOE9X6ZRE80aSjV3W8nZ1Vu5BLWfL7IdLr8uul+417\nmmISPb1ryPqwT+3e7V73KJ18mn6gozmtDS6Kpqh2jWMzkJ9Zvlz8XyiIKNVZ2w1N80XaNnKGC0eZ\nDJ9mkCzicnzdSsQmbZRKE1VdaWnJU5USp6yoTUuyKiBJ/fJcFcZpvzxVdjqVZpQdJYk3Up7Sl0p/\nI2xwSW0DeatidTQkGQs6wKuh6oukYq1ONaLmpeqCVRfcpLQk1W8nhU3EdtkJn3RQRrWJ/C50LtBJ\n65Q0fRLmHZfZJXElzWovRlwbXBbvOYtyXfpQHJdt3f9x0mbFoDyzaHLEedHqRGAztLkg7NxRYTqy\nRpQBVP5PDaKoozsvv/e48a2S+szrbE5pJCfZ604X2E73TB4SWlbPZ0lfPco1GcBt+blKeKY+U0/J\nIlebBRGdDuBqiMOPbmTmryv/dwAYAnAKgGEA5zDzLiJqA3AjgJMhAl8PMfOqPGmtN0Kf9PFx8R2l\nV1Z1ob0ZRCQO9Z8c+OUTpQ+xbkOolw73nqxfDzz0UBCueX6tznrxYn0eoU43DCstI66OWz02N9yn\nwBZ/f12d4oSoV/Xlsh2DSLRNHJvTxo3VvQqA2EdgPVo3oh1tz6htHPeseBfbSBL7RxZ7EeKWa7NB\n2cp22W9iKiMrW6EzXDhKkg8Eg9gB4DgA7QCeADBLSXMxgH8Jrs8F8MPg+hMAbg2uXwNgF4CZUeVN\nFslCXoW0t4tVbHu7eVWQdOepS/mq/3neKqgoF2M5TRJVTlp1kGtcrqi6JW2/tGrGKJWmje60Lsl5\nr9pd6mCzc+Rp/7BJlq6ShSnKg0sZaYFGq6EAdAO4R/q9AsAKJc09ALqD6xKAlwAQgPMA/Dy41wng\n1wAOjypvMjALuePYDNW6Z1wNkS7l69RZaSe9qHJlxmhTtdjo0Kmz4my0UvNIwrCyhO29yHSZ2i4J\n3XE3/enKdWnHODp5XbmmZ02LhjQ2i7hwYUS2skM1M5HeZhannknQDMzibAjVU/j7UwCuU9I8CeAo\n6fcOAEcAaANwK4AXAbwKoNdWXjMyC/WFRq0go+wQYT5xJ0QVNuNdXquvvj6u2cUeSjGue09UyKux\nsC3jMlKTJ1q9GUWIqLKTLDJcy3R951FMIUrKi/teTJKCLp96eSDZFlRZSJbyWTZtbdHMLuux6sos\nmnWfRReAMQBvBnAYgIeIaD0z75QTEVEvgF4AmDFjRt2JjIJOz6jqt1evFj7/pr0TIUL95KZN4vhU\n1Re9cizk4to9GaoeM0q/HhVTyaQTTeMHH7X3JLRpHDig1y93d4u2++Y3gR07RNqxMeCii4AZM+z0\n6OxAgNv+hyx8/3WI8vuX302hIGwSWdiY4ui8TXtdOjuBJUtEmtmzJ+7bcI37BEwcM0uWROeTxF4U\nByo9q1dXx2mxCFxwgajz8PDE9ovTT+bPF+9VjiGntpPcP1atqmM8KBkuHCXJB+nUUNcD+JSUbi2A\nj0eV12ySRRw1R1x1gLzCUN1Nk6p44toBXFc35bKQJEIx23Y4lHoYUX//xBAHOskijgpA194u70Bd\n6SYJ2Z40jU1NVQ+oqjCTZCfHejLp4XVQvdLmzrVLKHlKg2qf6Omppc8UAyvJyt/V9TkcT1m6u6MJ\n1FAlADsBHIuqgftdSppLUGvg/lFw/QUANwfXrwXwNIATo8prNmYRp8MkFSsHBiYevCR36LiiedgR\nZTWRKS9ZdDa5asadXNV8Qx2uuvfC5IKr0m9q17iMUabN1cU2quwk6oRGqshUqO0QtoW8oXLhwuiY\naDqUyxMDb/b3J7N32NImZdKqu7nO7TzrPSI6erJ0Smk4sxA04CMQxukdAK4I7n0FwMeC62kAfgxg\nO4BHABwX3D8kuP9UwCgus5XVbMyCOdvObXomjmQRpzPKg0OXl+44UhVJ9yDI+nndKYSmyV5tiygb\nTxLdc1iuy94UXd1Vum02qGZiEDJ0kp268k9qX+vqqu1XPT3J6IvyNHRh5HJa1Wah0qguGrK2KYTI\ny0bjyixytVkw850A7lTufVm6/jOAf9A894ru/mRDnLg+SWIAhT7kqs3ihBP0vvA2nbxqtxgeNvvV\n33ab0JszC31rnLMTbHUKy+zsBD73OfE8IPaj7N4trlW6Vq0CRkaq+YTPmMqX7UBynClbnKZwb4J8\nHoKuXrq6q+1roi+0Q6lnLgDp7CU6+1YSqO8o1NnL9AET7WsuWLoUeOSR6u9Fi+LTNzRUbd+DB8Vv\nlxhLCxYIOxkRcOaZQH+//tyR1atFfUZGhO3iwgtr2zOv/Q9522iscOEok+HTjJJFI6GuiOLq5G3q\nlXB1bYuwm3Z1HKqWFi6M1tPqJAub6iLJClDW28e1R5gkIl0aVXoJT2VM4zbtEk7FRHsSmPKw5a2e\nWR4XOu87tXy1LVWPJFsbubZPFv0/ajxl8Z7QDGqoen48s6giSk1j2wRo63zqoCKqDmqdGisrNYor\ns3M5K9wlT5NKIumEHcVkZLpl9Y2s4sjCbVqn0jPRGqeervr/qH0iWaJcFhM9kbCb6PqDbtLVHVRW\nL9fkJM9npe5yZRbN6jrrkQI6MXv+fCFeA+J72zbzMag2F8owH0AMqy1bJpZ74ACwbJlwBzSpvWxq\nEdn90EUEl1VLoWohjjuo7Ip8881CzVAoANdfL0KsJA1jrnPBDOkL6QjVJm1ttcd/XnBBNfSJSa3j\nepRnW1ut+sukxohTTxf1pi2cCZBMPRbVf84/H9i7F7jrLnFc8S23TDxKN6xr+Pv664GLL64eCxs3\nVLqaLml/CTE0VA1BYwsVUhcXWheOMhk+XrKoQjZAhuEfTBsCk3hVLFxYuwLr6qpKL66bx2xqEReV\njZw26YYl3bOqt0u4SSpO3nK+UZsx5bDscnu6rIZN7RRVPxfJKyrPJOpN22ZUF/WYThIwqR2j+qBN\nwlHbyPWdm/pr0pX/4KDeE9D1PcUBvBqq9REl/usi1eoGkc0F1FRuR0f1ed0uaJu6waYWcfX8UAdM\nGnWNziVUpc1F5WJq+0JBfGQX076+2klPZwdypVmnSkurNrO9Uxdmpe4LkFVxqn1Bp/ox2Rh0/cfG\nmFwXMy7t65LO1l9MCwBZJWaKwhxX7WqCK7PwaqhJCpv4PzxcK+7Lnk3hjvEkUVYBkWbDBv2pfCtW\
niDQbN1Z3qOvEd1UtUigIuuT/XTw/TB5GoVdLZ6e7GkEuE6iqI5iFh86mTXY13aZNwCWXCC8mQNAx\nPCy8ur71reou3UKhGl138WKhcnjsMWDz5ngRZ5PsyneBrNIzqZCGhsSOedN7lp8tFsUOe1ldtGkT\nsHZtbbm6d21Sq5rUarooCao3mutOeNd+aEpnO5FPN4Y3bqyNIlwqTYzCrD5ritKcKVw4ymT4TDXJ\nwrbiSbLiy8IAF2c1OzjI/M53VlfccfeFhHmEu8NDlZu8sk8am6ivL5n0pToAtLUJelTjaaFQu+HM\n1J4uiFLPpVVTRKkvbe1q66OqJBeqM3X1c1UZRXmqqfm4OmC49MM46UKYTobU9WkVWe65gFdDtTZc\nJgKXzhu3g9uej6s+SnP4kpqHznNI3VkcZzdtEvrC51R7keqWGXrquGwMM5Xjan9I0wdME6yLus9l\nwZLEBmT632WjXZz+nheTCJ+JsrnI/cekqsrKq8wzixZEmo6fRXmuz7gYXE0uonEkC52NoVCo3ZOQ\nJqx7UulL957UvSlposdGTTRZvy9dfVyfMz0b539X6BYpaSbUOPVLUoZJqlLrEbW/Jqu288yixZC2\n4+cxicQtT85TDTynrpBdJzGd95K6GstaekqTR7gyT7PfQGcUtm3aM9XBNMnmtaKOizi06Izg6sTr\nSq9JRaQiqTpIliBkpwa1X6TdX+MCzyxaDGk7ZZqVT9zyTAMyzuCNo87q6aldtbuoivKe5FzgqkpS\nn1G9p1ziYKmqDfU/ncdQGvWma12SqJZMUHd+mxYnLiov193uaRdVulMj1YWOlyw8s4iFRkz6cY2B\nLiqoODrqRqfNG0loMakvovLSGd1Nk05ax4mousY15sdxRzXlF6aNs0KXyzW5rUbVLbyXdk+LLX9v\ns/DMwohGqZNcVSYujEk3aUQZV7M2RmbpRZIWSWhJstIsl2u9saKkL1t/iTuRmvJ07Suu3nZZMrkk\nY0aVBvKOw+W9oTyzyAVpxdW4Hk6ug6wRq/zJLlmEz8V9n66H69jyd5kI1eeTGJ/lhYqLt13SFXqS\nNtClVe0MrnG4ksJLFp5ZNCXirspcXTXzNN5lJbHkRUO9acmyrCjjr6s0EKWmsUlPeevw40Jni4sj\nWSRFVvX1zMIjU2TVMeXBnsat1bWMRkkPzUBDXoiqm4udIY6qy6RSahYvrDBtHGbYbHBlFrmG+yCi\n0wFcDaAI4EZm/rryfweAIQCnABgGcA4z7yKiTwK4TEp6IoCTmfnxPOn1MCPJ4Uw6yCEXABECYsaM\nbA+JqXs0zgbS4BrKJEtEHe5jiuSrHlQV1Ta2EBtZ9UUTXCLpqvTI7QGI6zQHTDUlXDhKkg8Eg9gB\n4DhUz+CepaS5GLVncP9Qk88JAHbYyvOSxeRAPVbczbCqnyr11MEmRWRtX8gaaYzHpvo2g6u2CWgC\nyaILwHZm3gkARHQrgLMgztQOcRaAlcH1TwBcR0QUVCDEeQBuzZFOjzoiryMn611GM9DQDBKUDvLK\nXydFrFhhb5u8pYcopDm+VH0nQ0O155DYpJRmRp7M4i0Anpd+7wHwHlMaZh4lopcBdAJ4SUpzDgRT\nmQAi6gXQCwAzZszIhmqP3FGPiaCRk029aGj4mcwOSBKNtdFIw+jV+gLNydCToKlDlBPRewD8kZmf\n1P3PzGsArAGAOXPmsC6Nh0erohkkKBsmA406JGVmOvuF6YTDyYY8mcVvARwt/T4quKdLs4eISgAO\nhTB0hzgXwA9ypNHDY1KjmVfoISYDjVlCre9kZJY65MksfgngeCI6FoIpnAvgE0qa2wEsAbAJwNkA\n7g/tFURUAPBxyA05dgAABsVJREFUAO/PkUYPDw+PXNEqzDI3ZhHYIJYBuAfCM2otMz9FRF+BsL7f\nDuAmAP9ORNsB/B8EQwkxF8DzoYHcw8PDw6NxoFrHo8mLOXPm8ObNmxtNhoeHh8ekAhE9ysxzbOkK\n9SDGw8PDw2NywzMLDw8PDw8rPLPw8PDw8LDCMwsPDw8PDytaxsBNRC8CeC7h40egdtf4VICv89SA\nr/PUQJo6H8PMr7clahlmkQZEtNnFG6CV4Os8NeDrPDVQjzp7NZSHh4eHhxWeWXh4eHh4WOGZhcCa\nRhPQAPg6Tw34Ok8N5F5nb7Pw8PDw8LDCSxYeHh4eHlZ4ZuHh4eHhYcWUYBZEtJaI9hHRk9K9w4no\nF0T0bPB9WHCfiOgaItpORFuJ6OTGUZ4MRHQ0EW0goqeJ6CkiujS438p1nkZEjxDRE0GdrwzuH0tE\nDwd1+yERtQf3O4Lf24P/ZzaS/jQgoiIRbSGiO4LfLV1nItpFRNuI6HEi2hzca9m+DQBE9Doi+gkR\n/Q8RPUNE3fWu85RgFgD+DcDpyr3LAdzHzMcDuC/4DQAfBnB88OkFcEOdaMwSowA+z8yzALwXwCVE\nNAutXecDAD7IzO8GcBKA04novQC+AeA7zPw2AL8HsDRIvxTA74P73wnSTVZcCuAZ6fdUqPMHmPkk\naW9BK/dtALgawN3M/A4A74Z43/WtMzNPiQ+AmQCelH7/CsCRwfWRAH4VXA8COE+XbrJ+APwngA9N\nlToDeA2AxyDOfH8JQCm43w3gnuD6HgDdwXUpSEeNpj1BXY8KJooPArgDAE2BOu8CcIRyr2X7NsQJ\nor9R31W96zxVJAsd3sjMLwTXewG8Mbh+C4DnpXR7gnuTEoGqYTaAh9HidQ7UMY8D2AfgFwB2ANjP\nzKNBErlelToH/78MoLO+FGeC1QD6AYwHvzvR+nVmAPcS0aNE1Bvca+W+fSyAFwHcHKgbbySi16LO\ndZ7KzKICFuy35XyIiegQALcBWM7Mf5D/a8U6M/MYM58EsdruAvCOBpOUK4joDAD7mPnRRtNSZ7yP\nmU+GULdcQkRz5T9bsG+XAJwM4AZmng3gVVRVTgDqU+epzCz+l4iOBIDge19w/7cAjpbSHRXcm1Qg\nojYIRvF9Zv5pcLul6xyCmfcD2AChgnkdEYXHB8v1qtQ5+P9QAMN1JjUtTgXwMSLaBeBWCFXU1Wjt\nOoOZfxt87wOwDmJh0Mp9ew+APcz8cPD7JxDMo651nsrM4nYAS4LrJRB6/fD+4sCj4L0AXpZEvUkB\nIiKI882fYeZvS3+1cp1fT0SvC66nQ9honoFgGmcHydQ6h21xNoD7g9XZpAEzr2Dmo5h5JsT59fcz\n8yfRwnUmotcS0V+E1wB6ADyJFu7bzLwXwPNE9Pbg1gIAT6PedW608aZOBqIfAHgBwAgEl14Koau9\nD8CzANYDODxISwCuh9B3bwMwp9H0J6jv+yBE0q0AHg8+H2nxOp8IYEtQ5ycBfDm4fxyARwBsB/Bj\nAB3B/WnB7+3B/8c1ug4p6z8fwB2tXuegbk8En6cAXBHcb9m+HdTjJACbg/79MwCH1bvOPtyHh4eH\nh4cVU1kN5eHh4eHhCM8sPDw8PDys8MzCw8PDw8MKzyw8
PDw8PKzwzMLDw8PDwwrPLDw8LCCisSDC\nafi53P6Uc94zSYqG7OHRrCjZk3h4THn8iUUYEQ+PKQsvWXh4JERwrsI/B2crPEJEbwvuzySi+4Oz\nBO4johnB/TcS0ToSZ248QUR/E2RVJKJ/JXEOx73BDnQQ0T+SOJNkKxHd2qBqengA8MzCw8MF0xU1\n1DnSfy8z8wkAroOIAAsA1wK4hZlPBPB9ANcE968B8ACLMzdOhtiBDIhzB65n5ncB2A9gUXD/cgCz\ng3z68qqch4cL/A5uDw8LiOgVZj5Ec38XxIFLO4PAjXuZuZOIXoI4P2AkuP8CMx9BRC8COIqZD0h5\nzATwCxYH2ICIvgCgjZmvIqK7AbwCEd7hZ8z8Ss5V9fAwwksWHh7pwIbrODggXY+hakv8KESMn5MB\n/FKKJOvhUXd4ZuHhkQ7nSN+bgusyRBRYAPgkgIeC6/sAfBaoHNR0qClTIioAOJqZNwD4AkQ48QnS\njYdHveBXKh4edkwPTuALcTczh+6zhxHRVgjp4Lzg3ucgTjW7DOKEs/OD+5cCWENESyEkiM9CREPW\noQjgewFDIQDXsDinw8OjIfA2Cw+PhAhsFnOY+aVG0+LhkTe8GsrDw8PDwwovWXh4eHh4WOElCw8P\nDw8PKzyz8PDw8PCwwjMLDw8PDw8rPLPw8PDw8LDCMwsPDw8PDyv+H54gjB3Fee3GAAAAAElFTkSu\nQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXl4VNX5+D/vnUkCiEgbqygJBNEq\naGSLSER0EBekqFi0xS2gVuraYhe/ri2tVqxVGwW1ioqkKrjwE1dwASJb2JeioAIaSEQRo7gBSWbu\n+f1xl8xMZpJJMjOZCefzPHkyM3c7995z3ve8yzlHlFJoNBqNRtMQRmsXQKPRaDSpj1YWGo1Go2kU\nrSw0Go1G0yhaWWg0Go2mUbSy0Gg0Gk2jaGWh0Wg0mkbRyiLFERGPiPwgIt3iuW9rIiJHikjcc7ZF\n5HQRKQ/6/pGIDIll32Zc6wkRubW5x7c1RKRSRHxxPuczIjIxnufUNB9vaxegrSEiPwR97QBUAwH7\n+2+VUs825XxKqQDQMd777g8opY6Ox3lE5DfApUopX9C5fxOPc2vig4g8A2xRSk1s7bK0VbSyiDNK\nKVdY2z3X3yil3o22v4h4lVL+ZJRNo9G0nEhttqntOB3bvXZDJRkRuUtEnheRGSLyPXCpiBSKyDIR\n2S0in4vIQyKSYe/vFRElInn292fs7XNE5HsRKRORHk3d195+toh8LCLfishkEVkiIuOilDuWMv5W\nRLaIyDci8lDQsR4R+beIVInIJ8DwBp7PbSIyM+y3h0XkAfvzb0Rkk30/W+1ef7Rzua4REekgIv+1\ny/YBMCBs39tF5BP7vB+IyLn27/nAFGCI7eL7KujZTgw6/mr73qtEZLaIHBbLs4lQ5rtEZKZdP34Q\nkfUi0tMu3y4R2S4ipwft31lEptnvpFJE/i4ihr3tKBFZICJfi8hX9v0fFPZ8/iAiG+w6MENEsqKU\nq8Fz2Zxov5tvRORJ51wicoiIvGnXna9FZGHQeY8VkffsbRtE5BdRrv8bESkN+u7WdRG5Fvg1cKv9\nzF6298kRkZft5/apiFzXwHNvJyIPiEiFiOwUkUdEpJ297XQRKReRW0XkC2BqpN/sfRurB9eKyBbg\nw2hlSVmUUvovQX9AOXB62G93ATXAOVjKuj1wAnAilqV3BPAxcL29vxdQQJ79/RngK6AAyACeB55p\nxr6HAN8D59nb/gDUAuOi3EssZXwFOAjIA7527h24HvgAyAGygYVW1Yt4nSOAH4ADgs79JVBgfz/H\n3keA04C9wPH2ttOB8qBzVQI++/N9QCnwE6A7sDFs318Bh9nv5GK7DIfa234DlIaV8xlgov35TLuM\nfYF2wCPA/FieTYT7v8u+p9PtY58DPgVutr9fA2wO2v81+3odgEOB1cCV9rafA8OATPt9LwHuC3s+\ny4Au9nv5GMsSjlSuWM71P/sdH2yf13k+/8JSuBn28afYv2fa93aTve10+7kfGeEZh7wDItf1iUHb\nDWAdcKt9nSOx2uOwKPc3GXjZrh+dgDeBO4PqlR+42z5X+yi/xVIP5trXaN/a8qnJ8qy1C9CW/4iu\nLOY3ctyfgBftz5EaxX+C9j0XeL8Z+14BLAraJsDnRFEWMZZxUND2/wf8yf68kCAhBIwgirKwty8D\nLrY/nw181MC+rwPX2Z8bUhbbg98FcG3wvhHO+z7wC/tzY8piOnB30LZOWHGqnMaeTYTr3gXMCfp+\nPvAtYNjff2KfryPQFUuxZAXtfxnwTpRzXwCsDHs+Y4K+PwBMifH9RzpX8Ds+13lvWAL1/wE9w84x\nFPgMkKDfXgRuj/CMm6osBgOfhF3vDmBqhHsxgH1A96DfhmArZbte7QMyg7ZH+i2WenBKLM83Ff90\nzKJ1qAj+IiLHAPdjuUY6YFWs5Q0c/0XQ5z00HNSOtu/hweVQSikRqYx2khjLGNO1gG0NlBes3vRF\n9v+L7f9OOUZiNfqjsBp5B2BlI+cDy2qIWgax3G83Ylkd2GU/OIbzgnV/S50vSqnvROQbLGHuPJOm\nvLOdQZ/3AruUUmbQd6d83YEsYKeIOPsbWJ0URKQL8BCW4DzQ3rYr7Frh5fpppALFeK7w53u4/fke\n4G/APBEJYHVg/mVv365syRp0XNdIZWgi3YFuIrI76DcPlnUZThes57g+6DlK2D47lVI1jfwWSz0I\nafvphI5ZtA7haaOPYfVkj1RKdQL+Qv3KGm8+x+rxACBWK2mokbakjJ8DuUHfG0vtfQE4XUS6YrnJ\nnrPL2B54CZiE5SLqDLwdYzm+iFYGETkCeBTLxZNtn/fDoPM2lua7gzolg4gciGUBfBZDuVpCBbaA\nV0p1tv86KaWOt7f/EysbL99+Z+Nofr2K5Vzhz3cHWEJTKXWjUioPGAX8n4icam/PlSAJbR8X6bn9\niNUxcOgStj38HVVgWQadg/4OVEqdE+HcO7Fcw0cH7XuQUio4JhOpDoT/Fks9SNtpvrWySA0OxHI1\n/CgivYDfJuGarwP9ReQcEfECvwd+lqAyvgBMEJGuIpIN/F9DOyulvgAWA09juTI225uysPzDu4CA\nbWUMa0IZbrUDwt2w4igOHbEa8S4svXkVcEzQ9p1AjtgB/QjMAK4UkePtoO4kLBdfVEstHiilKoD3\ngPtEpJOIGGKNYTnF3uVALCH7rYjkYrkOm0ss57o+6B3fghUjw65jPW2l8C2Wa8bE6oX7gT+KSIaI\nnIblonw+wrnXA8eLSL7dafhr2PadWLEshzKgRkT+aAevPfaxA8KOQ1kp508AxSLyM7HIEZEzY3w2\nDq1SD5KFVhapwR+BsVgB58eI3FjiilJqJ1YGyQNAFdATWIvVe4x3GR8F5gEbsFxGL8VwzHNYfmHX\nBaWU2o3lKnoZK0h8AZbSi4W/Ylk45cAcoCTovP/DCnCusPc5mlAX2zvAZix3T7Dbxjl+LvB3u1yf\nY/WOL4mxXC3lUuAArID9N1g+f6fX/VdgIJaAfhWY1YLrxHKuGcC7wFbgI6xYBVjPcz5W8HoJ8KBS\napFSqhorYeE8rESMh7BiVZvDT6yU2mifr9Q+98KwXZ4A+tiZWC8pKy11hF3mcvv8j2HFESLxRywX\n2Ar7Ht/GcnXGTCvXg4Qjoe5Czf6KiHiwzOgLlFKLWrs8Go0mtdCWxX6MiAy33TJZWEHjWqyelUaj\n0YSglcX+zcnAJ1i++rOA823XgEaj0YSg3VAajUajaRRtWWg0Go2mUdrMoLyDDz5Y5eXltXYxNBqN\nJq1YvXr1V0qphtLmgTakLPLy8li1alVrF0Oj0WjSChFpbEYFQLuhNBqNRhMDWlloNBqNplG0stBo\nNBpNo7SZmIVGo0kOtbW1VFZWsm/fvtYuiqYJtGvXjpycHDIyok1x1jBaWWg0miZRWVnJgQceSF5e\nHqETxmpSFaUUVVVVVFZW0qNHj8YPi
IB2Q2k0miaxb98+srOztaJII0SE7OzsFlmDWlmkOWVlMGmS\n9V+jSRZaUaQfLX1n2g2VxpSVwbBhUFMDmZkwbx4UFrZ2qTQaTVtEWxZpTGmppSgCAet/aWlrl0ij\nSTxVVVX07duXvn370qVLF7p27ep+r6kJX/k0MpdffjkfffRRg/s8/PDDPPvss/EoMieffHK9WMHI\nkSPp3LlzyG/33XcfHTp04Pvvv3d/e/fddznooIPce+zbty8LFiyIS7magrYs0hifz7IoHMvC52vt\nEmk0iSc7O5t169YBMHHiRDp27Mif/hS6cJ9SCqUUhhG5Pzxt2rRGr3Pddde1vLBBHHjggSxbtoxB\ngwbx9ddfs3Pnznr7zJgxgwEDBjB79mwuu+wy9/ehQ4cye/bsuJanqWjLIo0pLLRcT3feqV1QmtSm\nrKKMSYsmUVaRuODali1b6N27N5dccgnHHnssn3/+OePHj6egoIBjjz2Wv//97+6+J598MuvWrcPv\n99O5c2duvvlm+vTpQ2FhIV9++SUAt99+O8XFxe7+N998MwMHDuToo49m6dKlAPz444+MHj2a3r17\nc8EFF1BQUOAqsnDGjBnDzJkzAXjppZe44IILQrZ//PHH+P1+Jk6cyIwZM+L+fFqKVhZpTmEh3HKL\nVhSa1KWsooxhJcO4Y8EdDCsZllCF8eGHH3LjjTeyceNGunbtyj333MOqVatYv34977zzDhs3bqx3\nzLfffsupp57K+vXrKSws5Kmnnop4bqUUK1as4F//+pereCZPnkyXLl3YuHEjd9xxB2vXro1atjPO\nOIP58+djmibPP/88v/71r0O2z5gxgzFjxuDz+Xj//ff56quv3G0LFiwIcUOVl5c34+m0DK0sNBpN\nQiktL6UmUENABagJ1FBaXpqwa/Xs2ZOCggL3+4wZM+jfvz/9+/dn06ZNEZVF+/btOfvsswEYMGBA\nVEH8y1/+st4+ixcvZsyYMQD06dOHY489NmrZMjIyGDRoEDNnziQQCJCTkxOyfebMmYwZMwaPx8Oo\nUaN46aW6peqHDh3KunXr3L/WmGFbxyw0Gk1C8eX5yPRkUhOoIdOTiS/Pl7BrHXDAAe7nzZs38+CD\nD7JixQo6d+7MpZdeGnGcQWZmpvvZ4/Hg9/sjnjsrK6vRfRpjzJgxXHjhhdx1110hv69du5ZPPvmE\noUOHAlBdXc3Pf/5zrr766mZdJxFoy0Kj0SSUwtxC5hXN486hdzKvaB6FucnxmX733XcceOCBdOrU\nic8//5y33nor7tcYPHgwL7zwAgAbNmyIaLkE4/P5uPnmmyO6oO666y7Ky8spLy9nx44dfPrpp1RW\nVsa9zM1FWxYajSbhFOYWJk1JOPTv35/evXtzzDHH0L17dwYPHhz3a9xwww0UFRXRu3dv9++ggw6K\nur9hGPz5z38GcK0TpRTPP/888+bNc/cTEUaNGsXzzz9Pnz593JiFw1//+lfOP//8uN9PQyR0DW4R\nGQ48CHiAJ5RS94RtPwUoBo4HxiilXrJ/7ws8CnQCAsA/lFLPN3StgoICpRc/0mgSz6ZNm+jVq1dr\nFyMl8Pv9+P1+2rVrx+bNmznzzDPZvHkzXm9q9sMjvTsRWa2UKohyiEvC7khEPMDDwBlAJbBSRF5V\nSgXbaduBccCfwg7fAxQppTaLyOHAahF5Sym1O1Hl1Wg0mqbyww8/MGzYMPx+P0opHnvssZRVFC0l\nkXc1ENiilPoEQERmAucBrrJQSpXb28zgA5VSHwd93iEiXwI/A7Sy0Gg0KUPnzp1ZvXp1axcjKSQy\nwN0VqAj6Xmn/1iREZCCQCWyNsG28iKwSkVW7du1qdkE1Go1G0zApnQ0lIocB/wUuV0qZ4duVUo8r\npQqUUgU/+9nPkl9AjUaj2U9IpLL4DMgN+p5j/xYTItIJeAO4TSm1LM5l02g0Gk0TSKSyWAkcJSI9\nRCQTGAO8GsuB9v4vAyVOhpRGo9FoWo+EKQullB+4HngL2AS8oJT6QET+LiLnAojICSJSCVwIPCYi\nH9iH/wo4BRgnIuvsv74RLqPRaPYzhg4dWm+AXXFxMddcc02Dx3Xs2BGAHTt21JvEz8Hn89FYCn5x\ncTF79uxxv48YMYLdu1ueezNx4kREhC1btoRcS0RCyrRu3TpEhLlz54Yc7/F4QuaPuueekJEKLSah\nMQul1JtKqZ8rpXoqpf5h//YXpdSr9ueVSqkcpdQBSqlspdSx9u/PKKUylFJ9g/4iT+Wo0Wj2Ky66\n6CJ39laHmTNnctFFF8V0/OGHHx4y71JTCVcWb775Zr11KZpLfn5+yL29+OKL9eabmjFjBieffHK9\nmWnbt28fMn/UzTffHJcyOaR0gFuj0bQN4rn87wUXXMAbb7zhLnTkTI8xZMgQd9xD//79yc/P55VX\nXql3fHl5OccddxwAe/fuZcyYMfTq1Yvzzz+fvXv3uvtdc8017vTmf/3rXwF46KGH2LFjB0OHDnXn\nccrLy3NniH3ggQc47rjjOO6449zpzcvLy+nVqxdXXXUVxx57LGeeeWbIdYIZNWqUW+atW7dy0EEH\ncfDBB7vblVK8+OKLPP3007zzzjstWlO7qWhlodFoEoqz/O8dd1j/W6owfvrTnzJw4EDmzJkDWFbF\nr371K0SEdu3a8fLLL7NmzRoWLFjAH//4RxqapeLRRx+lQ4cObNq0ib/97W8hYyb+8Y9/sGrVKv73\nv//x3nvv8b///Y/f/e53HH744SxYsKDeanWrV69m2rRpLF++nGXLljF16lR3yvLNmzdz3XXX8cEH\nH9C5c2dmzZoVsTydOnUiNzeX999/n5kzZ9abQ2rp0qX06NGDnj174vP5eOONN9xte/fuDXFDPf98\ng5NeNBmtLDQaTUJJxPK/wa6oYBeUUopbb72V448/ntNPP53PPvss4op0DgsXLuTSSy8F4Pjjj+f4\n4493t73wwgv079+ffv368cEHHzQ6SeDixYs5//zzOeCAA+jYsSO//OUvWbRoEQA9evRw53ZqaBp0\nqFskafbs2fXmf3LWvHD2C3ZFhbuhwhVNS2mb49I1Gk3KkIjlf8877zxuvPFG1qxZw549exgwYAAA\nzz77LLt27WL16tVkZGSQl5fXLFfNp59+yn333cfKlSv5yU9+wrhx41rk8nGmNwcrEB3NDQXW2tx/\n/vOfKSgooFOnTu7vgUCAWbNm8corr/CPf/wDpRRVVVV8//33HHjggc0uW6xoy0Kj0SSURCz/27Fj\nR4YOHcoVV1wREtj+9ttvOeSQQ8jIyGDBggVs27atwfOccsopPPfccwC8//77/O9//wOs6c0POOAA\nDjroIHbu3Om6vMBaS/v777+vd64hQ4Ywe/Zs9uzZw48//sjLL7/MkCFDmnxvHTp04J///Ce33XZb\nyO/z5s3j+OOPp6KigvLycrZt28bo0aN5+eWXm3yN5qAtC41Gk3AKC+O/9O9FF13E+eefH5I9dM
kl\nl3DOOeeQn59PQUEBxxxzTIPnuOaaa7j88svp1asXvXr1ci2UPn360K9fP4455hhyc3NDpjcfP348\nw4cPd2MXDv3792fcuHEMHDgQgN/85jf069evWUugOq6mYGbMmFHPLTV69GgeffRRioqK3JiFw/Dh\nw+OaPpvQKcqTiZ6iXKNJDnqK8vSlJVOUazeURqPRaBpFKwuNRqPRNIpWFhqNpsm0Fff1/kRL35lW\nFhqNpkm0a9eOqqoqrTDSCCfNtl27ds0+h86G0mg0TSInJ4fKykr0gmPpRbt27cjJyWn28VpZaDSa\nJpGRkUGPHj1auxiaJKPdUBqNRqNpFK0sNBqNRtMoWlloNBqNplG0stBoNBpNo2hl0QDxXLBFo9Fo\n0hmdDRUFZ8EWZ1rleM2WqdFoNOmItiyikIgFWzQajSZd0coiCs6CLR5P/BZs0WjSEe2O1YB2Q0XF\nWbCltNRSFNoFpdkf0e5YjYNWFg2QiAVbNJp0IpI7VreJ/RPthtJoNFHR7liNg7YsNBpNVLQ7VuOg\nlYVGo2kQ7Y7VgHZDaTQajSYGtLLQaDQaTaNoZaHRaDSaRtHKQqPRaDSNopWFRqPRJIl0Hg2vs6E0\nGo0mCaT7aHhtWWg0Gk0SSPfJSbWy0Gg0miSQ7qPhtRuqEcrK9OhVjUbTctJ9NLxWFg2Q7j7GRKCV\np0bTfNJ5NLxWFg2QzBk300EIa+UZP+L5vtOh7mjSn4QqCxEZDjwIeIAnlFL3hG0/BSgGjgfGKKVe\nCto2FxgELFZKjUxkOaPh+Bgd4ZgoH2O6CGE9XXV8iOf7Tpe6o0l/EhbgFhEP8DBwNtAbuEhEeoft\nth0YBzwX4RT/Ai5LVPliwfEx3nlnYhthumRJpHuALlVo6vtuKDc/XeqOJv1JpGUxENiilPoEQERm\nAucBG50dlFLl9jYz/GCl1DwR8SWwfDGRDB9jsiyYlpLuAbpUoSnvuzHLIV3qjib9SaSy6ApUBH2v\nBE6M5wVEZDwwHqBbt27xPHVSSSchnM4BulShKe+7MddfOtUdTXqT1gFupdTjwOMABQUFKl7nbY2A\noRbC+xexvu9YLAdddzTJIJHK4jMgN+h7jv1byhGsHEAHDDWpg7YcNKlCIpXFSuAoEemBpSTGABcn\n8HrNwvEJV1eDYcDIkTrjR5NaaMtBkwokLBtKKeUHrgfeAjYBLyilPhCRv4vIuQAicoKIVAIXAo+J\nyAfO8SKyCHgRGCYilSJyViLKWVpqKQrTBL8fXnsNvF6d8aPRaDTBJDRmoZR6E3gz7Le/BH1eieWe\ninTskESWzcHnsywK03SuC5dfDt26abNfo9FoHNI6wB0PCgvh4Yfh+ust11NWFhQVaSWh0Wg0wez3\nygJg/HjIz0//IKKe9kGj0SQKrSxs0j2IqKd90Gg0iUSvZ9FG0NM+aDSaRKKVRRtBz9uk0WgSiXZD\npQgtjTfowVsajSaRaGWRAsQr3pDucReNRpO6aDdUCqDjDRqNJtXRyiIF0PEGjWb/oqE1SlIV7YZK\nAXS8QaPZf0jXNHetLFIEHW/QaPYP0nV5Yu2G0mg0miSSrm5nbVloNBpNEklXt7NWFhqNRpNk0tHt\nrN1QmoSTjpkfGo0mFG1Z7MckY5badM380ITS1LqiZ0Bue2hlsZ+SLCGerpkfmjqaWld0B6Ftot1Q\n+ynJGjWerpkfmjqaWlf0jARtE21Z7Kc4Qtzp/SVKiKdr5oemjqbWlWTVLU1yEaVUa5chLhQUFKhV\nq1a1djHSCu1Xbjsk+l3qmEXbRURWK6UKGt1PKwuNJr3RMQJNS4hVWWg3FFBWUUZpeSm+PB9UFuoe\nkSat0EkEmmSw3yuLsooyhpUMoyZQg+ezk5GSefhrPbqHhnYlpAs6RqBJBvu9sigtL6XaX42JSWDr\nSUgNKDN1e2jJEuDatZE+6CQCTTLY75VFdodsTEzrS94ClFGNIe3JzJSU66ElU4CnimtDWzexkY7T\nR2jSi/1+nEXVnqq6L7nLYOwwCi55JSV70snMX0+F8RGOcrzjDuu/ni5Eo2k99ntl4cvz4RFP3Q+5\ny+h/4VsppygguQLccW3ceWfruaD04K7ko+fx0kRjv3dDFeYW8sgvHuHaN67FVCYZngyK+hS1drEi\nkmzfdKJdG425mHTgNrnoOJWmIRpUFiLSSSn1XZRt3ZRS2xNTrOQyfsB48g/Jd9NnC3NTt4W0Fd90\nLIJJB26TS6rEqTSpSWOWRSnQH0BE5imlhgVtm+1sawsU5ha6SsLp8WZnQ1VV/AWVDtrGLpjainJM\nB7Qlp2mIxpSFBH3+aQPb0ppg4Q1Wj7e6GkwTDAOyskJ7vi0R9vuLqd8cF5NWonUk4lk0dk5tyWka\nojFloaJ8jvQ9LQkX3mPHWp9NO5vWDBtz0VJhvz+Y+s1xMcH+oURjIREdiljP2dYsOd0BiR+NKYtD\nROQPWFaE8xn7+88SWrIkES68wWpMwZZFsEneUmG/P5j6zXExTZrU9pRocwVVIjoUDZ2zrQrUlird\ntvpcmktjymIqcGCEzwBPJKRESSZceBcVWX/RYhYtFfb7g6nfnGeUTCWa6isEJuJZRDtnW3aLtkTp\ntuXn0lwaVBZKqb9F2yYiJ8S/OMknWHhn99pASdUjABRdWlQvK8oRMsXFsHZty67ZliteLAoxXGAn\nS4mmwwqBiXgW0c7Zlt2iLVG6bfm5NBulVMx/QG/gTmALsKopxyb6b8CAAaolLN2+VGXemamYiGIi\nKuvOLLV0+9K67UuVat9eKY9HqcxMpbKyrM/t21vbNLET/CyT/fzuvtu6Llj/7747Mddpyj0uXWqV\nozXqUWu+i2TQ3Gfb1p9LMLHK8kYH5YlIHnCR/VcLdAcKlFLliVFfrUNpeSm1gVr3e02ghtLyUte6\nCO5pOMFvpXSvozm0Zq8t1VYIbG13R1t3izbXim/rz6U5NDYorwzoBMwERiulNovIp7EqChEZDjwI\neIAnlFL3hG0/BSgGjgfGKKVeCto2Frjd/nqXUmp6bLfUPHx5PjI8GdQErCh3pifTWt/C2e6rEzIe\nD4iA3992g9QtpaG4QGsG+ZMpBGIRVKng7mjrbtHmop9LKI1ZFjuBrsChWNlPm4kxZVZEPMDDwBlA\nJbBSRF5VSm0M2m07MA74U9ixPwX+ChTY11ttH/tNLNduDoW5hZSOLaVkfQkARX2smEWw0AtP9Wyp\nwGmr2RaN9ZZbu9eWSkJgf8iO07QNGgtwjxKRg4BfAhNF5Cigs4gMVEqtaOTcA4EtSqlPAERkJnAe\n4CoLx0IRETPs2LOAd5RSX9vb3wGGAzNivbHm4IzidlbO27C6IxMuzg8RerfcErR/CwROa7sfEkks\nveXWENipqJxbW3EGk4rPR5M6NBqzUEp9C0wDponIocCvgH/bc0PlNnBoV6Ai6HslcGKM5Yp0bNfw\nnURkPDAeoFu3bjGeumGCV86TxXsxa47DDEjcXQRNd
T+kU0NOxd5yKijnaO8wFSydVHg+bZV0arsN\n0aRZZ5VSO4HJwGQR6Z6YIjWpPI8DjwMUFBS0aES580K3d95MTaCGgApgdJ+Px3sHQkbchV5TBGq6\nNeRU6i07tHZsINXfYWs/n7ZKqr/3ptBYgPvVRo4/t4FtnwHBlkeO/VssfAb4wo4tjfHYJhP8QjEu\nxuyzFw5bjbGvCzdO3E5n1TPuQq8pAjUdG3Iq9JaDaW1rJ9XfYWs/n1Qh3lZAqr/3ptCYZVGI5Q6a\nASynaZMHrgSOEpEeWMJ/DHBxjMe+BdwtIj+xv58J3NLA/i0i+IUS8MCq3wDjCYhi8mKD4uK6hXfi\nrTAaSql0Kq1uyC0nntZOcwRKvN5holwaqWgNBpPqo+6j0ababkODMLBSXocD04G1wF3AsbEM4LCP\nHwF8DGwFbrN/+ztwrv35BKx4xI9AFfBB0LFXYA3+2wJc3ti1WjIozxmAgwQUmMoaQWH/F7/yeANJ\nHZwTaUBQaw7c0tTRksFaLX2H+9NAsWCSdd+JGrCZ6m2XeAzKU0oFgLnAXBHJwhqYVyoif1NKTYlB\nEb0JvBn221+CPq/EcjFFOvYp4KnGrhEPnF7VvQ/vZPbMn0DAC3hA/CAmgYABykiaGRnJdL3lltTr\n7aUCyQ4etnQaj5aUsS25NJpCsu47UVZAqrlkm0ssI7izgF9gKYo84CHg5cQWK/kUFsLLhYfx+AUb\nmDWnii/Njaz7tALafwVzH0Q0g56yAAAgAElEQVRMITPTkxQzsk2ZrgmkNYKHrflusrOtWZCVSs16\nkSjFnWqj7vdXGgtwlwDHYVkHf1NKvZ+UUrUi40flM34UXPP686xb/R/rx0Pf54Ta/6P46lEJrUDR\nBgCma6VNdK+/NXrarSVQyspgwgTrXg3DmswylepFIhV3qo26319pzLK4FCue8HvgdyJufFsApZTq\nlMCytSr9DuuHRzyYysTbfTX9+70FOYdixfzjT6TGFjwAMN2IVXi0RKG0Vi+/NQSKoxhN05pqpqoq\nuddvjEQr7v1NiKfi2IzGYhZGsgqSSpRVlDFh7gSUUhhiPYKpa6Yyff105hXNqzd1eYuu5Yzv2N76\n4wCSnTLY0t7o/uQ2SHXXZKqXL51I1bEZTRqUt79QWl5KTaAGExNRgqlMFKreTLQtJbhSeDzgtd9G\nshtba6UMxqM3ur/0OFNdMaZ6+dKJVE1k0MoiCKd3nd1rJJmeO60pP0Ss1DFUvZloW0rI+A7gqqug\nW7fkN7ZEVM5YhIfujTaNVFeMqV6+dCFV24VWFjbBvWtvRm/OvnMi5JQxZ8scas1aDMOgeHhxXF1Q\nkZZ0bY3G1lopg/tbbzQV/dCa1CNV24VWFjbBveuAafLK3G/xnPo6pjJdd1TVnvhGFVOlUjjlKClp\nnWunQmNItCBPVT/0/ka6KOxUaRfBaGVh4/Su91UHUEYtKm8+pmniMTwIEncXlEMqVYrp0y1hNn16\nagizWBt2SwVAMgR5qvqh05XmvHOtsFuGVhY2bu96diVP7R5LoOtKMj1ZFA8vpmpPFb48X1xdUK1F\ntEaWasKsKam3LRUAybj3VPVDB9cHSI9ed3PfearV8XRDK4sgrF5+d4oqJlFaXtpmFIRDQ40s1YRZ\nrA27JQLATWjITvy9p4rLMZjwbLzgpYJTudfd3HeeanU83dDKIgLOinltjYYaWaoJs1gbdnMFQLji\nLC62Brol8t5TyeUIofXBtNeqVCr1e93NfeepVsfTDa0s9iMaa2SpJMxiadiOZdAcQR+uOKuq0nvE\nfHMIrg/hlkUq97pbIvRTqY6nG1pZ7EcEN7Ls7MSs0RFPGmrYLY1VhAvK7dutc6bqs0gE4UIX0qfX\nnWihny5ZU8lEK4v9DKfip3tWSDSXWqyNPDhdeNo0mDo1chZYOgqNppQ5XOi2xj2m2jPWWVOR0cqi\nEcoqylo92B2vxpTseagSKQQiudSa2sgLC63y+f2Rn0WqCY1Ynmeqlbkx4lneeNU3nTUVGa0sGqCs\nooxhJcOoCdSQ6cmM+ySCjV6/rK7n29IslWTPQ9WYEIjWsJtqGQTvO2lS0xt5Q3GcVBIasQrVVCpz\nLMSrvPFUOuF1IjvbqlupYvm0FlpZNIAzoWBABZo0iWA8ejhO5d+3z8pQgZY1puBGCYmfhypcCJSU\nhPrGIzXs5lgGwdubkyXTULA0mvXixHwSnT0VTKxCNd3SQ+NV3ngqyfDY3oQJsdfJVHOpxROtLBrA\nl+cj05PpWha+PF+jlSFePRyn8juKQqRljSnZ81CFB5CDraOxYyM37JY2+OZmyUQLlkYKAA8bBtXV\nVqqpYUBWlrUPJFZIxCpUG3sGyRoVHyvxSmeNt5J06kRTrNV0cwE2mVgW6k6HvwEDBrRgyfLoLN2+\nVN298G61dPtStXSpUlnt/EqMgMpq54+4AHu8Fn1/7DHreBGlMjKUuvrqli/4nuyF453rXX116DO5\n+mql2re3Prdvb+23dKn1e1ZW6O/xLktLzhn8bp2/aPeTCFp6D0uXxlbOWPdLNRJRv5vyLOLV9pMN\nsErFIGNbXcjH6y9RyiKYq28qV0itJSikRg0selkt3R5ae+LR0JYuVSozs04gZWSkT4ONRKRnEtyw\ng7dnZsZHMTZ2/ZacxzCs92IY1vdwZZiqQiJWYRa8n2EodeaZ6V3/WkqsSihdlWysykK7oWLAyYj6\n4mce8FwPAQWeWlZk3MuwknUhge94mNWlpVBbW/fd70/9QGVDRHsmzv9gUx+sWEo87zVe/uxwX7YT\ns4C6SRhTOU7Q1FHxjrvt3Xdh0aI26FaJkVjHdDTU9iO59dItvqGVRSMEZ0R5DA8Zl79B7SeDIW8B\n5C5jn18oWV8S93UuMjKsRg2pLYBipaEGl+igbDzPH+0+Um0aiUiCKNaOjLPfxImWojDN9MisipVE\nCulI9SNSLAPSL76hlUUjBGdEqYCi4IR9HH7aJt7YvJpaExSKJ9c+SVGfIgpzC+MS5HKCvc76Eq21\nKFIsxKPhxSvI2Vrnd64R63lbc+2MpvSSJ060LIpUt5iaQmsEoSNZtpBeKc6glUWjOBlR1f5qTExW\nfb6KrC+zKMwtZOG2hQDUmrWudRFPl0dz882T1cONZ8Nr7v22xvlb8ozTae2MZCjZZJPscShlZdYg\n2Ejjmppq7ba220ori0YozC1kXtE8JsydwModKzGVyV7/Xjbt2hRx/4Zy80P8lQkYGZ7sXlO6DACL\nZyNr6TNu7JnFo6zJcLulK4lweTY0wDR4IOxVV4V6CZqiiFMhLVcrixhZt3MdCuV+37VnF4C7il6/\nw/oxadEkfHk+5s0rbHAAGjmJGRkeLIiqqy03wsSJ8a1UwQ0jHQaAxbuRtVRBNvTM4lXWtmgRxIt4\nP5uG3llwXYH6iRtNUcSp0DHTyiIGSstLCZiBiNvOOOIMRvcezYS5E6gu74+xbS8PX9uRW27JB+Ca\n/9vGvupc
lGnU+StPbvrI8Fh6nNGyWOK1VkOkhhE+aC3VpkWIdyNrqYJsSFjFs6xtzSKIJ/F8Ng29\ns3h2plKhY6aVRQz48nx4DA+BQKjCyPJkMdE3kdLyUqrL+2M+/TZmIJPrFyryFwA5ZTzx0X9RFIMY\neDMEn88DOfVHhjdErD3OSFks1dVw/fXW55b2rCM1jFtusc6XCmZyOA35i5tzLkfAN9V9ECkrKdJx\nqSAQotHa/vJItHaZGqtf8bRiUsJajGUwRjr8JXpQ3tWvXa1koigmopiIGvj4QGtU9/al6urXrlae\n0293B+wZnoA6c/wCNepf/1R4f1RQqzCq1ag/veGezznu6teurjewL5ymjgwNHhzk9dYNImvpgLGG\nBh0lY/RqU0boxnOgX3MHWzXnuGSPso+Fxx6zBoY6gxBToWzJHAAX6Z0keiBpMkEPyosv/fzX4lny\nMwLtduLZdyhXXnsh8IMbeyCvEMN7GyogmEY175q3I2/5IJAJeMH0s2NFYcgCO9PWTaMmUMO0ddNY\nMHYBQMSgd1N7nOGDx4InQmtpsLMpk+7Fk6ZaLo35i5tCc91DzQlmp5r7qKwMrrvOGhgKlqWaCokM\nyfLhR6t38axf6YJWFjFQVgYTLs4nUH0cyoSAAb9bZNLnz3+k2luNqUzIWQxFp2FsG4rqvgAzpwxR\nCk/GXwjUBkB5WLm4M6cOrWXKzA9Z6y2hOlANQHWgmnuX3MtbW9+KGPRujgkaLHTy8+NnvkYTZok2\nk5siHFrifookwJurCOMZzG4tl0tpad363GBl9aSCeywZnZPS0uhrv6SyyzBRaGURAyUlzlThAoAy\nobraZOXSDqghJoJYmVK5ZajcZWBnTancpYz511SWP3M2W1floUyD2hq47pEXGXnFFyHX2PH9jgaD\n3g31OBsTJMnqrSbyOrE2zsbSFRsimgBvriKMVzA7GfGgaHXI57Nm1q2utmbZnTIlNXrQ8eychN97\nLGu/pEQMIcloZdEIZWXw1FN1U4UDiGGijFpU3gIMDAoOL2D9zvX4TT8iQsAMoFAIwgvf3kigzwzU\n2rchkAGeWszu8+nSMd8NcnvEg6+Hjw1fbogY9G5IGaRiYDkRxNo4W+IeaEiAN1cRxiOYnWiXS2Mj\nvlNJKIa3BUe4NzcLL9K9h9ehSGu/tHZwvTXQyqIRSkvrKo0InHcedPl5BU/tHovf9GAsuY0rr72Q\n/OE/UFpeSnaHbCuN1l8NAn7Tj8pZgow9A9k2FPJKycpbQ1Gff9HvsH5c/+b1BFSAycsnUzy8mKo9\nVSExi7IyGHpagJoaITNTsWC+J2oPdV+1ouj3n/LnW39k/Kj8pD+rlhIPC6kl7oFkuhaaIoQTXa7G\nlFGqxFESMcdSpHsPf97hlmlzO2jprmC0smiE8Ipz001QWNidTvc8yX235xFQBr9bZHL5A89SNNIH\nwFk9z+K1j1/DVCYKhSEG3rxVjDj9ELp0zKffYUWUlpey/dvtmMrEVCbV/mpmbZzFRN/EEPdTyext\nVFd3BeWhurqWCf95jeKcQ915qBzfvKkUyoQtq7rx21/VwAsbWk1hNKdRpMKAtGT3omMVwokuV7r4\n3yMJdqgbV9Sc4Hukew9PEHGuE2mwXayWXlvwAGhl0QiRGmpZGTxwR0+ccXrV1Yr/zPqQJ7/6DYYY\n1ARq3NHeBgZHZx/Nx1Uf8+rHr+I1vMg6wW/68RgevIYXFVCYmLz76bss2r6IeUVWl6mhadGLj13O\nhIvzXb9q16N3UPnhoaC84FfMmlPF+FF1gnt3l9ms8z7K6N6jyT8kP+5TjTz+OMyaBX37wuTJTW8U\n8XS1tKQnnOhedHN7l4ksV6q5mqIRSbBv2FAXgDdNS7g3hWj37vyPJOCbo1yTlb2VSBKqLERkOPAg\n4AGeUErdE7Y9CygBBgBVwK+VUuUikgk8BhQAJvB7pVRpIsvaEOENNTRDRIGYkLeAWrO23rEew8PH\nVR8TUJZmqSkfAOU+a4rzbiu5qv9VfPLNJ7z76buYyqQmUEPJ+hKmr58edVr0moCHWXOqQvyqBf0N\nKrfUgF+Bt5bRZ2fXreNdbaKMM2HsP3n7k99iiAFYgwqbMtVItPmsHn8cfvtb6/Pbb1vuOqWiN4pI\n54lX77Y5c24lYp6uiNdJ4d5lqriaGiKSYC8ttQLvzjK3VVXNO2+ke48m4JujXNPFemuIhCkLEfEA\nDwNnAJXAShF5VSm1MWi3K4FvlFJHisgY4J/Ar4GrAJRS+SJyCDBHRE5QSpmkAE6GyL59oDCh8H7I\nXVZvP0FcNxMAFYNg+rvW2AtPDXL5cIouLwJg0fZFbnAbcDOjMOGqc3sD3zBt3Vr8podMTyajz85m\n0X+tyufNCNBl8DvcNKwT65Z1ZvTZ2Ywfle8uKqRMA1SGpaRyl7nlqQ5UU1peyoYvNzBr4yxG9x7N\n+AHjI95z8Loe4am9s2aF3bdYDTdSo4h2nnj0bhsqYzyPaS7hwqdk9jZK/c8lXEnFg3gq1FjWsY+2\nPVywO20xEUK4IQHfmHKNFIhPB+utIRJpWQwEtiilPgEQkZnAeUCwsjgPmGh/fgmYIiIC9AbmAyil\nvhSR3VhWxooElrdRgitAcbE1jYY/YKCW/x6OebWewlDYIx8NrzW31LbTMM0sUB4IKNSnpwK4M9s6\njRFg+vrpVPurERH6HdaP8QPGU9SnKKjB5pM/zxI4T+0ey9Rdiy1hd/s8CnOtWEV2tt3rUlb2Fnml\nIeXziIfd1bu5df6tALz9ydsAERVG8Loe4am9o0dbFoXDReMr2WVuoe+g3ZT6N0FFnYBp6Dwt7d02\ndO54HtNcgoWPNyPAU7vHEliwuNlKKlkB03gq1Masq6ZaX4kUwo3FLqLRUAp2OioJh0Qqi65ARdD3\nSuDEaPsopfwi8i2QDawHzhWRGUAulpsqlzBlISLjgfEA3bp1S8At1BFeAcaOtUxfZQqGtOfwry+j\nMkhZGJWDUZ+eirfnEiaPv5i1n69lowRYWFpt9fI9tZh58ykt72D1rO0/h+LhxW6m1IS5E8g/JN/d\np6yizJ3httvIUgILFocIOyoLKSmBadOsXqzHMPj1TSvZnGey9osMAmYAwzCYMmIKszaGmgWzNs5i\n/IDx9XqSzroewam9ZWWWsiLvPW6aNJh1C3rSd+hWJgfyqfZX8/Z2E6PCIMuT5WZ6ZXfIbtK8WFC/\nVxutlxupjJHeY7BgiXZM8L0VjTyqLjutBT3sYOGzvfOzTN1lvbfq8v5MuCGL/ofFNiakrKKMktc3\nM+0Pl+Cv9STcpRWsUKv91UwsnVgvESPmc5U27Ltvjm8/0fEcaP7sAS2JT6Ra9lSqBrifAnoBq4Bt\nwFKg3rSvSqnHgccBCgoKVPj2eBJeAaCul+jxCAUH/JKdnz2Pv+sivJ8Ngf/Ow18rBBYF2DroTabv\nmM4+tQ/GvmfHLErxdluNL+++kOs4wig4Uyq4x+v08pwZbv9wUf8QY
ZddNZJhFzuDCK1zisCxHU/l\nmauWRxR2jkUB0Pewvlzz+jVMWzcNv+kP6UmGWD+VhQw9LWBlankuwHv5cE4aI0z7ahN7q/e653My\nva5/83pMZZLpyQxJEQa45tESKD+VolHdIae+Ygju1RYPL2bC3AmRR7qHlTFcmEXu8VnHlKwvcfd7\n/HG49jqTQCAHPBfw1LoRlN4+CaBeWdz7qCyM2rAjjg2oOIrpJZnWBJTT3mVFIIsVWAp+wYLowsF5\nHvsW3IiqVqCgukYx8en3mJiTlRCrKHwBsOBEjKZerzHffXavDRjeY1B4ycyUlPDtB7f9fdUBSmZX\nUljYPer+wffo8VgZi8HT/MRCKsa3EqksPsOyBhxy7N8i7VMpIl7gIKDKntzqRmcnEVkKfJzAsjZK\npNzroiLcHvyrz3VBjAWcN2EuXX5yLI/7PShTCNSa/OvZlTBknz3KexnkLsNreJky4uGQnrIzRsMJ\nbHsNL5hYSqBDNpMWTWL7t9tDZrj990LFlJnLqcp+HV+ej5Ipndi7zwRlYAXfFd4MZc12C/UsGMfl\nNGvjLPoe1pfJyyezz7/Pzeba69/LhLkTKB5eHHLspGegpkZcl5r/k8Es7BqSvwCAIQaGGNZ4ExTV\ngWqq9lRxy5BbKKsow3fXLdQ89SYEMnnyQT8y9hYCXetcM+FuolkbZ0V1G1lCuRCfr5DCoJrnKuDX\nL6ampnvEHt/09dOpLu/P1L+/j1pzImZAAAMCmdRuHWxZbBDSw3YUoOezk5GSeRF7+VFdErZim3hX\nNe+YWSis2QEa64k6z0PlzQfPbYhZNxfZopI1cY251FlXmykeXsysjbNCEjGa6rIrqyij1F9K8XMj\nqdqUX0+xllWUMeGDYQQu64+x7TSKr72QwsLWHy/k81luw4BpuXOf2j2WoopJUe/dsSAd2TB1Kkyf\n3jSBn4rZU4lUFiuBo0SkB5ZSGANcHLbPq8BYoAy4AJivlFIi0gEQpdSPInIG4A8LjCedaL7R0lKo\nrbWzo0yDNx4cwZQp4PHWWr95aq2GbQvfDCODK/tdWbdmd1DPWSQoIG7CVf2vottB3cjukM3v5vyO\nmkCNpUDKb7KC5MqLvzbA2rJOFI26hZIp25ha9gKoP7nX4+hXUKc8CDmTgLCedpCVMX7AeCYtmhSS\n9uuwYscKhkwbwiO/eMRVLlYDMqmtCYCnfjwEoNfBvTi1+6l0ateJe5fcC1iWRnYHK7+xtLyU2jVj\nwJ8FeKit8SNbT0Id/p6bFQaEKM3RvUeHJAMEu40iCeWQ5/vNHPC8i0EG3gyT7Z2fpaziqJAp5q2y\niP1nZbpl9FyCL8+yLJwetjvgEoW5dTDUCMqs37AbavSFuYVMHAelT9dZq1a5nqGs4qiIwsh1m3Vb\nieeKEfTdN4FVmfdh5iyhJuBpUczF7bRUjWTtW/k8+ZRJba1lOWZeMYLJ4y8OefZOByYWl1yohXhn\nRKXmKEIzZwmSu4yq7PZAfn03ZKSVJ5vpsonluMJCuPyBZ3ls1keovPkEuq5s9Dk7mVp+f/MEvtM5\nra5RGF4/2b0+BFpXcSZMWdgxiOuBt7BSZ59SSn0gIn/HmhL3VeBJ4L8isgX4GkuhABwCvCUiJpai\nuSxR5WwKkXyjPl9d6h5YFaOqCqbM/JDrHnkRf7d33cC3IFzZ70oeHfmoe3xwz9lQBh7D466+5yiU\na16/xp10sNasRfIWgOdWK0hu1PKfjf/g8eKHUP4cFL/Hyjb2gPih6wr8hy9y/czONYOtGMelsv3b\n7RhiuGm+wQRUgOvfvN6NnZBThoy9Bdl6EuS9Z8+JFcpHVR+x9Zut9D20rzt/loFB1R4rv3H3ll6o\ntcMB2woy/NYUKmI9B8cV5jE8XNX/Kor6WJljY/uM5YsfvqBLxy5s+HJDg1ZD8POl6yK4zIdn22mY\nPRYxddcSppdY925sOw0zkGk9N1tZeryKcya8y02/q+tFOrEkR1EYYuDtuQRZoqittRr27i5vMGnR\nJnx5PrJ7dWzQreIIlZIS+OKHL3izwxim7lrM9JJQF1uw9Tm2z1gAN5NuWMkaagKe+tPENCG+Euze\nNKf/HvErey40LwQUtVsHU7WnyrUw+h7WN6o7MBKxJBJEjIs55fJXYxgGf8h5kcnXjYrLKO7wOaCu\nuCJ6zKho5FFM//rqJsXaWpIuW1gIxc9t4LpHXiTQfT4TPlhD/oDEZerFQkJjFkqpN4E3w377S9Dn\nfcCFEY4rB45OZNniRWEhPPywlRkVCFhpfFYvJZ/8AT9Qsn4X09atcf3/wcuvRgocFw8vZu3nawFc\nQfjFD6GTDqqcpTB2mBv7oNyHWeu1XEIoMALWbId2j9/xM5duK0UQdw6r4NHjjksl3KpwqRiEv3wo\nJQdvhr6FTHy62hKYQyYhtgvF4ZAOh/DV3q9cd8WKHVZegiFWsDu7QzbXvH4NU587GMyRWL34APSb\nBrnLKDhsIACrPl/lWlkAJetLmLZuGrWBWkzqJnAUBPn6DfDMwyAzRCg7z9d1reWWYeYuw8Ray8Vx\niz187YVc+56J6Q/g9cKVV3goKjIoLBwRklBQtaeqbmQ+Bqf3OJ3RvxjNnKPu57W3vsfffT73bl+K\nbBfruQioywbh2TYsqlvF6YRMWjSN18KSFULiVHbMwHmOTmciUpwmWgZTNAXi9uo/HQL+TFtRKOu9\neGrJ6LmE7A4Xuwpifvl8q/5sH8i+8tMo+elmCq+JLshiST6IdC+TFk1y79s0Te57bhXUnIcZEDd+\n0K1z96aPqK4oY+LT1VTXnIoZEAIBeOwxmPZ0gMsfeJZ+A/eFTL3TUDys3kSEwYr9/nZuPC6W8UbB\nVGW/jjr5bkwVaLHVGA9SNcCdVowfH3ka8MLcQqgshO9vhrz36DdwX73eGJWFjP1uk5t1A7hzSzmC\nwRCjbmZbBzv24eKpcUd5M/z3sPdgS5HkLnPHe9QGrEGDChVixYgIARWoGw9iY2BwfJfjWf/GQNQb\nk1HKYOpCeFJMav1DwHgbGXcGGd1XuUoo05PJnafdyYS5E0JiH65gtZeg3effh+p+InhuqSt3nxIy\njAzW71xPTfkAVPlNSI+FeLqv4sm1T9Yb9OicW6FQOUvgMh9Sfho3XHIChYWj3P3O6nkWH1V9xJav\nt2AqExGrrFDnFsvP+wHPuDMxtw5Gei6h6NZJUQPsHsODGTAxDIMOGR1cF6E6WYWUTWEFoMlZSiCn\njFnfLye/YmJUoZ1dNRJZvBej+3wy89aExKlqAjWYttYMjxmEx6Ggfk++ZH2Jq2wda+2Kvle4CscN\nYvdYhOmtwTA9eD3CiF/tostJb1M0clI9K1gqT4Lpb6ECmUxbIhT1rS8sgwVuQ8kHDuH34svzYRgG\npm26q7wFGJ5aMMWNH0wunExmZn7UHnykbLphJcOoNvtjGm8jZnuUEpSyZpN+bNaHqC8nuUo5OJHh\nliG3hJ47zP1Z/NwGJnwQ
ptg7ZVGUMw/HDVxWURb1XTjvzpfni0nBJhNRKkpPMs0oKChQq1atau1i\nhFAv3fb+EqbuuoKACuARD1f97Cmm/7HIzbs/+8772XHQrLoedQQMDAzDQCkV6i6qGISUn4b3iMUE\nui52K6rX8IZYE0pZQizLk8UNJ97Aus/X0fewvhQvK7YWcQojc8ep+J98FzPgwbUAAMvNVYucNpH/\n/LN7yBQiYFkBX/zwBXO2zAnJqiotL+WOBXfUlb1ikGshebqt5Jyjz+HVd3dZ8YNAJuKt5dDRk/hi\nV62r/BzqKVCbDCOD98a9B4Bvus+9LydeBPD46setZ4TBXafdBeCWyyMeN160/dvtTF0zNeT3aeum\nuW7BaGWI+O6ChM8Njz9H7dbBZPRcQuntk9iwuiPXjzkGf60Hw+tn8G1/YblRHPLenGdmYJDlzYrq\npqraU8Xu6t38u+zfBFTArQPh8ShBaOdtV8/qyK6yAtDZ2aFrtwcrTo/h4cj3n2DTixejTA+GR3H6\nle8xevzHIZ0dqTwJz7ZhPHzthe5cZbGmQjv73rvkXl77+DUU1pilnj9cyoerDkXlLcBjz4JARWFI\nDz5a0siII0ew4/sdbhszKgdTsOsh1s/tT01tAGVUW1Z7UD3ziJUcEsndNmkS3HGHnaLugQGXzGZF\nxr1QfqpbXz3i4c6hd7pJHcNKhoV0pJx3keHJCOl0OdP+OLG7fof1C1G+jtIBXKXfHERktVKqoLH9\ntGWRQOql25afSmYne1ryz05mzbxz2FetrKwp02T23G9gSPRxh1JxEkd8dyV/vvgE8gdYs9zurt5N\n6aeltOueSe9RX9PvsEtY+3lv16fv9FZcF45Zi2EY3HDiDUxePplqfzXzy+dzUu5JLNy2sN41a7ee\nhDJDA74YAStIYwfv137el/EDxtdzmRiGwR8K/8B3+75zzxfcW/IYHo7s9x2bcv9pNxwPXQ7ogpQf\n6wbwVS188cLtVnaXp8ZtyE7wfONXG+uVO6AClKwvYc3na0IUoN/00+2gbvjyfO50KsE9Njd4DTy5\n9knXCgFL0Duj6x2rBOqsG6exX9H3Cjq168T9S+8PUoiFlqswbwE13Vby5Csb3QywmvdquPeoKbz+\n9g/4a/4CyiBQKyxcaMCQauciIRiG4WanAfXcVG59QTDE4PADD2fb7m31lJpCUe2vjmihlB3acJrx\ntHXT2NTxPyhjNEKWm5E1/83llnsKEyoGoaa/gz9sXXpX4Xx2MiMy7uX1mv8j0HUxGZ4MJp892XXD\nOs/RVCYew8M5R53DnBx21IQAACAASURBVC1z+Kj906ghph3b8to99KlkdsqkKGceZRXgu+sWarcO\nxujxJirXWqAsEAgw+6PZdc9RDLLy1lD8l2qohJLZlTzxzWX4u4bG39ypeoLGMTlehOBUX2+Gyerd\nb8GbQbM0jDuDzLy1bh1zs9kivItgy9+5li/Px1OvbbLaYd40jG4r6jocc26gprw/lPt48oibee+O\nexLqptLKIo7UG/DlC0u3HdWdopx51oCqSZewssZAmViB6CgZRWD1bFTFIMySt9kaaMf1rwaYMvND\nbhllmcSOsFiyfQkew4NSCr/pJ8OT4fY4SstL8Zt+q7enhHWfrwvxBS/ethiPeOoFt40eizAyFbXV\nfhAFR78GR84JcXNNXbPSHWVeWl4act77l96P1/DiN/1MXz+dGw5/jvyPn+Pw/I85e2hnbphzQ52r\nSgy++OELco//lvJS260mylIUygq0OlOWfFT1EeW7yykeXszyyuUhPX0UPLb6sXoN0lEMjkvEsX5K\n1pdQ1KeIG068gfuW3kdABeqeg30Kr+GleHgx+Yfku6PrgwUzwIijRrjPe9TRo6zzf9iDN575A7U1\nuFO8fPl1b1cZElDs2PBzzO73gefmOpdc3oKo9SxgBpizeY7by3TjDWHlUVjWSPnucve3DCODY392\nLOt2rgPAxGR39e5614iUxeWMgQGoDdSicp3Y2VCkx3uYOUsxtw1CyofasbRT3fusrfUz4T+z6X+h\ntRpkYPsJBKa/yexAJnjmwthh1OQu4+rXr7beW8UgWF8ETIY+Jfhzl/Fx1cch7/n0HqdzxE+OcC0/\nx922ZkWWq4wDnho8485CcpbUqw+n9zi9LunDP4lOZ+xGLV2KOIuche0vInywthN/vamWgN9LRoaJ\nKrqBwGV+jG2ncfaZHXll7k/r3q0JZxj/YPTwj103YL/D+oV0Srp17sZn333mKsRgy8KX5+Pe5xe5\n94KnBnPsMGq6rbRSyMsHuNMH1b5XQ0m/lxqMG7UUrSziRLTUzfrptoWU7i7EX2vFoMEPR7wLvr+5\npm+GkeGOTXD85LMe/znvBNrZq+0FuO6RF8kf8IOrCBxfciBQJ+xrAjVc+eqVnNr9VLeSOr3p0b1H\nW0FK2xdsYoKyFJMhhjvK++GrLyP/Ci/3Pvw5rzz/U9RH58KW4SGmenCmVLiP2fGvKxR7P+3LvRPP\ntCq+twbunmJNg2ITMO2eXydg7KeWYmj/Fcx9EAIKwxsgu/dGvrJjMDWBGtZ+vpbL+17OFz98wdd7\nv2bx9sX1hCZA74N788S5T4T0vJ5a95RreTiWRDT3n7n9RJ586BAOz1/EWT3PokvHLiFWjULxyoev\n8NaWt0JcFRNfqybg94ASa0zEp6dQ3n0aeC6FgCLDDsavrVyBGncGUu7D7D6/nhtEKVXXWy/3Mbui\nlFdyb8MQg8HdBltTygQCkYoewpE/PZINX24I+e3fZf9m1NGjQp5N+OC47F4bQlK8XUGauwxyl1t1\nZvsgeHoByl7k66hLJ7PF60f5FRi1rMj4J2vXrsZreDHLh6KCFKbTCXAVxdMLIJBlXWPt5TBuKJtY\n7pZPodwZlB3l7bxH/9I/hSjjo78fz8HdjJC6keXJchVFJLdQOAZWm3j21UqoEVBCjQK2DkYNuRvJ\nXUaXY64iY/Mmat6zOjpZmQajz862LAC7nmV5svj9oN+7lmf57nIyjAwG8yf2bR6Ezwedj9zE7urd\nTJg7gZVzTw+5FykfSmaP9fQ9rC/vlHvDnuGpjb7/lqCVRZyIlk8fKd3WmbNJKYXpqcEYehfe7msY\nceSoENdRsB83fxwsmF7rjmswu8+ntLx9vYwqIMQ62PTVJjZ9tYksTxYXdLqf5Yvb88uzf0r+IYcy\n8qiRIetuOFzZ70rXXWOVYxJdul6MKDtLxq600m2FK1z9pp+S9SU8OvJRHh7xsDtVidfwopSidtsA\nKP1LXcX3Wz3qzJ9nugIoWHGQuwzJXW6V69D3kfLT8ByxmKrsxW7KqpNeG54dFYmt32wNfV/lpa7Z\nD0ScMRiCrLqn32aF3btj7Mtk5c3h7KPODtlXodjn30fRy0WcmHMiL3zwAn7zBJTxDgbt3PdG7jK3\nR154ism/Kx/Ab/oxcpdzdL/dbNxVN6Qow8hgyogpVO2p4oW3Klg3/T63l6nGDiOQu
4yF2xZiECEJ\ngvoxlU1fbap3j37Tz71L7mVg14Fkd8hmzuY5vDpvF2b+JQgGN1x7KFXZm+pSkIMu4cRPzjryLGa/\ndqYt4AUCBps/6AhFQ+uy9nKX4TeF3w74LWtq9rDivaCkjGCrutxnrSrpZNkFMpDyoSHp2YKw9vO1\nVO2p4oYTb+DfZf/Gb/qt8uUtsN6TCRi1bOz4CGxf5r7Pc44+h5tOuikk26ohReE8Q4Wyymknkxhe\nhafnEgLiwWNYcY3J4y9mbd+X3PhJSdUjIa7Q6kA1r737FYFVN7mzSNduG8DC6VbbWDGjhjP/to63\na6xxSeQZ4LnN7SwVnLQHn+1CVnl9rW0mZGUa1gwICUQrizgRa051WRlMmGApFRHhlNP20rv/eIpG\n/qtesM8JiDlpm1NmduS6R17E7D4fr+Fl++sXU+at8yU7Ab3fzfmda647VJf349npl0Mgk3tfruHf\nl5+NmWO5rc458pyQQHSkAYOe3W/h8b6LUoLhVTxy/UWQm8e1b1xLQFnLyE5bN42iPkWMHzDeDXhn\nd8jm+seehelzwO+MY7BcWu0O/CFkzqjwHtjZR53N7Hd2QvmpqLwF+LsuC0lZ7ZDZwd1OXmnEsR4O\nNYEaJsydwOEHHk6Xjl3o1K5TvQCj1/DWUxp/POmPvP7EcWwM6wXX5C6nywFdyPJkhTxrhWLLN1vY\n8s0W64ecpVA0DFU+FJVXCrll1u+5y8jovprFKuBaYQEVcBWFIHgMD1NGTHHn6rpj+RshvUynNw5E\ntKYAK2Pp01NQtmCKhEIx+6PZvPLRK3U9++nzIJCJ8tRw/9KzeOT4y0JTkLEz3I6wXDkbvtzA7Poz\n8tTL2vMaXor6FFHUB3w7R9hxhYWonBXuHUjeQvAGUH7Duk6GyUXnduXZb+pOa4jhdhRcQe4cn7uc\njCtGcOR3V7LxgEesHxfdbNeRFQw8fCBUFjLpGcjuNRLD+Iv7DqIRbEnJuDPcoD25F/Pkmr2s/WIt\nU9dMDQqCd6esooyn5j0VeqKKQWyaPtlV+G4KfFAn6u151TAk6PmNHYZn2zCMHotY7V3CmjLBv+0E\nq94P/z3GvkN56LpfJ3y0u1YWcSLaIknh6bT/v72zj5OiOvP996mamQZ1EUWDKAODaFSSWRlApAXi\nKErEYCTXeJNodnxBuZPIrmg+IbJ7cxdjLiZsNiEqmtGIYTTGZEMkguIbMoL0KIgoKpD4xpvRNZKg\nxui8dJ/941R1V1dXd1X39MzAeH6fT3+mp/r0qfOcOn2e87y7Eoi7Ntc9OoiNaxtoGA2thOdBWnvn\nNJpXHsdd117MHR02S2/OGB9dNULtp2ppfqFZu5vudOpnvDc8a6PpeH0iHLMWlVKMP2Y8cyfOzfFI\nyVJvHf0k1j/VwxunY49cD9zM3kdnMfGdg1i7fSvUrKFz2MYsYynA/Jb5dL4+Ud+bCjSjEFAWa5v+\nF0936LxL8ep4etygvTte3HQIy5tHaiZjt1PhMDhXjXZV0z3ZKd8vPRt72AZSqVSg/t6N9wByYkPU\n7gmkdp7JSWPfYnv/u9JM6Y/v/pGth6wD+4KsU7DLVN1MwPva9nHLsk38/ZVTcry2qH46i5ENP3Q4\ngw8eTL/KfoFOBaAPEtfGr2XzW5v5xspvsHXzoST3DQerE1IBp3GPV1n63rsnkFr6qJ4/KwXnXgXj\nfp6m/6QjTmLbu9uyXJCB7M0rqUi9MZm9f9+bZdx2DxZuQsGWHS3I6JWozZela81zcnMOXclUkoWJ\nhUw7bhqXn3cSb/9tK3/6oJ1n30L7TyCcf9anOGrsfby9/myO+ochNDRU0tL5PtYaSzsdIIwdMpaN\nf9qYIxGkMyRc3kDzC81sXY5mfM4cpM69in3DTmLKVW7231GcNu9brJcfae8osWB3nNQbk5ERa3Vc\njiM9C8L5J5wPJ8Af9v6WX+57ktaHoOP1KVoCgKyYk+YXmjPSq/t83huWy/A90kqQ7XLG2Udx1MF7\nueO59Vpy2pVd6kBdejZ7Bx1Md0d4G2ZRRnhVTjn+14u0G+KgQfp/N9Gf9u2G+fPh2BmvhOZBmjd5\nHs1b4rS35RYY8kolt02/jQF//jwLv+fYCKxO30ajDaipXePZ8Osp1F88nnmTvTmW9KnLtuandeGp\noeth6Ho690xk9ldPJNkBqdTF2kPKbsO+/NxM+g2Ph46qeT/9YxBLtHeVk1PKzbuU443TCstuA0nq\nSGJL2cw8rJlhZ+j6D80vNNP5+qQcY+Kxdb/mjufuyFaViJVji/B6Mandp8LSx0l2VrF9DdjTY6gx\nt2NbNiv+uAKqk1lBkKPq3ufnX1yTTj2xa+XR3LH9ByQfeiDrxCjVz6SDH73Y+d5Odr+/O4dhZY1P\nqYxHleekj9WJjF1CRd29JI/ZgIhNbfusLPVU2p60o96R5ir0c39oMQx+KZ2b7OoJV2e7uboqK9/m\nVTkyQX3Njenn402VD3DjuhsZdNAgKoc/S/ul2WonQTjy4CN558N39BoixfLty1m+fXmOisx19V71\n6io6kg8gnxbOO+E8GDqXeuqpeHNy2t34+NqhWczfRd1RddQNqaNlRwsD+g3Q6ivfHNz74aO0tSsd\njJdKsW6thX26cMqQU6ivmMdPbzyP9nZB1nWS+qczoDoBaKno00d8Op26ZtvmQ7OeC4BKVXDnU1A3\n5EWWvLQkR1LL/R22INXPYF92Doe//SXe+dRvsjI+fHvit/nhWT/UUsrzS/Rv0cfM7Z1TeiQGwzCL\nLiJfbhmvDaOtTUd4p1IZxrF5s04y5uaVevxxqFx7MXbDEnAS6QXlQWpthSVLMhllKyr0vYMidp9/\neqBnM1WcdE6C6mrFQcdv4IEPN5ByFvHvU/146A6d7qCuTqvJNJOr5dzv3cDvP7wua3O1dp5JsrPC\nkY70xi+pGJcPXEq8WutNvR46VvUGxs37V8a0XcOAwzr5z3+vJtkh6chgN++S18//X742irY2bUi0\nLIhVieNDPy/9w6FmTHpTi1VZzL+0HobGsqoMXj76cuqG1GWpuFwIwilHn8Kmp84i6WwoKqlQD97C\nlZ+fANWt3L7pdt3YUadUWpX8/ItPphmFrkRYjeKnWV5bNfsuY9YlX0z7+fsNqCmlYzwsrEAVUhaT\n8W4OKcUpowaz6Ls/SG/YLffE2aIUKSVYysLadTap6g3IiKdQtpBKKuc5WciOM7Ckgi/E/pPa9vEs\n+kycZav2MnrCPgYety1js5CppN74HPaIdcy58LS0F5SXqfvdpE+rPo11yXUZKcoxxr87Yh129V6S\nu07JYiR+qeDoQ45m3DHjeGD7A2mHi+Xbl7PqlVXc9NmNSPNqnYPrqRS/ems2/P26jCTlnNw31LSw\n4U//x8P44lqqSmXmYM8Hu0E+Aql03L/X0JnqZOOfNvLc+udIdZyPSgkqWYm9awqp6qexLItr4tfw\nu62/S9NGy797Nm2X8dt0tHdw5/2vkRyZ
DHx+jP051sA3oabFsVVqaeXBV74DjhrUFjsrFxvAcYcd\nx9Z3t3qYuXajvvbMS4hXj8xZQ+WGYRZdQKE0wl4bhohmGikn0dzevXDbbToPzfz5mlGkUtDZYXPl\nwKXp07OrmslKf3BPppSqCFx2mZsqIjf3zgXTpvPoXZlSq3NmHc6sGbW07o7x0Pcn094yD5IxlLJo\nb9fpDmxbj8Ud61F//gr9BszP3nxPvpA5TwltbZlylrGYnWVgy0ljcvlXgD8xpXkKqmEM1o7TOWHs\n28y58KIc+wjrPiTZNt9JX9LJuEkfsOgHh2VUeTtatDG8+mm45CzGd8xl5sQZDtMOjhSu/VQtC9cv\nzOjlIa3qe/HIQ/jGmsymqlLCsH0N1E8/PqsIldcwCvpA0NbmVCLE0psSHWB3MO/rcWZNrk3f25+q\nxBugt/mtzekAxo5kRzo+5cet2vCdddKv6GDml0YSr67NeC/Va2aq16HFom9eyN5B/amvqWf5yB38\n6P/WgLKorBKm1U1i1S03sKLD5sE7kqTUSaiUzbq7hdWrZxAfC3v/vpcV1d+FoetRWPyk9Zl0enmv\np5ffTXr9Lq0i7Eh2kNo9Pn2aTtntjL70Fzy/9NJc6ceDNz94k7dXj0C9fh3UZDzC2pJt/Me9G+no\n+CwqJXS0K3jwpkzszTlXOx5zmb7TDKu6FfnCPyMPLSaVEv35yc1w8t1ZgXOgpc3U8CewK76LUElV\nlaTn0sv005KCa4OTzrRk4cYffTBkRToBphy7nk6PMV9OvodJkyoYdUQtdUMasg8TuyfAjjM475yB\naUZx+6bbmf3Q7Iw9rfppOOdqZNWtKGVz8/dGMuP07s9Ka5hFF5DPAwqybRiDBunTelub3uDdovLx\nuGYW69Zlx2LE45mUAjnpD+q1NOFKKQ0NzvWA1ADxybXwmxdZtmpvutQqAHvi+pTmnNzdmtlKOWvd\n1uOsqoK6kcPhtUw6kvTm66HNG+XrHXdQnh83qyhD17MdYc7Dv0ozxLS3zfAn0okSsTsY89UVxOMN\nmTnw0jrieWZ+ZiRzLvImhIvT0BCHPTqVuh5bnPHHjGfFH1eQVEkE4bLRlznzC9wakN+rUD6g3a1s\n+MtfSaWmob12bEZ/6XGqDv6YmV8amZlrB8MOHcZN025KG/O9kbjePr33ev/j93W8iMMUj3v/Cr59\n0Sk5fefay2pxs7XenKyFy3TK75u+eSF7t53Lig5Npz506IDLtnZF8/JdtHTem1WgypsKxp8AMMtN\nevcEks4mN/7UJA//YjRrPaqS9o1fR5I6rQYpsHbqE7sXavepdC59GFIxsP5NMxSAHfW8etDTYH0V\ni36kVDJLivvUrm/yjs8O4HrSWWIRO/VuFl0+m82tA1iy7xI6jna87FxnAweC6CC9+7Z7UqjruczK\nyOxKCq4NznV9d8ZKTQvb+z+DldJuzaPGjGJA/FFWPvKBDmSsTrB2JzyzR7sCp72xPOqqVeuF1onA\n0FaueuiqrEBQAD46wlHnCh+1dTLnZytZNHSwCcrbXxHmAeV3m3U3ozlzdC4pf23eqKUbtQpK0ZlK\n8uJ/byNObd6NbdaMWmbNyP5+S4uWYlBaKhg3Dl54QadT9ttXtEpqOFVV2gjvViiJUp3MzY3Vcg9Q\nn5vUzx+pmo7srtmEumwana9PonLkehqm35jTr5fWlntq00zbTQi3ZIlmeC5Nq1fnMlTXRRkK5/cK\nSkExpXkKH2++BuTzoGwsS/jfY89hXnbqoKJKkvrv1XByQybSfMTzNDechFsy1+07/bzj8Zzn4U35\nDbBs1ZlcMNaxmbUlUeKcVFMpsFPc8Zevo9Ykciobep0svLrxeHWca+PXsvC+telN7sGnYO6aCuqv\ngDPuTdLenqSiwuKVrQPSqlPLkrQR18Ji3NHjnHxgZzpxAzaW9Kf65VvZtVFLE9jtyLRrOLbfqbz2\n8TOoVT/R+vpKxaVfG8DCbY4EbXcw/rSPGHjs2YweMpqBsYHpMsTMgAGPn8rC9U/mzL2bykV7Auq2\nXmStz5GtdK5Lkupw7A6eGKmMlKK929buXMtTu54iZi/lkq9fwrZNGQblqkXTDNejrurscPaCSS1Z\nnlq22Jww6AS21jyZkTadGJYzlm5mzSVruo1hGGbRBRRT/3fv3mz1jl8KgWhplltaoNMx+iY7VVZw\nXtDGFoRMnIc+RS9alOnbS8eNN5ZWgCVtIB/ktX/A6tXxQI+aoERz4MaZBBeZyaK1PtdpoKMjY9dx\nxz5vXpxFn3kmLWl5N16IxgDdcWUXIIoRi9mB7tJRUnPnQ5hk46a0qBw5L+1R5kU6OeCOMaSWPsrj\nqf6su9uxmb22R5+ykx2w43RSNS0wtFVX3vMUqAJyVKFeDIwN1EZkT30Vdx1ddqmdbnf77Z4vKe2m\nnBQb+81JjPn7UmbG32fzoM3cuU7o7ABLhD0b61CuvSUJ9seD+fa/dzLn4btpG/wy1s4zMzmnBi/n\nR/c+S2r4E2ywW5HXhSd2PMFip8CYi+ffej5nniusChafu5ja9lnpg41/HaSj/le+Ah+ezoDr9/Dj\nJ5ZqN/bhzzF68Pi8Od1cqQyg0q5Mv3cPLHVD6nTa+xFrUU4Sx3Tm5KH1xCpiWelzFj29CKrbszNP\nVz9Ne1K6NTOtYRZdhNcl1vu/H2FSSCGVlovWVl2iUaxk2pvCG5wXBd44D8vSG4efaUUdc77+XaYn\nkssg583L9ahxx+5ndoXSQHvhr0yWLkblwHUCuP12mD27lmQS1t2tVWmFypfm2yD9BYguH7g0MAV1\nVtsSM4fmOwA0r3wlK79U8+jcVA/uBjf/+208nupPKikZm9kPh1O36SIdJ+N4+7iwxU7XkvBmQPUa\nuV0MOmgQ1oiHSDqn3Koqi0GDcj0BvTVfUBaXD1wKRz7JXTdqF3DdrhbLsRO7KlE3J5lliWYMY2t9\nzEsz/IHHbUMm/wCcgFSFTnkz+6HZsDueVitdMOqCrDLCM06cwdzT5sKeePhhbU+cpd+Kp9ssvvdC\nNr91HOz4JXWffZ8X3zk1E+GuVCaVvZNXzHW19if/89omB33hNV8VQZ8UvaNFx1i8MRlqWrAmL0w7\nSHR3ZlrDLLoId3Nsa9M/iMWLtUrDjzApJGxj9m7Cli0wbgnq5GZiNc9RX/MfkcfrMqVUSm/me/fm\nb5tvzIU2bi/Ts6xs+4eXpqhSkHu/sB+yKxX4nQZcJwCAq67SainQzyufpBSmOso98WcM+zn1tj0n\n0rdfPpHmldthujNXeZhRJHjyLpFUPPf0AFqntwYw2DgXjNXM0b+23NocXlhiccu5twBkp/fwZCv2\nq6hSqg4Z3czk4ZP5wbc+k3Pw2bs3u+ZLZVUSap6EHafT2WGn2y1bpp+PKx1WVLjrSFi82KZ2cC03\n3qidGFw3bxdpKcrN2eV4R3UctJfZ/1+7eevf5yyapusywheMuiBtRNZlgvX9Pv5YHzz8a6O5OSO9\
ntrfD5kdqWbo0kxp90b2Z8sbu8w2yT4VKyjPyf/bipkNILb3aiRvpZNKM1zh8woMcdeIbXco8GwlK\nqT7xGjt2rOoNLFiglGW5y1upykqlEonS+kokdH9B31+wQCnb1vewbaUa5+5QC9YuUIld+W8W1F8i\noVT//rqP/v2LH2vY9/2fNzXlpykqcmhvLNyndwxVVbp9Y2Ph55TYlUjPZ+OtS5VM+VfFzAnKumKi\nmjprTaTx56O9qUmpWL9OhXQoKj5UFVdOVrEbYsq+3lb9v98/5xkWWgcumpqUsiuSCulUVHyorCsm\nZvUV5TkkdiVU1Q1Vivko5qPs623V9GyTnvO1C5R9vZ3+zH3JfFH2FZMU425TMu5nivOuVFR8qJAO\nVRlrV4mEvldlpZ5v7xpJJPS6rZp1urKvt1XVrNNVrF9n1hjzjTlo3fnnyX2GU+f8l8Jq0/NttSkk\nFfr7TCSUqqzqVFpkT6lYLLtdU1NmDYJSsZheU951uWBB+BopBt416WLBAqUs26UnpURK+x17ga5c\nGrrH9vomX65XbzGLREKpiorMIrKs8i8a9z7FbPKF2hfajBKJzOYahWkF0ZrzI46w+UWlpapK/1DD\n5sGlw23rfs+y9PNqavK03ZVQ/b/fP72BVcY69EZjf6SwP1KWncrLGL10eefGsvTGZNv6fiJJvUak\nXTFlnpL5ophPDjOK8pzdNpallGV3KjlvVnqzX7B2Qc5YCm1kiV0J1biiUTWuaMwwGuda7IZYepzM\nRzFzgmLcrQrr4/SmirTruUJvYo2NmbH551mpbCZkX2+rxluXRlorQQeGoHnK/B71hiqSyjok5Pt9\nJnYllH1Kk9JWck2L287/GxfJ/D66cvDKB+/8uweKpvu3pA8e/fvrMbjj6SqjisosjBqqi/CXVa2o\n0HaF1tby+j0XY0yHcLfeQPVLK3zucxlVzV13wZo12W2j2DEKRbJHrY/s78+lfdcuuOOOwuoC9zst\nLZoWNy7lyith2LCAAEqPETr12kTosLRrZtIChBSSM4dBdHnnBjIqFcsCy4ZkUsdgVBz7FLZdRcfO\ncVmGZ5fGMNuVV5Vo2Rb2x4NRkl2DO8pzctVUDfUZTyp/gaPzTzifVa+u0pULlz4GyX6g3PomgKrA\nslOAIlalrxVSc+Z4pE0/Xrsue55boO3HR497n5ya6y1eW4dWYU2cCOvXZxw68jkiqJMfhM1fh2Ql\ndgXU11em59tvA3NrdRfzmwxDays0L9/Jkn3z6Dh6bToeqG3HGGbfcCIpj7eiG9TrevtFsSd2GVE4\nyoHw6i3JwoX/JFvOk0ap4yn21NPYmDmtuCeoKJJDIUQ94UZFIqGlBK86IIo6qqAU4pMsYv06s06j\nQffJR1ciodSMGdkqr1hMnwgb5+5QjbcuVYldCZXYlVBTZ61JqxTcPoqRLNLqmvu3BKokwyTIoPss\nWLtAWVdMVEy5TllXTEz36x1rWqpw1DVz5yo1daqmMdL4A9QrUeClJ999vFKXbWekO1cdGbYGrCsm\nqoqzv6ua7t+SM1f5pKWotEV5HmIltVpv5oS02q/i7O+m596y9FwHqeFKBUYN1fPwqyHch9oVdGVB\nFPtdP7Ow7WjfLWVT6sqYveMMU/uF9ed+7t1wEwn97Lwb/vjx4bYf98Dg1W27Kot898634YXNQVS6\n8n2ej9k13b9FUaltEFR+mN40/Zvw5z6n6fLbGcq5iYUh333c61FsClkMqMBG33T/FjV11posJpLT\nl+fQ4bdFhamFs9dbh2Lkw6riysmqcUWjarp/S3ru3TVfzsOoYRa9AO8PqhwPtbt0ooXuF4tlflyF\nTlDFjDHq5hGVB/bZtAAAEO9JREFU3qambKYWZZzF3i/Ksww66Xp1yVEcHsIYbaHNMB9zKUU68dpd\n0qdYj95eqWDDdbklx3IibB5KkTyDHBJc+O0xrv1IqfwHSf860+snpUSSKtavM+uZehlKOec6KrMw\nNosywtVhel03iwlmg2zXyyj6a/93uqI3jce1jaKYvqKMMXKwW4S+QOvBXb99yyrs/lvq/bzP8rHH\n9L387rYuXa2tul1bWyYQELRu+5ZbMm2C5rWQ/SjIJdt//dpr4eabs20n+ejyjyFI315f780zJVm6\n8KDAUm+AZ4/pziMizKYQdb1FDawsFFPj2lzcfGqPPQZPPAHTp3vsTxYcfTS8+aaglKSjuN014k8N\n1ONzHYWjHAiv/UGycFGqRJCjiw4Q8ct1ryhjKac0EPWekU56ZbpnlH78UsyMGeESiF8/Xsp487lk\n+69bVu5pM5+KLOoYCkk0Qeszii4/DD2luvLfs5ySRSKRbZcK+nzq1FxPJq/HXmVl5rMge1x3zBNG\nsug9lOolERTMFNZP1NNRMfB6+ujEfJmEhf6xlEJroVN2vr4KnYoBJ1irONq9kd/54JViAH7/e3jk\nkYxXV5ZnkgVnnaVPgPnS1Qd5VQXRW1+ffd9kMtPOe105wWtu4KObX8zN7+X2G5a6JSeYMI+H2aJF\nOnjuggt0/1EDPAuhHB5zQTSEIeraLZR2xYXODgDJ5HBisew8at77zZ+vJYpOT5LaadNg/PiMpx9k\nZ5T291FOL8uiEIWjHAiv/UmyKBWlnEC7Q7Lw6lddPWrU+IbuGG8U20Ip43IN0oXoCrJF+L2fSpX+\nwr6bL7jNf90NXps7N7h9ueYwTPKNGoDpPx2H2T2iGvxLWVflOKUnEsXFWgUF+BUr/ZUTGMniwEMp\np/So3/Em9/OnFPefyFz9qj8xH2RSHZQqwRQjCbnj2rUr/3dKlazShYs+ztgZgr7vzzvl92uPMv/5\n2oSNPV8m3KDr/hiZIPtKvnFG1t235Jd8c5NGBttJwuJT/Lr4fBUnS6UhX7+lSjPuvb1xGLZd2J4w\na5aOk2hq0muvs9PNm1beuI2yIwpHORBe+7Nk0Z362GJOXX7Pnqam/Cdr/6m7JyWLYu5d6mksSHqK\nossvFN3uton6rAtJHMWul66knSlWsgiyUQRJB0H9FopPCYvc9kbFd3UdlNOLq9C8hH1nf4jJwrjO\n7h8IEt3LxTiiLjj/xuj94YWlDfD+iP0/6FKZYKF+8ql93HxQgTmOShiHd+7CAraCvtMVV8ywuShV\ntePOm2VplVQxiDqHhfI+RWEMxdLnbV9RUdh1tJj1We7NutQ12FMxVIVgmMV+gqgnIy+iLoTI+X8S\n2VHP7gnIexItNiFZqT+2sM0xKKitFI+eMDQ16UA7v3dTIYQlNCzHabWU9eKlyT0A5DvhdnWTKURj\noWfrZchuO6/NIx/DTiT0Mxo/XjPAYjy6CtloXBuPG3m+PyHsGZWb0RlmsZ+gmJORv31XfhD+drGY\n3kQqK3Mjb6OerL0oZWOMcvr0Jojzj6tcqoNSg/r8m59fNdbVH7G7Mdp2LkOPQm+YKqocm0yxfbiq\nuyA1Ypgq0H/IcdOmdOUg5VUZdUc0dFfhV2nNnZtLb7kD
IaMyC2Pg7mZ4DYt+I2BgQrOW6Ia6qMZt\nN6GeUtoQN2xYtpHUNXoXg3xGyai1Llza/P00NOhXPnfSsOR4QfCPadmy7M+XLcutQRJER76EhpnC\nTqUbKFtb9XfcJIS2nRtwF0ZvPnfbIIN6W5t24/S7+YahWCcMf0JHd65Az6FbT95lCe3t2pnAnWPX\nsQIyRnV/6dog5FsrXldnKBw4G2Sc727jc0tLJnAvlYKFC/V124Zbb9XrtNTfQZcRhaMcCK/9VbLw\no6dFzLA+u3K/MJVSVJ1+MaoR96QaVRIKumeYZBFlTsr9nBYsyLbTuEkco6wX7+f5bAreMXtTS3Q1\nmC4KguwUXgltxoxgZ4auBqkFfRZVsig05igSfz77WtgYE4lsN9x8udqMzeITwCzCUOxmGPT9KPmE\nXJRTpI3iL98V2tw+imE4eRPmNeXXVxdjCyqns0LUbLre7xTLfBOJ3CjirhTsChtfkHNEPsN3Y6O2\nTXhVb+56yWfPKOXwEbSZ+7/jH+PUqdm2pHxJQoMYclVVeFsv03YZvp9Z5MsC3VUYZnEAohw671K8\ncaKcorua+qNcJ/GoLppduW93SHdRUCwzLZXR+0+v3VGwq9BG3tiYMcS7G2kptoQg+gtJVmFj9W7a\n+SSLsPE1NuYmk4TgzMNBqVtchuHOk9emU4zNphhEZRbGZrEfoaupO0r5fj4dtDeILyjYqpi+ykGb\niyB9baG+uzPQsdwoNpVDqbrreFzbQ370I/1/voJAYSjWPgWZJIhuihCR7PaF0qb44ad/0KDoddb9\nY/XaCWbPhiefzF0DtbWFk4S2tsKSJZkgzzD4bUzuvWtrM2uhoSGTjmbAgEyRtVisa4GEJSEKRzkQ\nXkayKN+J2NtPsR453T02t69ibCW9gXKoqKL0EUXl4pdW8qk/ih1bVPuUtw66P94ninQYNg6veiss\nMDGfHSOqpFVonH7vLsvSUkYhlWJTU650kc9LMt8Yu7rW2B/UUMA5wB+AV4HrAj6PAb92Pn8GqHGu\nVwJLgReBbcC8sHv1BWahVNcffDk2qVJ9/aPqiruDtnz68XLaFYoZZ0+7qObrI8gOUu54kEJu4Pnq\noAepcsqxPgoxwUJzWoz6Kt84g1RXXQl09CIfIyxF7eZHrzMLwAZeA44FqoAXgFG+Nt8Efua8/yrw\na+f9RcB9zvuDgB0uI8n36ivMYn9AKYu+u0/3Ufv3n2h7o8xtFEN/2HyWa0PP52FVLkZUyIAbREdQ\nJH45GXqhvsrxXLpy/658L59dpRy2p/2BWcSBRzz/z/NLCMAjQNx5XwG8i660/jVghXNtEPBH4PBC\n9zPMorwodtGXO1Co1P697UQym2V3jCkfymFw707Jwv2sq6d4N9CzK3XQe1KFuD+qK4uB/5lFUbtF\nQVRm0Z0G7mOA3Z7/9wCn5mujlOoUkfcc5vBb4HzgLbRkcY1S6i/dOFYDH3rK2Fru/r3tbFsbUP2Z\nYrsb5TD0l8PI7gbEuQbShoZsw39XjKPeQE83a2opdJTL8SEKestxoVzwP7P6em3odqsmulUZuwv7\nqzfUeCAJHA0cBqwTkceVUq97G4nILGAWwLBhw3p8kAYZdPcPMXKhGl876J3NId9mXAxTLUehm+4q\nllMuOnoyGrknIrB7Ej3N/ERLId3QsUgcmK+U+rzz/zwApdSNnjaPOG1aRaQCeBs4ErgFeFopdbfT\nbgnwsFLqN/nuN27cOPXss892Cy0GBuVEX9m0ykVHT8xHOetX9DWIyCal1Liwdt0pWWwEjheREcCb\naAP2Rb42DwCXAK3Al4EnlFJKRHYBZwJ3i8jBwARgUTeO1cCgx9CrpTHLiHLR0RPz0ZPqrr4Kq7s6\nVkp1ArPRRuxtwG+UUi+LyPdE5ItOszuBQSLyKnAtcJ1zfTFwiIi8jGY6dymltnTXWA0MDPo2XHWX\nbfdw8r0+hG5TQ/U0jBrKwMCgEPqK+q/c2B/UUAYGBgb7DfqK+q+30G1qKAMDAwODvgPDLAwMDAwM\nQmGYhYGBgYFBKAyzMDAwMDAIhWEWBgYGBgahMMzCwMDAwCAUfSbOQkT+DOzsodsdgc6Q29fxSaET\nDK19EZ8UOqFrtA5XSh0Z1qjPMIuehIg8GyWI5UDHJ4VOMLT2RXxS6ISeodWooQwMDAwMQmGYhYGB\ngYFBKAyzKA239/YAegifFDrB0NoX8UmhE3qAVmOzMDAwMDAIhZEsDAwMDAxCYZiFgYGBgUEoDLPw\nQUSWiMg7IvKS59rhIvKYiLzi/D3MuS4icpOIvCoiW0RkTO+NvHiISLWIrBGRrSLysohc7VzvU/SK\nSD8R2SAiLzh0Xu9cHyEizzj0/FpEqpzrMef/V53Pa3pz/KVARGwR2SwiK53/+yStIrJDRF4UkedF\n5FnnWp9avwAiMlBEfisi20Vkm4jEe5pOwyxy8QvgHN+164DVSqnjgdVkKvpNA453XrOA23pojOVC\nJ/AtpdQodOnaq0RkFH2P3jbgTKXUycBo4BwRmQD8EPiJUuo44K/ATKf9TOCvzvWfOO0ONFyNrlDp\noi/TeoZSarQnzqCvrV+AnwIPK6VOBE5GP9uepVMpZV6+F1ADvOT5/w/AEOf9EOAPzvsm4GtB7Q7E\nF/B74Oy+TC9wEPAccCo64rXCuR4HHnHePwLEnfcVTjvp7bEXQeNQZ/M4E1gJSB+mdQdwhO9an1q/\nwKHAG/7n0tN0GskiGgYrpd5y3r8NDHbeHwPs9rTb41w74OCoH+qAZ+iD9DpqmeeBd4DHgNeAfUrX\niodsWtJ0Op+/Bwzq2RF3CYuAuUDK+X8QfZdWBTwqIptEZJZzra+t3xHAn4G7HNXiz0XkYHqYTsMs\nioTSrLpP+RuLyCHAMmCOUup972d9hV6lVFIpNRp96h4PnNjLQ+oWiMh04B2l1KbeHksPYZJSagxa\n9XKViHzO+2EfWb8VwBjgNqVUHfAhGZUT0DN0GmYRDf8tIkMAnL/vONffBKo97YY61w4YiEglmlH8\nUin1O+dyn6VXKbUPWINWxQwUEbcOvZeWNJ3O54cCe3t4qKViIvBFEdkB3IdWRf2UvkkrSqk3nb/v\nAPejDwJ9bf3uAfYopZ5x/v8tmnn0KJ2GWUTDA8AlzvtL0Lp993qD430wAXjPIxbu9xARAe4Etiml\nfuz5qE/RKyJHishA531/tF1mG5ppfNlp5qfTpf/LwBPOyW2/h1JqnlJqqFKqBvgqeuwX0wdpFZGD\nReQf3PfAVOAl+tj6VUq9DewWkROcS1OArfQ0nb1tvNnfXsCvgLeADjRHn4nW4a4GXgEeBw532gqw\nGK3/fhEY19vjL5LWSWjRdQvwvPM6t6/RC/wjsNmh8yXg/znXjwU2AK8C/wXEnOv9nP9fdT4/trdp\nKJHuemBlX6XVoekF5/Uy8G/O9T61fp2xjwaeddbwcuCwnqbTpPswMDAwMAiFUUMZGBgYGITCMAsD\nAwMDg1AYZmF
gYGBgEArDLAwMDAwMQmGYhYGBgYFBKAyzMDAIgYgknaym7uu68G9F7rtGPBmODQz2\nV1SENzEw+MTjI6VThRgYfGJhJAsDgxLh1FJY6NRT2CAixznXa0TkCaeWwGoRGeZcHywi94uuq/GC\niJzmdGWLyB2ia2086kSZIyL/IrrWyBYRua+XyDQwAAyzMDCIgv4+NdRXPJ+9p5SqBW5BZ3sFuBlY\nqpT6R+CXwE3O9ZuAJ5WuqzEGHXUMuu7AYqXUZ4B9wAXO9euAOqefxu4izsAgCkwEt4FBCETkb0qp\nQwKu70AXVXrdScj4tlJqkIi8i64f0OFcf0spdYSI/BkYqpRq8/RRAzymdAEbROQ7QKVS6vsi8jDw\nN3R6h+VKqb91M6kGBnlhJAsDg65B5XlfDNo875NkbIlfQOf4GQNs9GSNNTDocRhmYWDQNXzF87fV\neZ9AZ3wFuBhY57xfDXwD0sWYDs3XqYhYQLVSag3wHXTq8BzpxsCgp2BOKgYG4ejvVNlz8bBSynWf\nPUxEtqClg6851/4ZXdXs2+gKZ5c5168GbheRmWgJ4hvoDMdBsIF7HIYiwE1K1+IwMOgVGJuFgUGJ\ncGwW45RS7/b2WAwMuhtGDWVgYGBgEAojWRgYGBgYhMJIFgYGBgYGoTDMwsDAwMAgFIZZGBgYGBiE\nwjALAwMDA4NQGGZhYGBgYBCK/wEzkueh6np4FAAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
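The regenerated image above is the training-history graph that the next markdown cell interprets. For reference only, a plot of this kind is typically drawn from the object returned by `model.fit`; the sketch below is a self-contained stand-in (the data, model, and every name in it are illustrative assumptions, not the notebook's own code):

    import numpy as np
    import tensorflow as tf
    import matplotlib.pyplot as plt

    # Tiny stand-in training run so the plot below can be reproduced.
    x = np.linspace(-1.0, 1.0, 1000).reshape(-1, 1).astype(np.float32)
    y = x ** 2
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(1,)),
        tf.keras.layers.Dense(1),
    ])
    model.compile(optimizer='adam', loss='mse', metrics=['mae'])
    history = model.fit(x, y, epochs=10, validation_split=0.2, verbose=0)

    # Plot training vs. validation loss per epoch -- the kind of graph the
    # omitted PNG above contains.
    epochs = range(1, len(history.history['loss']) + 1)
    plt.plot(epochs, history.history['loss'], 'g.', label='Training loss')
    plt.plot(epochs, history.history['val_loss'], 'b', label='Validation loss')
    plt.title('Training and validation loss')
    plt.legend()
    plt.show()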
@@ -820,13 +862,12 @@
"colab_type": "text"
},
"source": [
- "Great results! From these graphs, we can see several exciting things:\n",
+ "Great results! From these graphs, we can see two exciting things:\n",
"\n",
- "* Our network has reached its peak accuracy much more quickly (within 200 epochs instead of 600)\n",
- "* The overall loss and MAE are much better than our previous network\n",
"* Metrics are better for validation than training, which means the network is not overfitting\n",
+ "* The overall loss and MAE are much better than our previous network\n",
"\n",
- "The reason the metrics for validation are better than those for training is that validation metrics are calculated at the end of each epoch, while training metrics are calculated throughout the epoch, so validation happens on a model that has been trained slightly longer.\n",
+ "The reason the metrics for validation are better than those for training (and not merely identical) is that validation metrics are calculated at the end of each epoch, while training metrics are calculated throughout the epoch, so validation happens on a model that has been trained slightly longer.\n",
"\n",
"This all means our network seems to be performing well! To confirm, let's check its predictions against the test dataset we set aside earlier:\n"
]
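The code cell whose outputs change in the hunks below evaluates the network on the 200-sample test split; its source lies outside this diff, so the following is only a minimal sketch of that step under stand-in data and illustrative names (`model`, `x_test`, and `y_test` are assumptions, not the notebook's identifiers):

    import numpy as np
    import tensorflow as tf

    # Stand-in test split: 200 samples, mirroring the "200/..." progress
    # line recorded in the cell output below.
    x_test = np.linspace(-1.0, 1.0, 200).reshape(-1, 1).astype(np.float32)
    y_test = x_test ** 2

    # Stand-in model compiled with MSE loss and an MAE metric, matching the
    # "loss" and "mae" fields that evaluate() prints.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(1,)),
        tf.keras.layers.Dense(1),
    ])
    model.compile(optimizer='adam', loss='mse', metrics=['mae'])
    model.fit(x_test, y_test, epochs=5, verbose=0)

    # evaluate() returns [loss, mae] and prints the progress line; predict()
    # yields the values one would plot against the true test labels.
    loss, mae = model.evaluate(x_test, y_test)
    predictions = model.predict(x_test)
    print('test loss: %.4f, test MAE: %.4f' % (loss, mae))

Note that the hunks that follow change only the recorded cell outputs (output id, cell height, and the printed progress line), not the cell's code.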
@@ -836,10 +877,10 @@
"metadata": {
"id": "lZfztKKyhLxX",
"colab_type": "code",
- "outputId": "b792a12e-713d-4b07-9f8e-de0d059d5cdb",
+ "outputId": "021c3cdf-1a38-4f7c-e535-885a87d8c09e",
"colab": {
"base_uri": "https://localhost:8080/",
- "height": 298
+ "height": 318
}
},
"source": [
@@ -862,14 +903,14 @@
{
"output_type": "stream",
"text": [
- "200/200 [==============================] - 0s 146us/sample - loss: 0.0124 - mae: 0.0907\n"
+ "\r200/1 [=======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
=========================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================] - 0s 71us/sample - loss: 0.0103 - mae: 0.0718\n"
],
"name": "stdout"
},
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJztnXmYVMW5/z9v9yzgEpVR44KIMRhj\nnJ+Ak+iJim3QuMS4EaOJZhSJjQtRkmvQyY0JuS4ImlyMIDIKyFwTjHEUl2gkoq2irTgoCRE1oBcR\nl6ijeF1glu76/VHnTPf0dPf0TPdMb+/nefrpPnvV6XO+VfXWW2+JMQZFURSlvPDlOwGKoijK4KPi\nryiKUoao+CuKopQhKv6KoihliIq/oihKGaLiryiKUoao+JcwInKWiCzLdzo8RGSoiDwgIh+LyJ/z\ncP2AiGyKW35JRAL9OM8RIvJqThM3iIjIuSKyIt/pSEfif5XD8xZ83gcLFf8MEJEfikiLiHwqIu+I\nyMMicni+09Ubxpg/GGO+ne90xPE94ItAjTHm9HwnxhjzNWNMqLf9RMSIyJfjjnvKGPOVAU1ckSEi\nI937VJHvtCiZoeLfCyLyM2A2cC1WuEYANwMn5zNdvVGgL+HewL+MMZ3ZnqhA86coxYMxRj8pPsAO\nwKfA6Wn2qcYWDm+7n9lAtbstAGwCpgHvAe8ApwAnAP8CPgR+EXeu6cDdwJ+AT4AXgIPitl8BvOZu\nWwucGrftXOBp4L+BVuBqd90Kd7u4294D/g9YAxwYl88m4H3gDeCXgC/uvCuAG4CPgP8Fjk9zP74K\nhIDNwEvASe763wDtQId7TyclOba3/G8ALgf+AbQBFcAeQLOb9v8FLonbfyhwu5vutcDPgU0J5zva\n/e0HfhF3f1cBewFPAgb4zE33Gd7/2lue3W23A3OBv7jnfQ7Yt7f/JMm9mQi87J7jdWBy3LYA9jn7\nD2LP2cS47TXA/e41VgJXec9Fimv9GXgX+NjN/9cS7ulv3efkY/fZGApsdO/Tp+7Hcf/PO+KOHenu\nU5FpnlKkbx5wQ8K6+4CfZfierEiWHnddCPhx3PJ5bho/Ah4B9u7rf1eon7wnoJA/wHFAZ/zDkWSf\n/wKeBXYFdgGeAa5ytwXc438FVALnY0Xqj8D2wNeALcA+7v7TseL4PXf/y7CCVuluPx0rdj6sCH0G\n7O5uO9e91k+wojg04UE/FitoO7oP7lfjjm1yX57t3RfiX7ji7J6jw027H7gQW8hJkntRCazHimgV\n8C33BfxKXP7uSHMve8v/BmA1VpSHuvdhlXt/q4AvYUXkWHf/64CngGHuMf8ktfj/3H2Bv+Len4Ow\n5imwAvHluOMC3nkyyPPt2ML4G+7/8gfgzt7+kyT35jvAvu5+RwKfA2MTnrP/ctNzgrt9J3f7ncBd\nwLbAgcBbpBf/89xnwavYrI7bNhcrkHu6z8M33f1G0lNIu/3fiftkkKdU4j8OeBP3GQR2wr5He2T4\nnmQk/tjW/Xr3f6nAVoqe6et/V6ifvCegkD/AWcC7vezzGnBC3PKxwAb3d8B9KP3u8vbuw3ZI3P6r\ngFPc39OBZ+O2+bC1uCNSXHs1cLL7+1xgY8L2+Af9W1hRPxS3Vu+u92Nr5AfErZsMhOLOsT5u2zZu\nHnZLkp4jsDXG+PMvAabH5a838U+Zf6xYnxe3/ZAkeW4AFrm/XweOi9sWJLX4v+rdyyTpSif+veX5\nduC2uG0nAK+k+08yfDaXApcmPGfxIvaee14/tkDdP27btaQR/4Tr7Ojmfwf3/9hCXGssbr+R9FH8\nM8hTKvEXbEtjnLt8PvBYmjwkvieZiv/DxLVQ3fx/jjVf9vu/K5SP2vzT0wrs3It9eQ9sE9jjDXdd\n1zmMMRH39xb3+99x27cA28Utv+n9MMZEsc35PQBEpF5EVovIZhHZjK3F7Zzs2ESMMY8Bc7A1t/dE\npFFEvuAeX5kkD3vGLb8bd57P3Z/xafbYA3jTTXeqc/VGyvwnbse+hHt498O9J7/A9s10pSchLanY\nC1uQ95VM8vxu3O/Pce9dmv+kByJyvIg8KyIfuvk8ge7/favp3pfiXWcXbK01o/sgIn4RuU5EXhOR\n/8MWkLjX2hkYQv/uU7Jr9ZanpBirxHcCP3BX/RDbovLO29t7kil7AzfGnedDbMGzZ1/+u0JFxT89\nYaxt+ZQ0+7yNfUg8Rrjr+ste3g8R8QHDgbdFZG/gVmAK1hyxI9aMIXHHmnQnNsb83hhzMHAAsB/W\n1PEBtmaYmIe3+pH2t4G93HT391xJ8x+3PT6PbwL/a4zZMe6zvTHmBHf7O/Hnc9OSijexJoi+klWe\nU/wn3RCRamy/xg3AF93//iG6//epeB9rEsr0PvwQa+44GlvbH+klA/usbCX5fUr27H2GbSl67Ob9\nyDJPYFtX33Pfi0Pcc5HhexKfPlKlEftMTE54voYaY56BzP67QkbFPw3GmI+x9uS5InKKiGwjIpVu\njWWWu9sS4JcisouI7Ozuf0cWlz1YRE5zWxtTsYXPs1h7rcG+zIjIRGyNJiNE5OsicoiIVGIf+q1A\n1G2V3AVcIyLbuy/Pz/qZh+ewNc5p7n0KAN/F1tIyJVX+k7ES+ERELnfHEPhF5EAR+bq7/S6gQUR2\nEpHh2P6QVNwGXCUio8Ty/0Skxt32b2x/QjL6nedU/0mSXauwdvX3gU4ROR7IyIXX/X/vAaa7z+8B\nwDlpDtkee89bsaJ4bdy5osBC4Hcisod7vx1XyN930x5/n1YD40RkhIjsgDXJZZ0nNy0vYguj24BH\njDGb3U0ZvyfGmPexhfTZbl7Oo3vBdgv2+fmae64dROR093em/13BouLfC8aY32LF8JfYB+pNbK1i\nqbvL1UAL1gNlDdZD5eosLnkftpPqI+BHwGnGmA5jzFqsl0UYK0a1WO+eTPkCtkb0EbbZ3wpc7277\nCfYBfh3rvfFH7EveJ4wx7VjhOx77Yt4M1BtjXunDaZLmP8X1IsCJwGhsx7AnBju4u/wGm9f/BZYB\n/5Pmur/DFhbLsN4bC7CdymBt14vd5v/3E9KQTZ7T/Sfx1/gEuMRN30fY2vn9GZzfYwrWBPQutg9i\nUZp9m9y0vIX1lEkseC/DPufPY80gM7E278+Ba4Cn3ft0qDHmb1jPrX9g+7YezGGewD6nR7vf3nn7\n+p6cj62xt2IdMJ6JO9e9bv7udE1g/8T+z5Dhf1fIeL3lSgEgItOxHYtn5zst+aDc868og4nW/BVF\nUcoQFX9FUZQyRM0+iqIoZYjW/BVFUcqQgg2OtfPOO5uRI0fmOxmKoihFxapVqz4wxuzS234FK/4j\nR46kpaUl38lQFEUpKkQk3Uj2LtTsoyiKUoao+CuKopQhKv6KoihlSMHa/BVFKU06OjrYtGkTW7du\nzXdSipohQ4YwfPhwKisr+3W8ir+iK
IPKpk2b2H777Rk5ciQimQbxVOIxxtDa2sqmTZvYZ599+nUO\nNfsoijKobN26lZqaGhX+LBARampqsmo9qfiXEOEwzJhhvxWlkFHhz55s76GafUqEcBjGj4f2dqiq\nguXLwXHynSpFUQoVrfmXCKGQFf5IxH6HQvlOkaIUNkuXLkVEeOWV9FMv3H777bz9dv8n5wuFQpx4\n4on9Pn6gUPEvEQIBW+P3++13IGDXJ5qC1DSkKJYlS5Zw+OGHs2TJkrT7ZSv+hYqKf4ngONbUc9VV\nMZOPZwq68kr73djYfVkLAKVYyHWl5dNPP2XFihUsWLCAO++Mzbg5c+ZMamtrOeigg7jiiiu4++67\naWlp4ayzzmL06NFs2bKFkSNH8sEHHwDQ0tJCwK1prVy5EsdxGDNmDN/85jd59dVXc5PYAUJt/iWE\n43S38yeagpqbuy83Ndl9amqgtdW2FrSfQCk0BqI/67777uO4445jv/32o6amhlWrVvHee+9x3333\n8dxzz7HNNtvw4YcfMmzYMObMmcMNN9xAXV1d2nPuv//+PPXUU1RUVPDoo4/yi1/8gubm5uwSOoCo\n+JcwNTXg80E0CiIwejQ89ZR9iSoqYOFC6Oy0230+qK7WjmKl8EjWn5XtM7pkyRIuvfRSAM4880yW\nLFmCMYaJEyeyzTbbADBs2LA+nfPjjz/mnHPOYd26dYgIHR1Jp54uGFT8i4hw2D74mdTQw2GYOtWK\nuzH2xbnpJpg929byV66E++6z28AWALl6sRQll3j9WV7N3+vP6i8ffvghjz32GGvWrEFEiEQiiAin\nn356RsdXVFQQjUYBuvnZX3nllRx11FHce++9bNiwocscVKiozb9ISLTf92b79GpLnrgbY5c9887D\nD8e2ga35p3uxtKNYyRfJ+rOy4e677+ZHP/oRb7zxBhs2bODNN99kn332YYcddmDRokV8/vnngC0k\nALbffns++eSTruNHjhzJqlWrALqZdT7++GP23HNPwHYSFzoq/kVCX105vdqSNw4kXtxDIdsiALv9\nlFPg6qtTv1h9LXgUJdc4DjQ05KZVumTJEk499dRu6yZMmMA777zDSSedRF1dHaNHj+aGG24A4Nxz\nz+WCCy7o6vD99a9/zaWXXkpdXR1+v7/rHNOmTaOhoYExY8bQ6b1ghYwxpiA/Bx98sFFiPPOMMUOH\nGuP32+9nnun9mPnzjamsNEbEmIoKu+ydq7rarq+u7v1c115rrwv2+9pr06fz2mszS59Snqxduzbf\nSSgZkt1LoMVkoLE5sfmLyELgROA9Y8yBSbYLcCNwAvA5cK4x5oVcXLtc8Jq+mdr8wZp4olFr3jHG\nLnvEm4N6I53NNb4fAnSUsaIUC7nq8L0dmAM0pdh+PDDK/RwCzHO/lT6Q6MrZG4GAHfQVjdpvT6BD\nIWs+Msaaf6ZPt59U505V8CS64J1zTu69MhRFGRhyIv7GmCdFZGSaXU4GmtwmybMisqOI7G6MeScX\n11dS49n842NAeTX5tjZbMDz6qHUBTVdTT1bwxPdDtLXBCy/YQgbStxC0QFCU/DNYHb57Am/GLW9y\n13VDRIIi0iIiLe+///4gJa108Tp2vRq+10ns1eSPPjo2DqA/8YC8QsQ7R0uLLWTOP797QaIdxopS\neBSUn78xphFoBKirq8vAGq2kIxCAGVzOGfyBDdF92anmOggDoRDO5s3cvSnEi2YIrQzjfXbDqakH\nMq+We4XI9Om29RCN2kJmxIjuwj99eqyVoeYgRSkMBkv83wL2ilse7q5TckljI5/cuIAPPxvCdnsP\nw3nnJQ6NrANgL/MWctER1i7T0QHGsD1whHdsBOSSRVD7eJ+U2XGsuHsjhxODyo0fHxP+3sYSKIoy\neAyW2ed+oF4shwIfq70/xzQ2YiZPZru1KxnxxpMMe3IpZt06BLo+RCJdwu/RbXs/Y0GnGoTj9Ql4\n4SXq6pL3K+gAMmWw8fv9jB49mgMPPJDTTz+9a2BXf4gP2Xz//fdz3XXXpdx38+bN3HzzzX2+xvTp\n07vGHeSKXLl6LgECwM4isgn4NVAJYIy5BXgI6+a5HuvqOTEX1y0lEl0m+9w56o40TDu3j99vP15A\nn0R6G+KbJlHJOoQ9byPPs+jvf09+WnUPVQaboUOHsnr1agDOOussbrnlFn72s591bfd84X2+vtWP\nTzrpJE466aSU2z3xv+iii/qX8BySK2+fH/Sy3QAX5+JapUi8AFZUxGLx9EkMJ0yAZctI7CjpKgwO\nOgjmzbO/QyHYvNl+DxkCw4bBbrtBfX36Ib59VGjHgfPOg/nzu3c6p4s8Gr9dPYSULgbwYTjiiCP4\nxz/+wYYNGzj22GM55JBDWLVqFQ899BCvvvoqv/71r2lra2Pfffdl0aJFbLfddvz1r39l6tSpbLPN\nNhx++OFd57r99ttpaWlhzpw5/Pvf/+aCCy7g9ddfB2DevHn8/ve/57XXXmP06NEcc8wxXH/99Vx/\n/fXcddddtLW1ceqpp/Kb3/wGgGuuuYbFixez6667stdee3HwwQfnNN8F1eFbrjQ1wdatViC9CrkX\niyfjztFgEIFuNv+aYSQX9b6+PH0Mqxj/ntbXw+LF3fsD4renGkCmLQKliwF8GDo7O3n44Yc57rjj\nAFi3bh2LFy/m0EMP5YMPPuDqq6/m0UcfZdttt2XmzJn87ne/Y9q0aZx//vk89thjfPnLX+aMM85I\neu5LLrmEI488knvvvZdIJMKnn37Kddddxz//+c+uVseyZctYt24dK1euxBjDSSedxJNPPsm2227L\nnXfeyerVq+ns7GTs2LEq/qVGOGxDK3tm+IoKax/3av7drDC91X6CQbYPBtk+g137RB/CKiZ7T+MH\niEH67Yn9BTpgTBmIh2HLli2MHj0asDX/SZMm8fbbb7P33ntz6KGHAvDss8+ydu1aDjvsMADa29tx\nHIdXXnmFffbZh1GjRgFw9tln09jY2OMajz32GE1Ndtyr3+9nhx124KOPPuq2z7Jly1i2bBljxowB\n7CQz69at45NPPuHUU0/tCi+dzpTUX1T884w32has6E+aZGvLPYS7sREuvtg2DXoJvJ/zilK6Ib7u\nw+21LpK9p/EBuWbMSL/dI9dhfJUiZgAehnibfzzbbrtt129jDMccc0yPaR6THddfjDE0NDQwefLk\nbutnz56ds2ukQqN65pn4uXeHDIlZaLoJYjgMU6bEOmrb2pJ65XheM01NAzCZe2KiwmE46ii45Rb7\nce05qeYSTpbfdO9xrsP4KkVMnh6GQw89lKeffpr169cD8Nlnn/Gvf/2L/fffnw0bNvDaa68BpJwD\nePz48cxz+9kikQgff/xxj/DQxx57LAsXLuTTTz8F4K233uK9995j3LhxLF26lC1btvDJJ5/wwAMP\n5Dx/WvMfZBLNMRkFbAuFMJ0RBDCA+Hw9VDOx0zhVmIWc4VXxPTo6YOpUnLFjeW52PQ+2Oj3y4+
Xd\nm1CmN5NUX2MZKSVMHh6GXXbZhdtvv50f/OAHtLW1AXD11Vez33770djYyHe+8x222WYbjjjiiG6C\n7nHjjTcSDAZZsGABfr+fefPm4TgOhx12GAceeCDHH388119/PS+//DKOm7ftttuOO+64g7Fjx3LG\nGWdw0EEHseuuu/L1r3899xnMJPRnPj6lGNI547DMCXGR/zH/GfMZQ00HPtNGpVk/bX6PQxLDLl9w\nwQCHVvbiQseChsY+fn8sfnTc7n0NSa2UJhrSOXfkPaSzkhkZ9VslMdg/2OrwF99yjoiGeMoX4Ds7\nOjQkHJZoFk3ltZkzHAcef9zamF54AZ5/PtZrHYnABRfY38EgoB24ilJoqPgPImn7rTybyMaNPVQy\nEHC4qtrh2XaHqiq4PtDz3P2J958tYRxCIxxOHBOmdnUA2tutWQrbopSLLoLaWnAc7cBVlAJDTCaz\neeSBuro609LSku9k5BxP42tq4MUX7br//ORyhi+5wdacq6qSjvIqtAFPiQ2U52aH+dKMSWyz4eWu\ngWVGBJk82UZ6CwRsYREqnDwo+eHll19m//33RyTteHSlF4wxvPLKK3z1q1/ttl5EVhlj6no7Xmv+\ng4wnekcdZZ12fkwjezKrq8ZMR4c1lbiC6R3Ql/6uwSgoEs04D7Y67HncAs685Ugq6QAgIn4qFi2y\nefL5cObOhUCwW2hppfwYMmQIra2t1NTUaAHQT4wxtLa2MmTIkH6fQ8U/D3jC+WMauda13nd7BbIw\n2A/WyNhkZpw1axzG+5/grEgTfj9897uw2/2N1j01GsVccCHvycP8hWlcVe30SFuhtW6UgWH48OFs\n2rQJnbMjO4YMGcLw4cP7fbyK/yATDoN/ZZjHzRWM48mu9V01/8suy0r5BqtjNbGPAWDqVGgzDs9V\nOsyZA7vVhuHB2+JiVkQ5ySzlWB5i/NYQoZCTckpI9e0vXSorK9lnn33ynYyyRwd55ZDeQhMvvTxM\n+zeP5LKlh3UJv1fjl2HDbAS0mTOzSkOmg6hyQfy4r/jwzdGoO1m848DcuTaQP7HQ0VW0M9/8mK9u\njt2oZIWWoigDh9b8c0RvNdell4c5ftY4qugEupt5BGyp4bpFZkM+vH4gjSeTl6eLLsK4cSwE+Bpr\nOeD6w2HfeRAM9ji+psbeEjUBKcrAoOKfI1KZW7zwN8PmhziJzpgnjPstIvDzn+dE+D3yMTI2baET\nDEJtLe2nfJ+q9zbFCj4TtWEramtxHKfr+Joaa0JSE5CiDBxq9ukniSaeZOYWrzUQuaWRH5g7iGJF\nv0v4x42Dp5/O2tRTKPSISZSwsfqqK4HYPeiaXaypCWbMwCFMQ4M1GakJSFEGFq3594NUJp7ly2NB\nLsGK1o+2NnILsYh9EeD/djuAYb+5NKe1/aLAnXOA2bPh1VftuspKG9M6ErEl53nnceKYeq6qcnRA\nmKIMIFrz7wfpOicXL4Zbb7WFQ00NTDILgFhnpx8YdsnZEAyW59y1wSCsXQsrVsDVV8PEifZGejfz\nlluovXgca37SqBE9FWUA0Zp/P0jVuZlYKAx5McxY34sQjZl6Ir5KKgIBdW30OibCYVtielOZAXR2\nsu9/T6HhidoyuymKMnhozb8fpAov7hUKPp/97P9uCD/Rrg7OtXIAL897IuWkJ+VEV6sH92ZOnhyL\nQw32xkyfXmbNIkUZRDIJ/ZmPTyGFdE6IsJyW+fONqagwxucz5siqZ0xn9VAT8fnNFt9Qc++02AnK\nOcRxyrzPn28i/koTwWeiYIyIMZWVPcJDK4qSGjIM6aw1/17wzDNXXmm/e6uItrbCIdEw06Iz6OyE\n3x6/nOm+qzia5fzwJqfr+HKeqSpVqydcG+Rb/if4G0fb9pIxNi7QxRdrC0BRcoyKfy/01Tzz/c2N\nPBYdx1X8kmXR8QBcaxp4Our0OD6ta2QJk2oUcigEKyIO05lOhIqufhKi0fKziynKAKPi3wt9CpcQ\nDrPvf0+hkk4qiDJU2jhjt9CghVsoFnrrM3ne7zC1Yg7GX2E7T6qr9cYpSo7ReP4ZkHG0yVNPhaVL\nY8sVFfDkkz3i2CeeT6NZxuh2L4ib/CCTSX8VRck4nr+Kf65obLQeKx4+H8yb12MgV6KL5+zZGsog\nLUl8YsM4XYPpBny6SkUpMnQyl8Gmubn7cl1d0hG8iX0Izc06t21aEm7YG00hjlzg0GHni2HRIjuV\nsN4zRekbavPPFRMmdF+eNCnpbol9CBMmDF4I5qIkECBSUUVE/EQqqvjTu4Eu4YfyHCOhKLlAa/65\nwqvlNzdbRU8RtydZ9MvaWrX5pyKMQ4NZzmGEeNoE2InuN8jn0wJTUfqDin82JPbUBoMZBWtLDLmc\njxDMxYLn/vmEcfBH4PzdYqE1/H64+Wa9d4rSH1T8+0tjox18FI1aV0TtqR0QEuMo1dfDZV9oRO5p\nxpw2gX3LLTKqouQIFf/+EA7bSUg67axctLVpT+0A0cNMtqYRZrleVbOWwb6UX2hsRckBOenwFZHj\nRORVEVkvIlck2X6uiLwvIqvdz49zcd28EQpZ7xMP1/BcliGaB4FuI6ETvaoWLMhLmhSl2Mm65i8i\nfmAucAywCXheRO43xqxN2PVPxpgp2V6vIAgErKmnrc0anufMIYxT3iGaB4sJE2DZstjyiy/a0lZv\ntqL0iVzU/L8BrDfGvG6MaQfuBE7OwXkLim61etcW8UbwapomPUG4Nlj2IZoHjWAQTjkltqxxfxSl\nX+RC/PcE3oxb3uSuS2SCiPxDRO4Wkb1ycN1BIxyGhkCYnX5xIf847EKWXh4mjMNXFzdw3q1O16xd\n6q8/SEybBkOH2ptdUQEbN6qtTVH6yGB1+D4ALDHGtInIZGAx8K3EnUQkCAQBRowYMUhJ6511TWGW\ntR9JJR1goG3WQn7/rxDt7U5XTb+1taf/vjJAxE+YvHChnTdz8eKUtjaNnaQoPcmF+L8FxNfkh7vr\nujDGtMYt3gbMSnYiY0wj0Ag2tk8O0pYTvrN2FpV0dM3IVUkH+70doiphknH11x9EHCfW8R6J2Gkg\nm5p6/AFlP12moqQgF2af54FRIrKPiFQBZwL3x+8gIrvHLZ4EvJyD6w4O4TA1K2LZMUAUH/tOCpTt\nZCwFQyAAFTbuvzGGyIKFPcw/2hejKMnJuuZvjOkUkSnAI4AfWGiMeUlE/gs7ndj9wCUichLQCXwI\nnJvtdQeKHiaCpiZM1M7Da4VfeOasmzkyaNVeRT+POA7vHD+RXZfOx48h2hFhU1OIveP+lMRBYtoX\noyiWnNj8jTEPAQ8lrPtV3O8GoCEX18oVyezASU0ECcfdz8m88rUgRw5yepXk/G23er7HYippp4Mq\nniBAfdz2ZLGUFEUp06ieqeblTWoiqK8nWllFBKGNKm6smqa1xwJiVL3DCVXLmS5XcULVckbVW3XX\nAXeKkp6yDO+QTOQdJ4WJwHHwPxHijaYQTxBgRr2jt
ccCwnFgRsghFHKYEYjNjOa14Px+Ow98ZydU\nVmoUDkXxKEvxT2UHTmkicBz2dpxu5gSlcEj0soov3OOjcLS3J3UIUpSypCzFP50d2LEz7gIBelr8\nlUIjWd9NfOEO3QsARVEsZSf+8WIRCMRc/xzHboweeRTS0Y6prML3hM4PWMik8uF3HHhudpjW5hCb\nRwc480anW0hoRVHKTPwTbcEi1hbsTaT+lRubGNfRZgdzdbTx7qwmdrtXxb9QSdV3QzhM7VT3j36q\niud/v5wHWx319lGUOEpa/BNNAvFiEY3afYyxwTn/56Iw10de6Hb822/DbknOoxQGKX34E0qF2tYQ\ntQ36xylKPCUr/slMAvFiEe8F4hDmkch4qmgDIAJ0UEXlpHoND1DApOy7STOySwtyRbGUrPgnMwk0\nNMTEoqYGfvITu++RhKiinQqidOLj1eFHE71yOrVBhxkzUpgWlIIgaTylJKVCOGw9fRYtsgV+RQVM\nnGj7APT/VMqRkh3k5VX+EkMse7NCtbZaQTcGniBA1F9FRPxIdTVfu8sKf7rzKAVO3PRfm86+nOHf\n3IszbjmSMW1hIhFr6ps/v/sgP0UpJ8SYggme2Y26ujrT0tKS1TnSNfHDYVhxxOWcHLmH+/yncdzN\np1DbmnxnNRUUMZdfjpkVCyLbgZ8jeYpnXTdevx/OPx9GjND/VykNRGSVMaau1/1KWfzTkiAKMm0a\nzJw5cNdT8sOoUZj167vCcUe7+QmrAAAdUklEQVSBPxxwLT9e30Ak0tPrS/t0lGInU/EvWbNPr/zx\njwBdosA99+QtKcoActppXRFZDYDPz49uCxAK2XDc551nhV9DPivlRsl2+MbTw2wTDhN9+50uUQDY\ndMhpDM9bCpUBw23NyR//CF/6EnLddeA4OMTiAC1erCGflfKj5M0+ia6az80OU9s8nejfHsVnokSB\npxjHM9c+QUNBBZ1WBoQkHTjap6OUEpmafUq+5h/v8jm2Lcz+U8ZDpA0x1q2znWp+XXUdMwL5Tqky\n4KQYtJFu+k0tGJRSpeTFPxCwnXrRKAQkREWkHaJRxOfjk7qjeWDsdA3TXC6kjAdhSRR6HeCnlDIl\nL/5gvTkAnvIFiPqq8Hfat3mn2dOp17e5fIgf+VtRARs3QjhMGIemJli40JYLntD3UlYoSlFT8t4+\noRDUdYS53MwgEoE/TNRZ18sWb+Tv+efb0X233krkqPE0BMLMn99T6HWAn1LKlGTNP775/s2XGpkW\nvRAfUTqjFbwy5kkIas9u2eJF+PNmeom2cxghnjC2IiASE3qd/1cpZUpO/OPttIf5wizvuAAfBgEq\n6KTm+isg+ES+k6nkkzjzj4iPkyNL+UBqWFwV7BHvJ11nsKIUMyVn9om3057Z0dQl/B4VG1/LV9KU\nQsGr0n/3u/g6O/i6WcktZjIbjzybefNU7JXyoOTE36vUHeYLM5FF3Ud3Am8Fzspf4pTCwXHg888B\nO8pbgF2X/QEaG/OaLEUZLEpO/L1K3dVHh6j2dXbV+j/1fYHV357GmEc0fo/iMmFCz3XNzYOfDkXJ\nAyUn/gDOmkYCm5cifh/4/cjQoWy/4q8q/Ep3gkE4K6EluM02GuNZKQtKT/wbG2HyZFi5Ejo64Lvf\nVbdOJTV33GED+3/jG9b3/4EHNMi/UhaUnvg3N3fZ9w1Yu64Kv5KOYBBOOcX6/mt4T6VMKDnxf220\nteOahGVFSYuO6FLKjJLz879rxyAbBE41zdwrExi5YxAd0qWkIjYg0MGJn+DZq/lrq1EpUUpO/AMB\nGD8kyIL2oI3REsh3ipRCpWfgNgcngEZzU/LKYEWSLTnx1yH5SqYkDdxG3MqtW6GpSR8iZdAYzEiy\nJWfzB3uzGhr0nVXSk9TMHwhYrx+wHcALF6rnjzJoJKuQDBQlKf6KkgleK7FbkFfHgYkTMW4ccNMZ\ngVCIxkY49tjYAOBwGGbM0HJByS2D6XeQE7OPiBwH3Aj4gduMMdclbK8GmoCDgVbgDGPMhlxcW1Gy\nIVngtqVfqOfbZjGVtNMRrWLRSwGm/MFuW7YMXnsNbrpJuwWU3JBo4x8ss3XW4i8ifmAucAywCXhe\nRO43xqyN220S8JEx5ssiciYwEzgj22srSrYkm73r+//tcDDLCRCilRr2ezjEocCz2Dfxnnt6n+RF\np39UMiGVjX8wnplc1Py/Aaw3xrwOICJ3AicD8eJ/MjDd/X03MEdExBTq7PFKWZDsxfNC/XtCv5zx\nVH/UzkVUMZ7lPIvDaad1r/knNs11+kclU5LZ+LdbE6a1OUTNhAC1wYF7cHIh/nsCb8YtbwIOSbWP\nMaZTRD4GaoAP4ncSkSAQBBgxYkQOkqYoqUn24gUCUF0NbW0wnhBDTDs+E2GIr53zvxRi4s+drgHB\nqWr2Ov2jkimejb+tDXw+GBVq5KvLLsJHlPZlVazh8QErAAqqw9cY02iMqTPG1O2yyy75To5S4iTr\nXOuKCns1nDEvgG+I3cHnE84btpQgtsc3nUeZDhZWMsVxYPZsK/xf7wxz8rKL8BPBh6GaNjoWNA3Y\ntXNR838L2Ctuebi7Ltk+m0SkAtgB2/GrKHkjVedazObqQO1ymDULli61wQJXrrQ9vjNTR4jVsSZK\nX2httV7F40wIH5Fuk0/tvsfAXTcX4v88MEpE9sGK/JnADxP2uR84BwgD3wMeU3u/Ugj02rkWN+lL\nFzfcYO0+aQ7U6R+VTAkE4HB/mL2jG+k0lfjoACDqr2D3afUDdt2sxd+14U8BHsG6ei40xrwkIv8F\ntBhj7gcWAP8jIuuBD7EFhKIUBxMmWB9PD2PUkK/kDGdNI491XoSYCKaiEjnxFNhtN/zxk0kPADnx\n8zfGPAQ8lLDuV3G/twKn5+JaijLoBIPW1HPDDVb4Kyth40br1qMFgJIN4TBceCG+aNQud3bwwtu7\n0TZt3oA/WgXV4asoBcvMmbBihZ0oSARuvVUnfVGyp6kJPOF3WblycB4tFX9FyRTHgREjMB2dEIlg\n2tp5oymkYR6UnGCACD4WUz8o8wmp+CtKH1hTE2BLtIpOfHREfVx3aw1XXqmNAKWf1NcTrawmitCJ\nnwuZx0qfMyguwir+ipIh4TBc1uwwldlE8eEjwm8jU/l6JKwzPyr9w3G4Y9LjXCnXMI6nWOgLcvTR\ngzMqvOTi+SvKQOCFbGhrg2m04sNQQRRDO9+SEH+vcnQwl5IZCYGfRtU7XLDYob0dqqtg+vTiie2j\nKCWPF7IhGoUnJUCnVOGnHb+/ggljNnLmpDC16vmj9EY4bEW/o8N6jYVCOI6Tl0GBavZRlAyID9nw\n4hCHdfOWI8Hz8Ylh7KpbqZ2qRn8lA2bNsrUIY+x3kw3fkI8JqFT8FSUDEid+qQ1azx8ikcGZdkkp\nfsJheOCBfKeiCzX7KEqG9AjZ4DUHUsV2VpR4QiFb4/fw+6F+4MI39IaKv6L0F43gpvQFN164aWsj\nip8N/zGH
ffP4zEihxlerq6szLS0t+U6GoihKzljTGObPF4d4LBrghWpnQFw6RWSVMaaut/205q8o\nijJIPNjqcE3UIRoFX1usmygfjUcVf0UZIHQeXyWRmppYKJ9oFDZvzt+Unyr+ipJrwmHeaArRsDDA\nioij8/gqXbS22lm7olH7vXp1/qb8VFdPRckl7lDgveZfyUPt4zX0g9INb45ov99+T5iQvyk/teav\nKLnEHQrsMxGq2cJspnK5fzY1NQ4zZqgJqNxJ5iBWW5sf86B6+yhKLgmH4aijoK0N782KVFRztO9x\nVkQcKipg4kTr3q2FQGlRKH08mXr7qNlHUXKJ41h1B8T9+DrbOawjRCRiA8PNn68hoEsNL/DfX34Z\n5uFxM1jTWPh/roq/ouSa+nprwPWorOLpygAidtEL66L9AKVDUxOcvaWRx6JH8qvOX7L/lMIv3VX8\nFSXXOI5V9gsugAsuwPfE48wIOUyenL/OPWXgCIdh7YIwc7iYSjqoIEpFpK3gS3ft8FWUgSAxEFDY\nxoG76Sbr7ldTE9MGtf0XN6EQ/KCjCT8RBDsdo/j9SUv3QukXABV/RRlwwmH4n3GNnNzZzH0VExg9\nN8gll8QG9jz+eP6FQOk/J9aEGcUifBgMYHx+ZM6cHn+q1y+QjwFdyVCzj6LkmHCYbpO6fzSrkbmd\nk/k2y5jbOZkPrm2krc3a/tvaukK6K0VKbWuIal8nAiCCL3g+BIM99vMmBCqUCOBa81eUHJK0dvd2\nM0CXSWDcB81AT3FQipRAAKm2ob2lqiplmOZCiwCu4q8oOSRZ7c6ZNAGzclmX33/TZxMAELEz+eUx\npLuSDfEG/AxCexdaBHAVf0XJIUlrd04QAV6/vplX18NlXM+XeI3Hj5k5aJN1KzkmWROvoaHXw3pM\nCJRHVPwVJYekrN0Fg1Q9+RrHrZ8FwOXM4uxdYLgzM19JVfpIN0+dpE28AlH1DFHxV5Qck6p2N/y5\ne6wbINb2P/yem6Bx36Sdgx6F5BpYziRW9J+bHaC2kAz4/UDFX1EGi9NOg1mzumz/bNkCkycDEK4N\n9hD5QnMNLGdCIRjbFuasaBOyBV58sZ7aQjLg9wMVf0UZJMKnzGTFb+HCyE1sy5au9R8taGb8mmAP\nkQ+FrCtoNGq/i9CyUDKcWBPmp9GjqKYNgOiChVAfysjOX6ion7+iDBKhEDQwk58yG6CrBRDeY0JS\n/+/EWZ9qagY7xYpH7cOzqKatK1ifv7Mj/476WaI1f0UZJDxPoEXtQSoFrhnbzE6TJrBTbZCqR3qa\njxNnfWptzWfqy5jGRli6FIlfV1lZlHb+eFT8FWWQ6O4JFGQnx3b0OiT3EPJmfSriPsXSYPbs7st7\n7gl//jNhHEJFPEFPVuIvIsOAPwEjgQ3A940xHyXZLwKscRc3GmNOyua6ilKspPIESra+0AYFlSWN\njfDyy93X/epXhHGSdsYXk3dWtjX/K4DlxpjrROQKd/nyJPttMcaMzvJailJ2FNKgoLKkubnrpwE+\nH3kA2waDhGYkj9NTTN5Z2Xb4ngwsdn8vBk7J8nyKUp6Ew7xx4QxmnRrmwgsLfh6Q0seLzjd6tI3U\n6a7++aZLCYdj/TfxczMUWuC23si25v9FY8w77u93gS+m2G+IiLQAncB1xpilyXYSkSBuxKsRI0Zk\nmTRFKRLCYSJHjWfPtnamUMV4lrNwoaOunfkiYYDFI6OnIatXczcTWGSC7BWyHp7JTHLFNO6rV/EX\nkUeB3ZJs+s/4BWOMEZFUs8HvbYx5S0S+BDwmImuMMa8l7mSMaQQawU7g3mvqFaUUCIWQ9nZ3MpCt\n1NPEcx0q/nkjrgpv2tp5es2OXM0jAFTFzdGSaJIrtj6aXsXfGHN0qm0i8m8R2d0Y846I7A68l+Ic\nb7nfr4tICBgD9BB/RSlLAgFMhR/TEcGH4cfcyj98YwgENOxzXoiLztfpq+LxSACwUVjPOy+9qBdT\nH022Nv/7gXPc3+cA9yXuICI7iUi1+3tn4DBgbZbXVZTSwXHwTzoPEASoIMJcLsYhO8N/4qQySga4\n7jqv/WQ2ofFX8ZefLueFagefDyoq4AtfKKF7aozp9weoAZYD64BHgWHu+jrgNvf3N7Funn93vydl\ncu6DDz7YKErZ8MwzxlRWGmMn+DLG5zPm2muzOt3Qocb4/fb7mWdymNZSxb1pUZ/ffMZQc5jvGTN0\nqDHTphlTUWGMSOyvKeR7CrSYDDQ2q5q/MabVGDPeGDPKGHO0MeZDd32LMebH7u9njDG1xpiD3O8F\n2VxTUUoSx4E5c2z10uezo7uy6DEsNs+TgsC9aRKNUEk7R0RDtLfD6tVeiWx3i0ZL457qCF9FKRSC\nQaitzUmPYaFNGVgUuDfNtLXTEa3iKV+AqiqYMAGeeioWZM/nK417KsYUplNNXV2daWlpyXcyFCW/\nZDFktJhGmxYM7k1bUxPgwVanS+Cbmuz3mDE2xlIh31MRWWWMqet1PxV/RSlQvNFEHR02kJj6fg46\nxTinQqbiryGdFaVQaWqyqmOM/faqn0puyMAdqpT7TtTmryhFiJp0etKne5KiSp94jlLuO1HxV5RC\npb4eFi2KKU99PVCcpoiBJv6e+P12MFZ9fZr70tQEW7fGWlWhUMpIncU0arcvqNlHUQoVx4HHH4dr\nrrHfrvKUsimivyTek/nzbWGQ1KITDsPChTHfzYoKCARS3lfHsbF8Skn4QWv+ilLYJIkXUMqmiP7i\n3ROvMh9Xoe8p2qGQVXiwMRsmTgTHIUB53VcVf0UpMkrZFNFfvHvS1GQtZZ2daQQ8sfR0zWnldl/V\n1VNRihHt8U1JRremhO+f+vkrSqmiPb5KGtTPX1FKlUHu8S2J6KAlkYncojZ/RSk20vT45tqaURKN\njJLIRO5R8VeUYiNFz+RAaFyyRkbR6WZJZCL3qPgrSjGSxAW0LxqXaQuhJNxKSyITuUfFX1FKhEw1\nri8thHy6P+bMhFVuPpwZouKvKCWC48Bzs8O0NoeomRCgNoXIJYlsUHDz0mZrwlrTGHcfgk5xTa47\nSKj4K0qpEA5TO3W8nXXkMR8w104Q030XFi2KRTbw+wvTChJvwmprg+nT7ScT/V7TGGbU5AAH0EHH\nskrWELIFgNINdfVUlFIhFIpNN9XZCVOm9HBtDIXsJrCRDc47rzArxJ4Jy+ez2Xn00TSxehKomj2L\natrxY6imnY4FGgo7GSr+ilIqBAJWLT0ikR5jAGpqrOj7fDBkSFdkgwGjv+71npn+6KNjBUBGQxrC\nYUa9+kC3VXvs0bdrlwtq9lGUUsFxYO5cohdNgWgEIz78S5daxQ8GCYdh6lQrpH4/zJ49MLV+r6O2\npsZer792e8expp6nnsrQUaexEa6/Hl/UBm0zAD4/u00b4BKuSFHxV5QSIlwbpMFfy6WRWZwSWYpZ\nuRJZuRKAUGuQ9nYr/iJ2LtqcXz+uo1bEXiu+1p6p+Md7+mTkqNPYC
JMnx5ZFEL8f5s4tTLtWAaDi\nryglRCgEKyIOV/A5AOJtaG4mMD044O7u8R21Pp9tYYhkdr10LYaGhl4u3NzcbXHLnvvy5xObGFXr\noNKfHBV/RSkhvI7Se7dO4FizDINbAEyYMCDu7r1Nezh7tm1h9Ha9rFsMEybAsmWANff8x7s/p/FW\nh6rFGs0hFSr+ilJCxAQ+yOubYd/VzVYYXZfPXLq7J/PFBzjnHPuddhrFBLJpMQAxl9bmZh7eZgKN\nDwR7HelcwlGdM0LFX1FKjJjAB91ParIRwMRwEk1NsHhxjzlSMqK/LYZuBIMQDLJTGKoeSW/e0lhv\nKv6KUrZkI4DhMGzcaKe/BXs8pI8tlK6gyaVJKpNzaaw3FX9FKS/iFDgUcvolgPGFht8P558fq+XH\n1/zja9y9FTQZt0Ay3LE381ZNjTUvGVO+sd5U/BWlROmhk54Ct7WB38/3fzqHq6qCffb+ia81A4wY\nERPaVDXudDXtlAVDvPtPa2v2Awfi7svUqbH+hYEa71DoqPgrSgmSVFDjwz9Eo+x7w4WsuQzu2jHY\nJdaZVKzTRQ9NVeNOd4xXMEyMNPK9Lc1se9EubHl3BUPe3QgY663k9QJHIv0bOBCHd72BHO9QDKj4\nK0oJkrSmHQhYAY1G7U5uAdAwD3DsCOCjjooJ9OOPJ9fW/tjnkx4TDkNTExc+u5ZzIuvYnXfszqtj\nxwnWdVO8NPt8Kd2AymqOghyg4q8oJUhSgXMcmDMHLrywWwHAlClQW0tTk0Nbm13d1ma9d9LF+U/c\n1pv4Og44uDutqYFLLoG2NnYEdnD36RJ7d9l4B/t8UF2d0g2oWOYoKCRU/BWlBEkpcJ4/fHwB0NEB\nU6eyzx6z+TFrmEAzzUygNzfReFKKb2Mj3Hij7VkdOxb+9Cd7XZ8v1mlAT7E3ced+/9tnsWvga2mV\nuq/eOxreP0vxF5HTgenAV4FvGGNaUux3HHAj4AduM8Zcl811FUXpnZQC5xUAF10UE+CVK7nMdwSC\nXT6WZbz+BYBg9yo9dO+E3bwZHnyQL79vmL9lLKNYR/WWNoaf3A47dsK6dbHrvvxy7Lcx3QoAT+yj\nwKPybb7+xTeJRIT/m3gp+87svRDqzZRT7gO6kpFtzf+fwGnA/FQ7iIgfmAscA2wCnheR+40xa7O8\ntqIo/SUYhBdfhFtu6Vrli0a6TC4Gd3RwuDZWpa+osKLd2RlrNbj77gycTZy4vw/mfftT6I4BjM+P\n7+a5Ng1r1yIffMAHO+/HQwdMY1S9wzBXoHdJODaViKcz5eiAruRkJf7GmJcBRBL/3m58A1hvjHnd\n3fdO4GRAxV9R8kl9PSxcaFURwO9H3Jq4Fw+omz3FE3wTM8ok2ueTKUG8CSeKEKGCqTKHH9UGcRIa\nFvUZxv9JJuKpWjo6oCs5g2Hz3xN4M255E3BIsh1FpGs8+ogRIwY+ZYpSzjiOVcImd6ar+npYs8ZG\nyPTiAYXDMXtKmpp/st8eUeAN9ubPcgYfsyOPmwDPG4e9QnZ7prXy/oq4evckp1fxF5FHgd2SbPpP\nY8x9uUyMMaYRaASoq6tL9hwpipJLEqvLjtN93t9Eewp0s/mvDm2matmDgGG1byzf/co6fB1tbFzf\nzvvswityAItNPWEcBFt+RKMxEU4m6N4lEs03/RVx9e5JTq/ib4w5OstrvAXsFbc83F2nKEoxkKyA\nwDXDXAVtvpn4fHbelC+45cbHYbizCdauhfCTdp0x8NOfwo47dhfheEGvqUndEshGxNW7pyeDYfZ5\nHhglIvtgRf9M4IeDcF1FUQaQxJGyL75o5+v1auSLF8PWrbH9fT4r/N7ELJ6tP951P34QcltbT9OO\ninjuyNbV81TgJmyn/F9EZLUx5lgR2QPr0nmCMaZTRKYAj2BdPRcaY17KOuWKogw68Z2z8WYYvx8W\nLbLdAVVVNqZ/e3usb1jEjtHyCoZUnbdr1nQff1ZTM/h5LBey9fa5F7g3yfq3gRPilh8CHsrmWoqi\n5Ja++r4nE2zPDLNxI9x6a8x2D90LhvPO6z65S6rO29ZW20LwxoGVa9ydwUBH+CpKGdIf3/dkgt3Q\nEAsIlziRS3196sIlVedtIGBbCOqZM/Co+CtKGdIft8neonkm64zta3wd9cwZPMSYwvSorKurMy0t\nSaNFKIqSJf0d9aphEgofEVlljKnrbT+t+StKGdLfGvZAeNtogZIfVPwVpUzxhDwcjrloDrb4Njba\niNKRiLX1a9ydwUPFX1HKmIEIepZpTT4chosvtu6hkNyvXxk4VPwVpYzJddCzvhQmoVC3EEH4/erd\nM5j48p0ARVHyh+fB4/fnxrUyVayeVNeurrb+/BUVdpIxrfUPHlrzV5QyJteulYnuoDU1qfsT1K0z\nv6irp6IoOcWz+dfUwNSpOonKYJOpq6eafRRFySmOY0f+trZmbgJSBh8Vf0VRssJzFQ2Hu6/PdX+C\nklvU5q8oSr9J592jNv3CRsVfUZR+05urqMbfL1zU7KMoSr9R007xojV/RVH6jZp2ihcVf0VRskJN\nO8WJmn0URVHKEBV/RVGUMkTFX1EUpQxR8VcURSlDVPwVRVHKEBV/RVGUMqRgo3qKyPvAGxnuvjPw\nwQAmZ7AohXxoHgqDUsgDlEY+BjsPextjdultp4IV/74gIi2ZhDAtdEohH5qHwqAU8gClkY9CzYOa\nfRRFUcoQFX9FUZQypFTEvzHfCcgRpZAPzUNhUAp5gNLIR0HmoSRs/oqiKErfKJWav6IoitIHVPwV\nRVHKkKIXfxE5TkReFZH1InJFvtPTV0RkoYi8JyL/zHda+ouI7CUij4vIWhF5SUQuzXea+oOIDBGR\nlSLydzcfv8l3mvqLiPhF5EUReTDfaekPIrJBRNaIyGoRacl3evqLiOwoIneLyCsi8rKIFEzw66K2\n+YuIH/gXcAywCXge+IExZm1eE9YHRGQc8CnQZIw5MN/p6Q8isjuwuzHmBRHZHlgFnFJM/wOAiAiw\nrTHmUxGpBFYAlxpjns1z0vqMiPwMqAO+YIw5Md/p6SsisgGoM8YU9QAvEVkMPGWMuU1EqoBtjDGb\n850uKP6a/zeA9caY140x7cCdwMl5TlOfMMY8CXyY73RkgzHmHWPMC+7vT4CXgT3zm6q+YyyfuouV\n7qfoakciMhz4DnBbvtNSzojIDsA4YAGAMaa9UIQfil/89wTejFveRBGKTikhIiOBMcBz+U1J/3DN\nJauB94C/GWOKMR+zgWlANN8JyQIDLBORVSISzHdi+sk+wPvAItcEd5uIbJvvRHkUu/grBYSIbAc0\nA1ONMf+X7/T0B2NMxBgzGhgOfENEisoUJyInAu8ZY1blOy1ZcrgxZixwPHCxax4tNiqAscA8Y8wY\n4DOgYPoli1383wL2ilse7q5TBhnXRt4M/MEYc0++05MtbvP8ceC4fKeljxwGnOTazO8EviUid+Q3\nSX3HGPOW+/0ecC/WxFts
bAI2xbUe78YWBgVBsYv/88AoEdnH7Uw5E7g/z2kqO9yO0gXAy8aY3+U7\nPf1FRHYRkR3d30OxjgSv5DdVfcMY02CMGW6MGYl9Hx4zxpyd52T1CRHZ1nUcwDWTfBsoOm84Y8y7\nwJsi8hV31XigYJwgKvKdgGwwxnSKyBTgEcAPLDTGvJTnZPUJEVkCBICdRWQT8GtjzIL8pqrPHAb8\nCFjj2ssBfmGMeSiPaeoPuwOLXS8yH3CXMaYoXSWLnC8C99o6BRXAH40xf81vkvrNT4A/uJXT14GJ\neU5PF0Xt6qkoiqL0j2I3+yiKoij9QMVfURSlDFHxVxRFKUNU/BVFUcoQFX9FUZQyRMVfURSlDFHx\nVxRFKUP+P5OxXtvr2werAAAAAElFTkSuQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztnXuck9W1v5+VzAW1Vupo6wURS/FW\npwLFy1tFY7Fe0KKW2mqt01o0qODRX48ieI6ntLYiYI+0Fa2poE5VbI8oorWVikZQ4wUv59CCF7Aq\naKl0FOqFuSXr98d+M5MZkpnMJDPJJOv5fGaSN+9tv7fvXu/aa68tqophGIZRXgQKXQDDMAyj/zHx\nNwzDKENM/A3DMMoQE3/DMIwyxMTfMAyjDDHxNwzDKENM/EsYETlHRJYVuhxJRGQHEXlQRLaKyP8U\nYP8hEdmYMv1XEQn1YjtjReTVvBauHxGR74vIk4UuR1d0vlZ53G7RH3t/YeKfBSLyHRFZJSIficjf\nReSPInJ0ocvVHap6l6qeUOhypPBN4HNAjaqeWejCqOoXVTXa3XIioiLyhZT1VqrqAX1auAGGiAzz\nz1NFoctiZIeJfzeIyA+BecC1OOEaCtwEnFbIcnVHkT6E+wKvqWprrhsq0uMzjIGDqtpfhj9gF+Aj\n4MwulqnGVQ7v+n/zgGp/XgjYCEwD3gP+DpwOjAdeA94HrkrZ1kzgXuB3wIfAi8ChKfOnA+v9eWuA\nM1LmfR94CrgBaAB+6v/2pD9f/HnvAf8CVgOHpBxnPbAZeAv4TyCQst0ngeuBD4C/ASd3cT4OAqLA\nFuCvwAT/9x8DzUCLf04npVm3u+N/E7gS+D+gCagA9gIW+2X/G/BvKcvvANzul3sNcAWwsdP2jve/\nB4GrUs7vC8A+wApAgY/9cn87eV27O2Z/3u3AfOAP/nafBYZ3d03SnJvzgLX+Nt4AJqfMC+Hus3+n\n/T47L2V+DbDU38dzwDXJ+yLDvv4H2ARs9Y//i53O6c/9+2Srf2/sALztn6eP/D/Pv553pqw7zF+m\nIttjylC+m4HrO/32APDDLJ+TJ9OVx/8tCpyfMv0Dv4wfAI8A+/b02hXrX8ELUMx/wElAa+rNkWaZ\nnwDPAJ8FdgeeBq7x54X89f8LqAQuwInU3cDOwBeBbcB+/vIzceL4TX/5y3GCVunPPxMndgGcCH0M\n7OnP+76/r0tworhDpxv9RJygDfZv3INS1q33H56d/QfiNXxx9rfR4pc9CFyEq+QkzbmoBNbhRLQK\n+Kr/AB6Qcnx3dnEuuzv+N4GXcaK8g38eXvDPbxXweZyInOgvfx2wEtjVX+cvZBb/K/wH+AD//ByK\nc0+BE4gvpKwXSm4ni2O+HVcZH+5fl7uAe7q7JmnOzSnAcH+5Y4FPgNGd7rOf+OUZ78//jD//HuD3\nwE7AIcA7dC3+P/DvhaRh83LKvPk4gdzbvx++4i83jO2FtMP17rxMFseUSfyPATbg34PAZ3DP0V5Z\nPidZiT/u7X6df10qcEbR0z29dsX6V/ACFPMfcA6wqZtl1gPjU6ZPBN70v4f8mzLoT+/s32xHpCz/\nAnC6/30m8EzKvADOihubYd8vA6f5378PvN1pfuqN/lWcqB+Jb9X7vwdxFvnBKb9NBqIp21iXMm9H\n/xj2SFOesTiLMXX7i4CZKcfXnfhnPH6cWP8gZf4RaY55BnCb//0N4KSUeWEyi/+ryXOZplxdiX93\nx3w7cGvKvPHAK11dkyzvzSXApZ3us1QRe8/fbhBXoR6YMu9auhD/TvsZ7B//Lv712EbK21jKcsPo\nofhncUyZxF9wbxrH+NMXAI91cQydn5Nsxf+PpLyh+sf/Cc592etrVyx/5vPvmgZgt278y3vhXoGT\nvOX/1rYNVY3737f5n/9Imb8N+FTK9IbkF1VN4F7n9wIQkToReVlEtojIFpwVt1u6dTujqo8BN+Is\nt/dEJCIin/bXr0xzDHunTG9K2c4n/tfUMifZC9jglzvTtroj4/F3no97CPdKng//nFyFa5tpK0+n\nsmRiH1xF3lOyOeZNKd8/wT93XVyT7RCRk0XkGRF53z/O8XS89g3asS0luZ/dcVZrVudBRIIicp2I\nrBeRf+EqSPx97QYMonfnKd2+ujumtKhT4nuAs/2fvoN7o0put7vnJFv2BX6Rsp33cRXP3j25dsWK\niX/XxHC+5dO7WOZd3E2SZKj/W2/ZJ/lFRALAEOBdEdkX+A0wFeeOGIxzY0jKutrVhlX1l6r6ZeBg\nYH+cq+OfOMuw8zG804uyvwvs45e7t9tKe/wp81OPcQPwN1UdnPK3s6qO9+f/PXV7flkysQHngugp\nOR1zhmvSARGpxrVrXA98zr/2D9Px2mdiM84llO15+A7O3XE8ztofliwG7l5pJP15SnfvfYx7U0yy\nR/JLjscE7u3qm/5zcYS/LbJ8TlLLR6Yy4u6JyZ3urx1U9WnI7toVMyb+XaCqW3H+5PkicrqI7Cgi\nlb7FMsdfbBHwnyKyu4js5i9/Zw67/bKIfMN/27gMV/k8g/PXKu5hRkTOw1k0WSEih4nIESJSibvp\nG4GE/1bye+BnIrKz//D8sJfH8CzO4pzmn6cQ8HWclZYtmY4/Hc8BH4rIlX4fgqCIHCIih/nzfw/M\nEJHPiMgQXHtIJm4FrhGREeL4kojU+PP+gWtPSEevjznTNUmzaBXOr74ZaBWRk4GsQnj963sfMNO/\nfw8GvtfFKjvjznkDThSvTdlWAlgI/LeI7OWfb88X8s1+2VPP08vAMSIyVER2wbnkcj4mvywv4Sqj\nW4FHVHWLPyvr50RVN+Mq6e/6x/IDOlZsv8bdP1/0t7WLiJzpf8/22hUtJv7doKo/x4nhf+JuqA04\nq2KJv8hPgVW4CJTVuAiVn+awywdwjVQfAOcC31DVFlVdg4uyiOHEqBYX3ZMtn8ZZRB/gXvsbgLn+\nvEtwN/AbuOiNu3EPeY9Q1Wac8J2MezBvAupU9ZUebCbt8WfYXxw4FRiJaxhOisEu/iI/xh3r34Bl\nwG+72O9/4yqLZbjojQW4RmVwvus7/Nf/b3UqQy7H3NU1Sd3Hh8C/+eX7AGedL81i+0mm4lxAm3Bt\nELd1sWy9X5Z3cJEynSvey3H3+fM4N8hsnM/7E+BnwFP+eTpSVf+Mi9z6P1zb1kN5PCZw9+nx/mdy\nuz19Ti7AWewNuACMp1O2db9/fPf4LrC/4K4zZHntiplka7lRBIjITFzD4ncLXZZCUO7Hbxj9iVn+\nhmEYZYiJv2EYRhlibh/DMIwyxCx/wzCMMqRok2PttttuOmzYsEIXwzAMY0Dxwgsv/FNVd+9uuaIV\n/2HDhrFq1apCF8MwDGNAISJd9WRvw9w+hmEYZYiJv2EYRhli4m8YhlGGFK3P3zCM0qSlpYWNGzfS\n2NhY6KIMaAYNGsSQIUOorKzs1fom/
oZh9CsbN25k5513ZtiwYYhkm8TTSEVVaWhoYOPGjey33369\n2oa5fQzD6FcaGxupqakx4c8BEaGmpiantycT/zIjFoNZs9ynYRQKE/7cyfUcmtunjIjFYNw4aG6G\nqipYvhw8L/t1o1EIhbJfxzCM4sUs/zIiGnXCH4+7z2g0u/WSlcbVV7tPe2swSoElS5YgIrzyStdD\nL9x+++28+27vB+eLRqOceuqpvV6/rzDxLyNqakAEAgFn+YdC2a3X20rDMIqZRYsWcfTRR7No0aIu\nl8tV/IsVE/8yIRaDyy6DRAKCQZg3L3v3TSjkKotgsGeVhmHki3y3VX300Uc8+eSTLFiwgHvuaR9x\nc/bs2dTW1nLooYcyffp07r33XlatWsU555zDyJEj2bZtG8OGDeOf//wnAKtWrSLkPxDPPfccnucx\natQovvKVr/Dqq6/mp7B9hPn8y4Sk9Z5IgCq89FL263qeax8wn79RCHJpq8rEAw88wEknncT+++9P\nTU0NL7zwAu+99x4PPPAAzz77LDvuuCPvv/8+u+66KzfeeCPXX389Y8aM6XKbBx54ICtXrqSiooJH\nH32Uq666isWLF+dW0D7ExL9MCIWc5R6PO/FfsADq6ty8bETd80z0jcKQzu2Y6724aNEiLr30UgDO\nOussFi1ahKpy3nnnseOOOwKw66679mibW7du5Xvf+x6vv/46IkJLS9qhp4sGE/8ywfNg/HhY4g87\n39ICc+bAI4/k16IyjHyTdDsm79Nc3Y7vv/8+jz32GKtXr0ZEiMfjiAhnnnlmVutXVFSQSCQAOsTZ\nX3311Rx33HHcf//9vPnmm23uoGLFfP5lxB57dJx+911ryDWKn6Tb8Zpr8mOg3HvvvZx77rm89dZb\nvPnmm2zYsIH99tuPXXbZhdtuu41PPvkEcJUEwM4778yHH37Ytv6wYcN44YUXADq4dbZu3cree+8N\nuEbiYsfEv0joj85XdXXOchJxn5MmdWzIramBiy5yfxbOaRQTngczZuTnzXTRokWcccYZHX6bOHEi\nf//735kwYQJjxoxh5MiRXH/99QB8//vf58ILL2xr8P3Rj37EpZdeypgxYwgGg23bmDZtGjNmzGDU\nqFG0trbmXtA+pmjH8B0zZoyWy2AufdGg1dW+Un38yemaGrjkElcGgMpKVznU1ZkryMgva9eu5aCD\nDip0MUqCdOdSRF5Q1a5bpzHLvyjozzj6zhZUcrqhwbUDJGlpgVtusU5dhlGqmPgXAcUQRx8KOWs/\nFVVrCzCMUsWifYqAQsTRp7p7GhrcfqNRqK+HTZvg4Yfdm4h16jKM0sTEv0jozzj6ZBtDU5Pr9BUI\nQHW1q4Buvrl9GevUZRilS17cPiKyUETeE5G/ZJgvIvJLEVknIv8nIqPzsV+jnXTRQpkiiFJ7+4L7\nbG6G9dMjvDHiRNZfGclrdIVhGMVHviz/24EbgfoM808GRvh/RwA3+59GDqS6bi67rGO0EGSOIAqF\n4CsS42zqOYg1DKKR1+MjOGfFXW6BOcvY+M56htw5Oy/ls7cHwyg+8iL+qrpCRIZ1schpQL26uNJn\nRGSwiOypqn/Px/7LkUgEpk51fvlk2oakBZ9soN2uSzwxuPhivLVribY2kToUxBE8B4AACux111x4\nPeriPcPhtuWyFfTU8iVdSmCVgVEcBINBamtraW1t5aCDDuKOO+5oS+vQU6LRKNdffz0PPfQQS5cu\nZc2aNUyfPj3tslu2bOHuu+/m4osv7tE+Zs6cyac+9Skuv/zyXpUxHf3l898b2JAyvdH/rYP4i0gY\nCAMMHTq0n4o28IjFYMoUSO1HEgi0d95KNtAeHYxxVCLKU8EQp9YAY8c6NcaJfKr4a8qnJL8995z7\n++MfYdo0YnhZ9UfoXL6mJteQfMcdlkrCKA522GEHXn75ZQDOOeccfv3rX/PDH/6wbb6qoqoEAj3z\njE+YMIEJEyZknL9lyxZuuummHot/X1BUoZ6qGlHVMao6Zvfddy90cYqSWAxmzmzTcMBZ/vPnd+z+\n7hFjuYzjGq5muYyj9qX6DislLfzknwAfHH4CKoGUCsBnyRIYO5ZdJ53B6KZYt/0RotH29oRk+cBS\nSRg50Idd4MeOHcu6det48803OeCAA6irq+OQQw5hw4YNLFu2DM/zGD16NGeeeSYfffQRAH/60584\n8MADGT16NPfdd1/btm6//XamTp0KwD/+8Q/OOOMMDj30UA499FCefvpppk+fzvr16xk5ciRXXHEF\nAHPnzuWwww7jS1/6Ej/60Y/atvWzn/2M/fffn6OPPrpv0kMna7hc/4BhwF8yzLsFODtl+lVgz662\n9+Uvf1nLnaefVr32WveZnN5hB1URVReFr1pRoXrLLer+nXCCP6FuxWDQLRQMql54Yft06t/IkaqH\nH96+3tNPq55+uiag7U9TvrcQ0MWcrsdWPd1WrnTl3mEH1UCgvXzJ34JB95lpXaP0WbNmTc9W6IOb\nZ6eddlJV1ZaWFp0wYYLedNNN+re//U1FRGOxmKqqbt68WceOHasfffSRqqped911+uMf/1i3bdum\nQ4YM0ddee00TiYSeeeaZesopp6iq6m233aZTpkxRVdVvfetbesMNN6iqamtrq27ZskX/9re/6Re/\n+MW2cjzyyCN6wQUXaCKR0Hg8rqeccoo+8cQTumrVKj3kkEP0448/1q1bt+rw4cN17ty52x1HunMJ\nrNIsNLu/3D5Lgakicg+uoXermr+/S9KlfIhGnQslmZEjEHAWf5gITJ7sfly2zH12ToVYV+f+Lr4Y\nXnsN9t8fbrppe9+L5xGbdj+/fShCXesCxrCKIIm2N4EgCc5gCafF/0CQJ4DtfTeZ+i10/s0ahI2s\n6IOcztu2bWPkyJGAs/wnTZrEu+++y7777suRRx4JwDPPPMOaNWs46qijAGhubsbzPF555RX2228/\nRowYAcB3v/tdIpHIdvt47LHHqK93MTDBYJBddtmFDz74oMMyy5YtY9myZYwaNQpwg8y8/vrrfPjh\nh5xxxhlt7RBduZJ6S17EX0QWASFgNxHZCPwIqARQ1V8DDwPjgXXAJ8B5+dhvKZN6vzc1OVePf692\noKEBiHYaMGLxYtdIm06BsxjFJRqFiIa5mTDnE+EmLqaC9rYCgEC8xTnyPS+tiKf2W4hEXJEmTnTh\no9C/+YyMAU6+czrT0eefyk477dT2XVX52te+tt0wj+nW6y2qyowZM5icNN585s2bl7d9ZCIvPn9V\nPVtV91TVSlUdoqoLVPXXvvDjv41MUdXhqlqrquWRsS0Hkvd7IOD853/+s8u/n0jA+USIcQSP67Gc\n99xFMHJkB/89Eye6jaQJ1s/GdZq671sJcywrWcLpJGhvGAZg0ybeumgWM0KxjIO7R/yXkmXL3GfS\nQLJxgY2syXdO5yw58sgjeeqpp1i3bh0AH3/8Ma+99hoHHnggb775JuvXrwfIOAbwuHHjuNnvNR
mP\nx9m6det26aFPPPFEFi5c2NaW8M477/Dee+9xzDHHsGTJErZt28aHH37Igw8+mPfjsx6+RUryfp85\nEx591In+kcSYxXSOZYVbSEGWrCBRWc3Pg9P4UvxlHqiYyLm14TTOmOyt7VS3zZYtcMMNHt+M389k\nifDL+MXODVQRhD/+kX2aH+RPGuRhxvPetj14vb4OL2WjnUexS76U9IExZ5QyBRhKbvfdd+f222/n\n7LPPpqmpCYCf/vSn7L///kQiEU455RR23HFHxo4d20HQk/ziF78gHA6zYMECgsEgN998M57ncdRR\nR3HIIYdw8sknM3fuXNauXdv2zHzqU5/izjvvZPTo0Xz729/m0EMP5bOf/SyHHXZY/g8wm4aBQvxZ\ng68j2dZ1AbdoM8EOjbDJvziiV8m1bW27116bflud24AzLdfVOndc6LdCpzQgpzYOt1ZWdWiQu+WW\nju3LyXbl5LGlNmgb5UGPG3yNjAyEBl+jl3gerL4kwn5zL0K0veG1g/ulsoqnJESwm0RsSWu7qcn1\nCaip6X7/nS30EXVee2vtHXdAYyOotrcFtLZ0aJBL9g9L+vxT+ovZuMCGUUCKKs7f6EQsBmecwfC5\nFxLwhT/p108gvMxIXjr8QgJPPM6sqNetS9TzYN48F3efSLiUEN2FTWd0tyZnTJ6MVlS2latJK1ld\nE3Ib9ocFC9fGeOSRjsJvGEZhMcu/CNguWiYWc5E0CxZ0HGEFQAIslQnMZRovVnssnwd4LuAyGyu6\nocEJf2oqiO7Wy2ih+zPupI5PbqlHFe4K1BF+CWovCbUPC3bbbfD442bmG22oKiLS/YJGRjTHURhN\n/PuJTDHtnRthn50Xo/aycW3ulA4EAsjNN/PZ2jCnRGFuqOd6ms+G1rbEcqM8fjjIa9vmsczqWGk1\nNcGpp8L558Ps2dutb3H+5cWgQYNoaGigpqbGKoBeoqo0NDQwaNCgXm/DxL8fiERcrptEoj3JWVLs\nOoc8Niz2f+gs/MGg65QVDmdt5acjXwPHdK605s1rHxRmX0KwsLLd8gd4/30Xq/rMM3DddVnnCTJK\njyFDhrBx40Y2b95c6KIMaAYNGsSQIUN6vb6Jfx8Ti7nslqlJzlJdLaGQS8B2VryeALDPyFHEn6iC\nRDNUBAmeMh722COvI6nno6F1u0qrob0DF3jtw4LdfTf861/tK65YAePG8cGJ87issYHHNcSzjV6y\nv5hRBlRWVrLffvsVuhhlj4l/HxONdkzCFgi0u1pWR2JUL6jn0dZbCdIKcdAbqpmqv+QzNPCUhJg1\nzStKUezWfZSsYT79aWfxp9LUxIlLp3CCJkgQZIreyIIF4XzWb4ZhdIOJfx8TCjlXT1OT89zceKMf\nvhmJMWLycVTR1CG9srY08xlp4FqdQTDe/pZQLP7x1HJk5T5K+vjvugv+8Q/nzgoEkJZWgihKgl9z\nIdIC0Wg4Y4roYjh2wyglJNcW475izJgxumpVaWSBSCdeLx5xEaOe+3Vb+Ca4CiBRWc1X5XGejHtZ\njcrVn+Scj8c/Eeu31LDvnIvaEsYpEEdYe8tT1Ia97VY57rj2fVrQkGF0jYi8oKpjulvO4vz7mExW\n6557dVzuk30Pggu3j9kHl+KhqanweXByzsfj5xr6/eAwr3JA288CBFE35kAn6uvbM5kmB4UxDCN3\nzO3Th3RlKe85rY74HxYiLS1oZSU7LVrQNjMZzZNcv6nJRQoFAoXNg5OvMNFQCOZXXMb81sltA8dY\nwJ9h9C9m+fchyfz78TiMaozRNDMlnabnsebGKCtO+Blrboym9WUkLe2k8B9/fGFDIvOVXNHz4NwV\nYWLHTAPxb8HqahfR1Im6OlfRJIeoTLOIYRi9wHz+vSSbRshkOuPziTCfKVSQILCDC/TPJs69LHLe\nZ3EircHXMLInW5+/uX16Qbai3NAAFxDhZi4ikBwNyw/0j+J1OzhRvjpkFTVZdDqwBHCGkX9M/HtB\ntqPKnVoT40AubhN+BcQP9A+Rnf/chM8wjL7AxL8XZNvwWftSPUq8o/DPnw+eh0cZWPU+5rYxjOLD\nxL8X9MQdI6mfEyZ0yGtcDlZ9X7dbWMViGL3DxL+XZCXcdXUunXFS+aZN65eyFRPduchyEe+yaBA3\njD7CxL8vSFW0xx8va9O0KxdZruKdbduLYRjbY+KfZ9ZfGWHY9VPcyFuD/PzN7ekuy46uXGS5ircN\nAm8YvcfEP4+sjsQ4YM5UArS6Rt7GJsTM0YwusqzFOxJJOwhwWYTCGkYfYeKfRxoWRwmkRPckJEDQ\nzNGMZCXeyZ5yAMuWuc9wuH0UsSwGoTcMY3tM/PNIzcQQzcuqgSYSBImdfSPHmjnaJd02nC9e3HF6\nwQJiteHtch51HiHNMIyusdw+eSAWg1mz4KNaj2XTlvOjwE85Tp7g5PvCbal8jF4ycSLg3qQUSLz4\nEq/Xx9pyHkHHwegNw8gOE/8cicXgt8dEGH3Vifz2mAiP/MtjtszgafVMkPJBOEzDMae3Zf+MtyY4\ncFOUqipn8UPhs50axkDE3D458sGcCPNbnU/6hNZl3LUG7qgKWwRKHvnDwdP45opHqKSZFqp4ZY9Q\nW1tBTU37wPHm8jGM7DHxz5Ha15xPOtnIe+w/F7N8edgiUPLIiDqP8QuXc1RLlKcqQ8yq88qid7Rh\n9CUm/jmyev+JDFmzrG0oxtX7T2S8CVNe8TyYFfWIRj1mhfxza3kdDCMnyl78e6ohyeVPrYlR2xBl\nn5NDTHnoFk5rXcwDFRM5d1q4220YPaeDpW95HQwjZ8pa/HuqIcnlRzfF+H+JECot1FZWctH8KA81\nhDk3ZBrUL6R2Dd62DU49Fc4/H2bPLnTJDGPAUNbi39P0Asnlz0nUU00zoqDNzdS+VE/tzab6/Uay\na/C2bW76/fdhzhz33SoAw8iKvIR6ishJIvKqiKwTkelp5n9fRDaLyMv+3/n52G+uJDUkGEwfmZOM\n30/G6odCcHQwxmhe7LDcpk39UVqjjWTX4F137fj7ffcVpjyGMQDJ2fIXkSAwH/gasBF4XkSWquqa\nTov+TlWn5rq/fNJVeoG0LiFiPKbHAc0AxIEWqlm2Rx02rng/43nO1ZO0+AG+8Y3ClccwBhj5cPsc\nDqxT1TcAROQe4DSgs/gXJZlCBtO6hN6uJ9DSBLiwzlUczpVV85hVZy6fgpB08dx3HxxxBAwe7Gpt\na3gxjG7Jh9tnb2BDyvRG/7fOTBSR/xORe0Vkn3QbEpGwiKwSkVWbN2/OQ9F6TyaXkKYsU3n4aGZF\nPdOaAhI7fTb1x9cTv/c+uPpq97pmOTUMo1v6K73Dg8AwVf0S8GfgjnQLqWpEVceo6pjdd9+9n4qW\nnqRL6Jpr2qOAVo+qo4kq4ghNVFE5qc6Ev4AkXXOv3hJFm/zXtKYmmDnTKgDD6IZ8uH3eAVIt+SH+\nb22oakPK5K3AHAYAnV1CDzV4/CEQZWwiyspAiFMaP
GoLVjoj6Zp7TEP8B1UITQQTCXj0UVi50uL/\nDaML8mH5Pw+MEJH9RKQKOAtYmrqAiOyZMjkBWJuH/fY7oRC8WO0xNziDF6s9y9tTYJKuueeDHuOr\nlvOvw49HJQCJhHsT6CKrXudILsMoN3K2/FW1VUSmAo8AQWChqv5VRH4CrFLVpcC/icgEoBV4H/h+\nrvstBDZyVHGRej1qajx+88eZTNWVLgFcoor1NaG0b2bWQdgw8tTJS1UfBh7u9Nt/pXyfAQy8gWzT\n5H6whGLFRfJajBsHjY0e97OcEF275Wzgd8Mo8x6+XWLm4YAhKeaq8Awez4rHoGqYG0q/vA38bhg2\nmEtm0pmHRlHSOSx38uSu6+p0kVyGUW6Y5Z8JMw8HDL1pizH3nVHulJX4Z5W+OXUha90dMJiYG0bP\nKBvxz8qFn26hGQOvndowDKM7ysbnn5UL3/z8hmGUCWUj/p0bBWtqOnXyicXg7bdJBCqIS5B4hfn5\nDcMoXcrG7dOxQxBcdlm7d+fZeTFqLxuHNjXTnAhyu1zAPVrHLDzMjWwYRilSNpY/uApgxgxoaOjo\n3WlYHIXmZiQRJ0ict3QoT8Y98/qUMpbfwShzysbyT6WmBgIB1yno6GCMA3d8G4JBVKElUcXKQMii\nO0uZWMxd3JYWqKxM28U3q8gwwxjAlJ34x2LO5ROPw1ckxqM6jooHm0kEKnh5zAW8HarjlMEec0P2\n0Jcs9fXulQ/c56RJsGBB2wXcVuoYAAAcN0lEQVS3zt1GOVBWbh9oD+hJJOCYRJRAq/P/xFtaWfz8\nUL7zK8+svTJAUz/XrnUmvu8CsqAvoxwoO/FPRv2EJcJpLIFAgLgEaaGKxzRkD3sZsHpUHa0EUUCS\nP7a0tF34TKO4GUYpUXbi73mw+pIIv9bJHKbPEYi3sGXs1xlftZzng5497GXAQw0eU+WmtgpAgXhF\nZduFt9w/RjlQdj5/gOHRBW1WnwI1gz5hVtSzBr4yIRSCcYPC/KWxlu9qPQC/k46hvZYuwih1SlL8\nu4zUuPJK9LnngXa/7/qRE+1hLyOSlv3MmR5TH/VIJCAYt7z+RnlRcuLfZaRGJILOccMHC5AAHuB0\nXhkcHoAjzRi54HlunPeVKy1xq1GelJzPv6tIjQ9/sQBod/coAX5RNc0e+jLFfPtGOVNyln/GNPyx\nGDu88iLQ7u65d9/LmbXIs4e+jOnK3WcdvYxSpuTEP+PAHtEoQbTN3fNg4HSGLpptD3UZ05W4W0cv\no9QpOfFPZfXqlIc7FEKqq9CmZuLBKj5/4zRq7WEuW7oTdxvk3Sh1Sk78kw91U5PrxRsIQHU1LF/u\n4S1fjkSjVIZC1NqTXNZ0J+42iqdR6pSc+KembziSGKFElJVNIaJRD2+GxXMaju7E/bNLIjxfs5iX\nPj+R4deF7bYxSo6SE/9QCCoq4LB4jOWMo4pmmhNVrK9ZDpad3/DpatD39VdG+PycyQAcvHEZ78xf\nD97sgpTTMPqKkgv19Dw47zyoo55qGqkgTrU0U9sQLXTRjCIjOb5DZ6te7lvsPv3pve6+3vL+GyVH\nyYk/wH98eCVhbiGAujQOFRXmtDWyRr8x0X3iKgBBLdufUXKUnvhHIgy5aw4BP6wTIDDpPPP1G1kz\nfHaYd86ZhkoAFUEGDTLjwSg5Ss7nz4L2XrwAIgJ1dYUrjzEgGXLnbJhyuvXyMkqW0rP899qr4/TY\nsfbgGr0j2SgANt6vUXKUnuU/bRo89BC0trqwn+uuK3SJjIFMLEb8uHFIczNaVUXwcevqa5QGpWf5\nex6sWAHXXus+7UE1cuCt+ija1ExA4ySamnmrPlroIhlGXig9yx+6zNZlybqMnvAEIb5JFUozLVTx\nBCGsBckoBfJi+YvISSLyqoisE5HpaeZXi8jv/PnPisiwfOy3pyRTP1x9tfs0F67RHSPqPMZXLWem\nXMP4quWMqMtsMUQicOKJ7tMwekss1j9NTDlb/iISBOYDXwM2As+LyFJVXZOy2CTgA1X9goicBcwG\nvp3rvnuKJesyeorn4Q/x6TErlPl+iURgsusUzLJl7jMc7pciGiVEf2aTzYflfziwTlXfUNVm4B7g\ntE7LnAbc4X+/FxgnIkIfkanmTOZzCQYtWZeRPZl6AqeyeHHX04aRDV0NRpVv8uHz3xvYkDK9ETgi\n0zKq2ioiW4Ea4J952H8Huqo5u8rnYhi5MHFiu8WfnDaMntKf2WSLqsFXRMJAGGDo0KG92kZ3rh0b\nqN3oC5IunsWLnfCby8fIhs4BKP1poOZD/N8B9kmZHuL/lm6ZjSJSAewCNHTekKpGgAjAmDFjtPP8\nbLA87EahCIdN9I3syeSl6C8DNR8+/+eBESKyn4hUAWcBSzstsxT4nv/9m8Bjqtorce8OG5TbMIyB\nQEb/fj+F++Rs+fs+/KnAI0AQWKiqfxWRnwCrVHUpsAD4rYisA97HVRB9hrl2DMModtJ6Kfox3Ccv\nPn9VfRh4uNNv/5XyvRE4Mx/7MgzDKAXS+vdnRfstHr300jsYRgHor445RmnROXfg6ppQv8WjF1W0\nj2EMRPqzY45ResRiMCMU46iWKJdUhvjVr5a7kQf7ONzHxN8wcsR6jhu58Hp9jIeb/fHGm6u496Xl\n1N48o8/3a+JvGDkSCsHRwRhHJaI8FQwRCpnyG9lzLFGqaKaCOEozxxIF+v4eMvE3jBzxiLFcxiE0\no1JFkOXEYp71JDeyYt+6EPHbqog3NxOoqmLfulC/7NfE3zByJRol2NoMGofmRjbNqWfcI561ARjd\nE4nA4sUEL70EBg/uV2vBon0MI1dCITdqHIAquz2wgJHbYv2SnMsYwCRTwS5bBnPmQE1Nv1oJJv6G\nkSueByef3DYZ1BbOpR5wdYKlGDFSicVgzhkx/nHFXJJpDhT6PRWsuX0MIx/ssUeHyVN4kAsYRfC8\nsLl8jDZiMZh+bIw/toyjikaAtgpg/ciJDO/Hspjlbxj5oK4OqqpQ3DAV+/AOtzCZsz60Yb2MdqJR\nOKulnmoaqUCJI7zOF7hQbuH3g/s3K6CJv2HkA8+DaBT5grPdkiMV7XD3Auv1a7TxrS0Rzuc3BFAU\naKWS86Se3w4K97t70MTfMPKF58EVVwDtr/Kj9EVerzf1N4BYjOE3TKWCOIK7R9Yf8wNO/ZlXkIgw\nE3/DyCfhMP869BjAWf8VtHLCpvrtFrNcQGVINArxeNtbYaCigi9eV9ftEKF9hTX4Gkae2cU7GP3f\nFW3TndqCLRdQGdB5hC7ATVRXQ1OTS9x2440FvfAm/oaRb+rqkIULoaUFqax0jcEpWC6g0iZj5V5k\ng4ib+BtGvvEbfzM95DbUaGnTZeVeRCNNmfgbRl/QxUNeZAagkWcGSuVu4m8YBaCIDEAjz3RVuadt\nCygQJv6GUQCKSQSM/JOuci+2hn4Tf8PoZ4pNBIw+IE3tXmwN/Sb+hpFnurPqi00EjDyToXYvtrYA\nE3/DyCPZ
WPXFJgJGnslQuxdbQ7+Jv2HkkWys+mITASPPdFG7F1NDv4m/YeSRbK36pAgk0zxYJVBC\nDJDa3cTfMPJIts/96kiM9Qui3PBSiKcSnjX8lgKdG3uK/GKa+BtGnunuuV8dibH/5GP5Ii2cTCUh\nnuD5Zs8afgcyAzCEy7J6GkY/UzVvDlW0EACqaOEK5ljD7wAgYybWWAxmzkQbmyAeR5sGxsDNZvkb\nRj+zp7zbYTq00yqe/e8YtUVuKZYzGQ37WIz4ceOgqYkACVoJ0JyoYn1NiNpCF7obzPI3jH7m05dO\nAtoHfNn1443UXhKy5P5FTLooLoC36qNoUzNBEsQJ8CjHc0JgOQ81FH9FbuJvGP1NOIzccguy995t\nP2lzM9RvP+iLURwko7iCwY5RXE8QopkqWgjSTDU/YSYvVnsDwoVn4m8YhSAcpmH4YR1+alizqUCF\nMbojGcV1zTUd23JH1HmMr1rOTLmGkyqWc+iFhRmSsTeYz98wCsRbjXuwa6fpmoKVxuiOZBRXJAIz\nZ8LEiRAOw6yoRzTqMTs0MEQ/iYm/YRSIykl1ND13G5U000IVlZPaR/yyrJ/FSSQCkye778uWuc9w\neGBeo5zEX0R2BX4HDAPeBL6lqh+kWS4OrPYn31bVCbns1zBKgdqwx2oep2FxlJqJIWrDTkEGYMh4\n2bB4MZxPhIksZjETWbw4TDhc6FL1jlwt/+nAclW9TkSm+9NXpllum6qOzHFfhlFyfFTrEWvwCKXE\nBdbXQ2MjqFrWz2Ljqt0jHIMz/U9kGSt2BxiY6p+r+J8GhPzvdwBR0ou/YRidSGfhA9x2mxN+cNEl\nqZEj5g4qLMduXowCggvVPXbzYspV/D+nqn/3v28CPpdhuUEisgpoBa5T1SU57tcwBjyZYsfHtMQY\nS5QoIfYY77WJvLmD+pZuK9ZYDHbcEfEnBVyr7wClW/EXkUeBPdLM+o/UCVVVEdE0ywHsq6rviMjn\ngcdEZLWqrk+zrzB+NTp06NBuC28YA5l0GUA/tTrGpYlxVNMICD9fejmx2Gw8zwaB6Uu6rVgjEZgy\nBRIJqKiA0aNh0iQGrMOfLMRfVY/PNE9E/iEie6rq30VkT+C9DNt4x/98Q0SiwChgO/FX1QgQARgz\nZkymisQwSoK0GUCjURI0EkBRlCsSc/jjnOFwf9gGgelDuqxYYzGYOhVaW9tXOP30AS38kLvbZynw\nPeA6//OBzguIyGeAT1S1SUR2A44C5uS4X8MoCbbLABoKIQFBE9rmV/bedX7lZGVhHYHzT5cV65w5\n0NLSPh0IlETNm2sP3+uAr4nI68Dx/jQiMkZEbvWXOQhYJSL/CzyO8/mvyXG/hlGaeB5y+eVAe+6f\nz0zq6Fe+4w74zW+cm6Kn6YAyZqYsczL14OXKK2FJShNlIADz55eEvy0ny19VG4BxaX5fBZzvf38a\nij7BnWEUD7NnI8OHu6DyZDdSn1z8/tZg3DXbvYVFIjB3bseFxowZ8O6eJNbD1zCKkXA4rcjk4ve3\nBuMeEIuRuHgKotoW3QO4Rt4SwcTfMAYQHjHWfi/KE4QYUef1SLytwTh73qqPsnc8QQXO/aYI755z\nBUNKxOoHE3/DGDj4fpt9m5upq6qCuuVA1+rfOXZ9AIwrXhQ8QYhvUg00kSDAVOZz531hlsdK57yZ\n+BvGQCHVb9PY6MJ+ulCiTD7+UhGvrsi1J/SIOo/xC5fzleYojxPiGTyCJeYqs3z+hjFQCIVcByNw\n+R8WLuwybCdTD+JSJ1npXX117yKiwAn8rKjH2tNn8HzQIxAoPVeZib9hDBQ8D847D8RvgozHu1T0\nTKNPlTr5rPQeecR16gW45JLSsfrBxN8wBhZ1dTBoUFaKni52vRzi/Hta6SXPyepIx5MTjUJTk3vJ\nSiTghhtK67yZz98wBhI9bLVN9fGXS5x/T05R8pyMbnI5lTTQjFS7kxMKOXdP0vJPvmiVyjkz8TeM\ngUYPW22TjZ9vv10+cf7ZnqLk2AljNUoVzUii/eR4Mzzmz3dpfeJxqK4uLdeZib9hlAKRSNoewanW\nfkWFc4VAebUBZCIWc23mR2iMobxNK0GCAZCUkxMOQ21taYbHmvgbxkAn08CydGz8BLjgAhg6tPSE\nLBs6h39Go3BYa4xljKOKZjRQgYQvcO0qKSenVMNjTfwNY6CzePH20774d+7V20nXyoZ07R2hENRI\nPdU0EkRdIr2hQ9OeoFIcQc3E3zAGOhMntln8Cry2YUeaIzFqw17WjZ+lKG6ppOsfd/OoCEfobxBf\n+KWyIq0vrFQbyk38DWOg41v5//rFAqrXvMTwtUtJTP4D69ffyPDZ4W7dFqUqbqmEQq69Ix53oZvy\nmwiJxEUE1A/lEXF9KNIceKkmxLM4f8MoBcJhXhxyOkHiVJCgkhaGXT8lq8D0cugJ7Hnwgx84jT+S\nGPPiUxFf+BVczVBXl3bdUu0sZ+JvGCVALAbP7RgiQdC5MMBZtVkoeSmKWywGF13k/pL1X7J/3Fcl\nSoB420hpcQKs/2HmAVoyDvQywDG3j2EMcGIxJ9gtLR5vBG7kRp3irNqqaoJZKHkpZfuMxZw/f8GC\n9pEXFy5sd9U8Oy9Gy4K30VWVtCZaSBDkErmRYYPDzOhiu6UY8WPibxgDnPp6564BuEXD/CVQy7FE\neUpD/Go11M45A9591w1EkiEffSmIW7LtorHR+fWTtLT44k+Mg6eGkJYWEoEgC4Nh6rWOF6s9locK\nVerCYeJvGCXG0+rxlHoc1Rrj4IuOhYRvAj/3HOvXw+8Hhwe8hZ+OZNtFqvADVFa6N5qG6XPYtaUZ\nASTRytHHQMNJHnNDpXcussHE3zAGOHV1cNtt7b14RVzj7VclSqC1pW05BRrmLuDqQLjoo3p6E3qa\n2qehogJOPhn22MPv20CM+MoHOyzf2AgzuvL1lDgm/oYxwPE8ePzxdrEE9/3UmhByUUpmMuAd3avo\nQxZ7G3raZdvFrCgBNKWRN0jlpPTRPeWCib9hlACdffbuuwfc7EJeEgm0opId45/wntawTE/m86E7\nC1Tarsklrj5j20UohAyqRhubSEiAty6fT224CGu+fsTE3zBKmZTMZIFolBP8nsDfSdyFzAe84qsA\nejvQfNJVVFMDg16KccKmevbYg/acFsuXI9EowVCI4cX4ytPPiHZuHSkSxowZo6tWrSp0MQyjdKip\ngfffb5/+9Kdh69bClacLeurzT7qKRjXG+HedwwQe8Hs8+Fk6O70+lHI6CxF5QVXHdLecWf6GUS6c\nfDLcdRdJc08/+ohALFaU6tfT0NNoFM5tjHCjXkwFLoWpJGd28h2VQzqLbLAevoZRLtx5Jx+NGNnW\nAzieEN6qj/Z4M0U1FKRfmG9tifArnUKF33M3KfxtqRtSfEflkM4iG8zyN4wy4r5xN/HN18dRSTMt\nVPEEIerI3g1SSKs5WcZTa2LUNkSdG+uyy6C5meEiaErKhiQSCMBNN
3UoZG/bFEoNE3/DKHFShX1E\nncf4hcs5qiXKU5UhZtV53Qp66vrJYQ9VcwsX7Y1Pf0Yoxszm6RzMShIoBIIE8EdXDwRcSuZ4HBGB\no46Cgw9OO4BBKaWzyAUTf8MoYdIJ+6yoRzTqMSvkhG/WLDeA+dhElBWNIWbO9Jg5083rPAxkItHe\ng7aTNyWnMnUnwB/MifBo84XtjbiAJuIkAkECyYx08+ZBQ0OPB7YvV0z8DaOESeffnjGjo/CdWhPj\n/yVCVNKMKjy57BimP34dB09yCyXXT+krhgiMH9/uL+/OVZRqZffo7cHP1HbSAxHE76SVyv8O/Tpb\n9z+cmomhso/b7ykm/oZRwmTj3659qR6luc1ffiwreKzlaDb/uoZXOQgJnsNnpIEnK0I8Ix7xuHsL\nePhhePDBrq33zlb+vHkuy2by7aEi/eBZbaIfX7AQaWnpIPxJn34LQf5twzRiGzyqVsLyWrPme4KJ\nv2GUMD31bycFNkCCz7GZz7GZY+IrANB4kHfP/nfWbR7MczuGuOpBr9teuJ3fPBYvbh9MPuPgWX6N\noY2NBLQ9JUNS9BPASo5hBtfxbMLLuf2hXDHxN4wSJ9W/nbahta4O/c2tEG/tsF6qpe0yYcYZctcc\nhohwrAQ4Uo9ijRzMPcE6QqH0qtv5zWPiRFi5sn161CjX5tAWwRMKtdUYourn4RFaqORhxvMP9qCe\nOp7Bo7ISKv0kduUctdNbchJ/ETkTmAkcBByuqmm75IrIScAvgCBwq6pel8t+DcPoORkbWj2PwMoV\nvHHRHAKvv8ouO7UwePM6Mvb9V0U0zlhWMJYVhPU2Aqt/CdEGF36Z0uia7s2jthZer49x4KYot0+p\n4UvxlxihC9FAHKn2fUNVVWhTM02JCm7nPO6uqKNptMeqVa7tQcQNT1BXZ1E7vSVXy/8vwDeAWzIt\nICJBYD7wNWAj8LyILFXVNTnu2zCMLEha+2+/nTlhWgyPca/d76J64vCTwJWcnbibf7Ibh7C6rdcs\ntL8RtH22NMHUqe2twiIQCMDXvw7TpuH5lUASb8mVeLfMJaHKYYAizqefwBWsoaEtD8/rNSE+aPCY\nHXLrplZeyShOE/3ekZP4q+pawMXVZuZwYJ2qvuEvew9wGmDibxh9TKq1Hwy6BlbY3k2S6ptPJGA6\ns7mS2W7Ac41xLvUA/Es+zRXycwKJ9sqAYLBjOJCqm16yBB56CEaMgAMOgGnTYPVqmDMHcOkFFAjg\n3Dsq4vLwJM14z6MWqE05HovPzx/94fPfG9iQMr0ROCLdgiISBsIAQ4cO7fuSGUaJkyrqABdcAEOH\nbi+enQdCSep3MAjPxT1icbdwQGDP8OnUUQ+bNrnRUkaNcj1tm5o6xoMC2toKa9fC2rXIQw/B6NHb\nlTGBQGUVgUnnpe2UlYpZ+vmjW/EXkUeBPdLM+g9VfSCfhVHVCBABl9Uzn9s2jHKkc4NrJm3t7JuH\n9u+rV8OUKU7Xq6tdL+HtNuKnjWbLFvj5z9H49m4ibW1F9tqrw2oSDCIXXNCt6ENpZ+IsBN2Kv6oe\nn+M+3gH2SZke4v9mGEYf05NQz/QDwrQ30na1jRgeUTxqhsOWr5/O4KX17J9Yw7GsaGs4jgcqqJg2\njfX7n4zctoCtO+1FxYxpWXXOskyc+ac/3D7PAyNEZD+c6J8FfKcf9msYBr1zlXS2srvaRlKY270+\nnv8H5xNhEgt4l70IXD6Nz+Fx3C88mprCsBmqLoGo79TvqnLJZXQvIz25hnqeAfwK2B34g4i8rKon\nisheuJDO8araKiJTgUdwoZ4LVfWvOZfcMIw+oadWdlKYO7n7AbiVMLcSJhCAnw6Gtf6ySVpaXLqH\nO+7oen+WiTP/5Brtcz9wf5rf3wXGp0w/DDycy74Mw+gfemplJ4U5TXsvwaD7TBXs5LIAlZXus7v9\nWSbO/GM9fA3D6EBPrexUYd6yBW64wQl5dXX6RJuPP+6sfXDtvNDR8s+0P4v0yS82hq9hGNuRS2RN\nb9a1SJ78ke0Yvib+hmEYJUS24m9j+BqGYZQhJv6GYRhliIm/YRhpicVcuuVYrNAlMfoCi/YxDGM7\nrEdt6WOWv2EY25Eu1j8VeysY+JjlbxjGdnQV629vBaWBib9hGNvRVY9ay7NTGpj4G4aRlkw9ai3P\nTmlg4m8YRo+wPDulgYm/YRg9xvLsDHws2scwDKMMMfE3DMMoQ0z8DcMwyhATf8MwjDLExN8wDKMM\nMfE3DMMoQ4p2MBcR2Qy81YtVdwP+mefi9CcDvfxgx1As2DEUB/19DPuq6u7dLVS04t9bRGRVNqPY\nFCsDvfxgx1As2DEUB8V6DOb2MQzDKENM/A3DMMqQUhT/SKELkCMDvfxgx1As2DEUB0V5DCXn8zcM\nwzC6pxQtf8MwDKMbTPwNwzDKkJIRfxE5SUReFZF1IjK90OXpKSKyUETeE5G/FLosvUVE9hGRx0Vk\njYj8VUQuLXSZeoqIDBKR50Tkf/1j+HGhy9QbRCQoIi+JyEOFLktvEJE3RWS1iLwsIqsKXZ7eICKD\nReReEXlFRNaKSFElwS4Jn7+IBIHXgK8BG4HngbNVdU1BC9YDROQY4COgXlUPKXR5eoOI7Ansqaov\nisjOwAvA6QPsOgiwk6p+JCKVwJPApar6TIGL1iNE5IfAGODTqnpqocvTU0TkTWCMqg7YDl4icgew\nUlVvFZEqYEdV3VLociUpFcv/cGCdqr6hqs3APcBpBS5Tj1DVFcD7hS5HLqjq31X1Rf/7h8BaYO/C\nlqpnqOMjf7LS/xtQFpKIDAFOAW4tdFnKFRHZBTgGWACgqs3FJPxQOuK/N7AhZXojA0x0Sg0RGQaM\nAp4tbEl6ju8yeRl4D/izqg60Y5gHTAMShS5IDiiwTEReEJFwoQvTC/YDNgO3+e63W0Vkp0IXKpVS\nEX+jiBCRTwGLgctU9V+FLk9PUdW4qo4EhgCHi8iAccOJyKnAe6r6QqHLkiNHq+po4GRgiu8WHUhU\nAKOBm1V1FPAxUFRtkaUi/u8A+6RMD/F/M/oZ30++GLhLVe8rdHlywX9Nfxw4qdBl6QFHARN8n/k9\nwFdF5M7CFqnnqOo7/ud7wP041+5AYiOwMeWt8V5cZVA0lIr4Pw+MEJH9/IaVs4ClBS5T2eE3li4A\n1qrqfxe6PL1BRHYXkcH+9x1wQQSvFLZU2aOqM1R1iKoOwz0Hj6nqdwtcrB4hIjv5AQP4rpITgAEV\nBaeqm4ANInKA/9M4oKgCHyoKXYB8oKqtIjIVeAQIAgtV9a8FLlaPEJFFQAjYTUQ2Aj9S1QWFLVWP\nOQo4F1jt+8wBrlLVhwtYpp6yJ3CHH0EWAH6vqgMyXHIA8zngfmdLUAHcrap/KmyResUlwF2+QfoG\ncF6By9OBkgj1NAzDMHpGqbh9DMMwjB5g4m8YhlGGmPgbhmGUISb+hmEYZYiJv2EYRhli4m8YhlGG\nmPgbhmGUIf8fPq1jq/CO
0nkAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -889,7 +930,7 @@
"source": [
"Much better! The evaluation metrics we printed show that the model has a low loss and MAE on the test data, and the predictions line up visually with our data fairly well.\n",
"\n",
- "The model isn't perfect; its predictions don't form a smooth sine curve. For instance, the line is almost straight when `x` is between 4.2 and 5.2. If we wanted to go further, we could try further increasing the capacity of the model, perhaps using some techniques to defend from overfitting.\n",
+ "The model isn't perfect; its predictions don't form a smooth sine curve. For instance, the line becomes almost straight when `x` is between 4 and 5. If we wanted to go further, we could try further increasing the capacity of the model, perhaps using some techniques to defend from overfitting.\n",
"\n",
"However, an important part of machine learning is knowing when to quit, and this model is good enough for our use case - which is to make some LEDs blink in a pleasing pattern.\n",
"\n",
@@ -898,7 +939,7 @@
"\n",
"Since this model is going to be deployed on a microcontroller, we want it to be as tiny as possible! One technique for reducing the size of models is called [quantization](https://www.tensorflow.org/lite/performance/post_training_quantization). It reduces the precision of the model's weights, which saves memory, often without much impact on accuracy. Quantized models also run faster, since the calculations required are simpler.\n",
"\n",
- "The TensorFlow Lite Converter can apply quantization while it converts the model. In the following cell, we'll convert the model twice: once with quantization, once without:"
+ "The TensorFlow Lite Converter can apply quantization while it converts the model. In the following cell, we'll convert the model twice—once with quantization, once without:"
]
},
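The cell above describes quantization as trading weight precision for size and speed. To make that concrete, here is a toy sketch of the affine mapping commonly used for 8-bit quantization; the weight values are invented and this is only an illustration, not the TensorFlow Lite converter's exact algorithm:

```python
import numpy as np

# Toy affine quantization: real_value ~= scale * (quantized - zero_point).
# The weights below are made up for illustration.
weights = np.array([-0.51, 0.0, 0.27, 0.98], dtype=np.float32)
scale = float(weights.max() - weights.min()) / 255.0
zero_point = int(round(-weights.min() / scale))
quantized = np.clip(np.round(weights / scale) + zero_point, 0, 255).astype(np.uint8)
restored = (quantized.astype(np.float32) - zero_point) * scale
print(quantized)  # [  0  87 133 255] -- one byte per weight instead of four
print(restored)   # close to the original float values
```

Each weight now occupies a single byte, and the reconstruction error stays within one quantization step, which is why accuracy usually suffers little.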
{
@@ -906,7 +947,11 @@
"metadata": {
"id": "1muAoUm8lSXL",
"colab_type": "code",
- "colab": {}
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 34
+ },
+ "outputId": "4c008dac-7629-471d-bfed-2853f4fca115"
},
"source": [
"# Convert the model to the TensorFlow Lite format without quantization\n",
@@ -918,14 +963,49 @@
"\n",
"# Convert the model to the TensorFlow Lite format with quantization\n",
"converter = tf.lite.TFLiteConverter.from_keras_model(model_2)\n",
- "converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]\n",
+ "# Indicate that we want to perform the default optimizations,\n",
+ "# which includes quantization\n",
+ "converter.optimizations = [tf.lite.Optimize.DEFAULT]\n",
+ "# Define a generator function that provides our test data's x values\n",
+ "# as a representative dataset, and tell the converter to use it\n",
+ "def representative_dataset_generator():\n",
+ " for value in x_test:\n",
+ " # Each scalar value must be inside of a 2D array that is wrapped in a list\n",
+ " yield [np.array(value, dtype=np.float32, ndmin=2)]\n",
+ "converter.representative_dataset = representative_dataset_generator\n",
+ "# Convert the model\n",
"tflite_model = converter.convert()\n",
"\n",
"# Save the model to disk\n",
"open(\"sine_model_quantized.tflite\", \"wb\").write(tflite_model)"
],
"execution_count": 0,
- "outputs": []
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "2512"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 15
+ }
+ ]
+ },
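The `2512` in the execution result above is just the number of bytes that `write()` reported for the quantized file. A quick disk-size comparison can be sketched as follows; note that the filename `sine_model.tflite` is an assumption here, since the cell that saves the unquantized model is not shown in this hunk:

```python
import os

# Hypothetical size check; "sine_model.tflite" is assumed to have been
# written by the earlier, unquantized conversion.
basic_size = os.path.getsize("sine_model.tflite")
quantized_size = os.path.getsize("sine_model_quantized.tflite")
print("Basic model:     %d bytes" % basic_size)
print("Quantized model: %d bytes" % quantized_size)
print("Saved:           %d bytes" % (basic_size - quantized_size))
```

For a model this small the absolute savings are modest, but on a microcontroller every kilobyte of flash matters.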
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "g3n1TwSS091-",
+ "colab_type": "text"
+ },
+ "source": [
+ "To create a quantized model that runs as efficiently as possible, we have to provide a \"representative dataset\"—a set of numbers that represent the full range of input values the dataset the model was trained on.\n",
+ "\n",
+ "In the above cell, we can use our test dataset's `x` values as a representative dataset. We define a function, `representative_dataset_generator()`, that uses the `yield` operator to return them one by one."
+ ]
},
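To see exactly what the converter receives from `representative_dataset_generator()`, the standalone sketch below reproduces the generator with a hypothetical stand-in for the notebook's `x_test`:

```python
import numpy as np

x_test = np.array([0.77, 1.98, 3.25], dtype=np.float32)  # hypothetical stand-in values

def representative_dataset_generator():
    for value in x_test:
        # ndmin=2 lifts each scalar to shape (1, 1), the 2D array the converter
        # expects, and the list wrapper supplies one entry per model input
        yield [np.array(value, dtype=np.float32, ndmin=2)]

for sample in representative_dataset_generator():
    print(sample[0].shape, sample[0].dtype)  # (1, 1) float32 each time
```

The converter runs these samples through the model to observe the real range of activations, which it needs in order to pick quantization parameters.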
{
"cell_type": "markdown",
@@ -941,13 +1021,13 @@
{
"cell_type": "code",
"metadata": {
- "id": "-J7IKlXiYVPz",
+ "id": "xvluIurpelrQ",
"colab_type": "code",
- "outputId": "0c10f56c-dbd7-4cc3-e332-30ad673769e5",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 281
- }
+ },
+ "outputId": "a9a7d9ae-c17c-4f68-dda5-40a95b923647"
},
"source": [
"# Instantiate an interpreter for each model\n",
@@ -958,25 +1038,33 @@
"sine_model.allocate_tensors()\n",
"sine_model_quantized.allocate_tensors()\n",
"\n",
- "# Get the input and output tensors so we can feed in values and get the results\n",
- "sine_model_input = sine_model.tensor(sine_model.get_input_details()[0][\"index\"])\n",
- "sine_model_output = sine_model.tensor(sine_model.get_output_details()[0][\"index\"])\n",
- "sine_model_quantized_input = sine_model_quantized.tensor(sine_model_quantized.get_input_details()[0][\"index\"])\n",
- "sine_model_quantized_output = sine_model_quantized.tensor(sine_model_quantized.get_output_details()[0][\"index\"])\n",
+ "# Get indexes of the input and output tensors\n",
+ "sine_model_input_index = sine_model.get_input_details()[0][\"index\"]\n",
+ "sine_model_output_index = sine_model.get_output_details()[0][\"index\"]\n",
+ "sine_model_quantized_input_index = sine_model_quantized.get_input_details()[0][\"index\"]\n",
+ "sine_model_quantized_output_index = sine_model_quantized.get_output_details()[0][\"index\"]\n",
"\n",
"# Create arrays to store the results\n",
- "sine_model_predictions = np.empty(x_test.size)\n",
- "sine_model_quantized_predictions = np.empty(x_test.size)\n",
+ "sine_model_predictions = []\n",
+ "sine_model_quantized_predictions = []\n",
"\n",
"# Run each model's interpreter for each value and store the results in arrays\n",
- "for i in range(x_test.size):\n",
- " sine_model_input().fill(x_test[i])\n",
+ "for x_value in x_test:\n",
+ " # Create a 2D tensor wrapping the current x value\n",
+ " x_value_tensor = tf.convert_to_tensor([[x_value]], dtype=np.float32)\n",
+ " # Write the value to the input tensor\n",
+ " sine_model.set_tensor(sine_model_input_index, x_value_tensor)\n",
+ " # Run inference\n",
" sine_model.invoke()\n",
- " sine_model_predictions[i] = sine_model_output()[0]\n",
- "\n",
- " sine_model_quantized_input().fill(x_test[i])\n",
+ " # Read the prediction from the output tensor\n",
+ " sine_model_predictions.append(\n",
+ " sine_model.get_tensor(sine_model_output_index)[0])\n",
+ " # Do the same for the quantized model\n",
+ " sine_model_quantized.set_tensor(sine_model_quantized_input_index, x_value_tensor)\n",
" sine_model_quantized.invoke()\n",
- " sine_model_quantized_predictions[i] = sine_model_quantized_output()[0]\n",
+ " sine_model_quantized_predictions.append(\n",
+ " sine_model_quantized.get_tensor(sine_model_quantized_output_index)[0])\n",
+ "\n",
"\n",
"# See how they line up with the data\n",
"plt.clf()\n",
@@ -993,7 +1081,7 @@
{
"output_type": "display_data",
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAIABJREFUeJzsnXl4FFXWuN/bnbCELbIMCiHpqKzZ\nISBkYXGZDFECIhFkEWRcUFHHJCAOIo7K/DAkcRkc/XRGXAi7DIQx8+GHbAmRkTWYIMiSTtgUBAIB\nAln6/v6o7k4n6ex7ct/n6ae7q27dulV169Stc849R0gpUSgUCkXLQtfQDVAoFApF/aOEv0KhULRA\nlPBXKBSKFogS/gqFQtECUcJfoVAoWiBK+CsUCkULRAn/BkYIMUUI8W1Dt8OCEKKtEGKTEOKKEGJt\nPewvXQgxsq73Ux8IIQxCCCmEcKhE2RlCiOT6aFdlEEK4CiGuCSH0Dd2W+kAIMVIIcboO6m1U17U8\nmo3wF0JMFkLsNXfgc0KI/wghghq6XRUhpYyXUv6+odthwwSgO9BFShle1zuTUnpIKbfX9X4U5SOl\nzJJStpdSFtakHiHEdiHEk7XVLpt6K/1gVVSOZiH8hRARwHvAX9EElyvwd2BsQ7arIhppR3YDfpZS\nFtTlThrpsSsULQcpZZP+AJ2Aa0B4OWVaoz0czpo/7wGtzetGAqeBucB54BwwDggFfgYuAX+2qesN\nYB2wGsgB9gM+NuvnASfM6w4DD9usmwHsAt4FLgJvm5clm9cL87rzwFXgR8DT5ji/BC4AmcBrgM6m\n3mQgBrgMZACjyzkf/YHtQDaQDoSZl/8FyAPyzef0jyW26wHkAp1tlvkBvwGOwF3AVvOx/QbEA842\nZY3AK8Ah4BbgYF52fyWuk/U82dQngbvNv0PN5zsHOANElXHsttcgGzgJBJiXnzKf++kl+ldZ511v\nPue/met53twmB5tt/4nWp86Yr7e+5PGUd93ttP8J4CfzcZ4Enimxfq55f2eBJ0ucoweBA+Z9nALe\nsNnOUKLt24G3zOcqB/gW6Gpe1wZYbr7O2cAetEHXIqAQuInWf5aWcQxrgV+AK8BOwMNmXVsg1nyu\nr6D167ZAlrl918yfYWj34vJyjqHMc4X5vi+jfR8BMSWWbQQiKnmPJ9trj815fdLm/0xzGy8DmwG3\nqvaJasvO2qysIT7AH4AC2xNsp8ybwG7gd0A3IAV4y6YTFACvowmwp9Bu9BVAB8ADTeC5m8u/gSYc\nJ5jLR6EJW0fz+nA0IakDJgLXgTtsOkYB8AKa4GtborOEAPsAZ/PF72+z7ZfmDtjB3Kl+xiyczXXk\nm9uuB55Fu/mFnXPhCBwH/gy0Au41d+K+Nse3vJxzuRV4yub/EuBj8++7gQfQhHg3tBv7PZuyRuAg\n0Atoa7Ps/kpcJ+t5sqnPVrCdA4LNv28DBpbRfss1eMJ8rt5GEywfmtv9e/P5aF+J8z4LOGI+ns7A\nNooLn38B/wO0Mx/TD5gFUGWvu532P4j2kBXACOCG5VjR7oVf0PqsE5qAtj1HIwEvtL7pDfwKjLMn\nqNCE1AmgD1o/3Q4sNq97Bthk3oceGAR0tNnuSXtttzmGmebzaXnYH7RZ96G5jp7mugPM5Yq1z15f\ntXMM5Z2rkZQt/IejPRyFTX/KBXpU8h6vlPBH00wcN19vB7SBRUpV+0S1ZWddCub6+ABTgF8qKHMC\nCLX5HwIYbTpBLkUjsg7mC3aPTfl9NjfJG8Bum3U6bASPnX0fBMbadIysEuttO8u9aMJlKObRpXm5\nHm1EPsBm2TPAdps6jtusczIfw+122hOMJiBs61+JeRRIxcL/SWCr+bcw3yTDyyg7Djhg898IzCxR\nxkiR8C/vOlnPk816W8GWZT4nHSvoCzOAYzb/vcz1dLdZdhHwrcR53wrMsln3e3NdDmgj4VuYH3Lm\n9Y8B2yp73SvZ/zcAL5l/fwb8P5t1d9ueIzvbvge8a/5toLTwf82m7HPA/5p/z0R7MHvbqXM7FQj/\nEuWdzfvthHYv5WLzJm1Trlj77PVVe2XKOVcjKVv4C3N/Gm7+/xTmPl9G+ZL3eGWF/3+webs2H/8N\nNNVrtftEZT/NQed/EehagQ65B9prpIVM8zJrHbLI0JVr/v7VZn0u0N7m/ynLDymlCU1t1ANACPG4\nEOKgECJbCJENeAJd7W1bEinlVmAp2ujnvBDiEyFER/P2jnaOoafN/19s6rlh/mnbZgs9gFPmdpdV\nV3l8DQwTQtyBNkIyAUkAQojuQohVQogzQoiraCPPriW2L/P4qfg6lccjaKqfTCHEDiHEsHLKlry2\nSCntXe+KznsPih+PbTk387bnbPrC/6C9ARSjnOteCiHEaCHEbiHEJXOdoRSd45LtOVVi23uEENuE\nEBeEEFfQ3lxKXh9bfrH5fYOi/vQVmopilRDirBAiWgjhWE49tm3QCyEWCyFOmPuI0byqq/nTBm0Q\nUGMqOFdlIjVJvArtYQ0wGU2Faam3onu8srgB79vUcwntwdOzKn2iujQH4f892ghrXDllzqKdaAuu\n5mXVpZflhxBCB7gAZ4UQbsCnwGw0bxlnIA3tglqQ5VUspfxASjkIGID2yj0HTaecb+cYzlSj7WeB\nXuZ2V7kuKeVlNP3vRLSbYpX5ZgHN4C4BLyllR2AqxY8dyj/+8q7TdbQ3GgCEELeXaNceKeVYNOG6\nAVhTmeOpgIrO+zls+oJ5nYVTaP2yq5TS2fzpKKX0sLejMq57MYQQrdEevjFobyrOQCJF5/gcWl+0\n0Kt4DawAEoBeUspOwMeUvj4VIqXMl1L+RUo5AE0t8xDwuGV1BZtPRlN33I822jeYlwu0830TTVVT\nard2lhXrE4C1T1TiXFXESmCC+Z6+x1wXlbzHbdtHWW1E6yPP2PQPZyllWyllClSuT9SEJi/8pZRX\n0PT1HwohxgkhnIQQjuanfrS52ErgNSFENyFEV3P55TXY7SAhxHjz28af0G7y3Wi6XYlmM0AI8QTa\nqKBSCCEGm0dnjmgd5yZgMr+VrAEWCSE6mDtgRDWP4b9oo7i55vM0EhiDNtKpLCvQbvYJ5t8WOqAZ\n464IIXpS9c5a3nVKBTyEEL5CiDZor/wACCFamedLdJJS5qMZyEzUkEqc9zXAi0IIFyHEbWiGQMu2\n59AekrFCiI5CCJ0Q4i4hxIiS+ynruttpUis0/fcFoEAIMRpN1WRhDfCEEKK/EMIJWFBi+w7AJSnl\nTSHEEDRBXGWEEKOEEF7mOQFX0R6Qlvb+CtxZzuYd0O6Xi2hC8a+WFea30c+AOCFED/NbwjCzIL9g\n3odt3QeB4eY5Cp2AV23WVXSuykVKeQDtYfQPYLOUMtu8qtL3uJTyAtpAYar5WGZS/MH2MfCqEMLD\nXFcnIUS4+Xdl+0S1afLCH0BKGYt2U
76GdlFOoT2ZN5iLvA3sRfMy+RHNQ+ftGuxyI9rI9zIwDRhv\nHg0dRvNU+B7tJvBC85aoLB3RRhWX0VQIF9EMqqAZia+jeS0kowndz6racCllHpqwH43Wuf8OPC6l\nPFKFahKA3mi2llSb5X8BBqJ5aXwDrK9i88q8TlLKn9EMwluAY2jnwJZpgNGsSpiFZguqDco775+i\nqT9SzW0tebyPowmhw2jXdB1wh519lHfdrUgpc4AX0YT8ZTThnWCz/j/AB2iG5+NoAxLQhC1oevs3\nhRA5aA/W6r4d3W4+lqtonio70FRBAO+jjZgvCyE+sLPtl+ZjPIN2XnaXWB+Fdu33oKlB3kHTed9A\n8ybaZVaTDJVS/h+a190hNLvcvy2VVHSuKskKtDcU6wCnGvf4U2iDoItohvgUm7r+ZT6+VeZ+m4Z2\nX0Il+0RNsFizFZVECPEGmgFtakO3RaEoDyFEfzSB0lrW8bwNRdOjWYz8FQqFhhDiYSFEa7Ma6h1g\nkxL8Cnso4a9QNC+eQZsYdAJtwtWzDdscRWNFqX0UCoWiBaJG/gqFQtECabTBtbp27SoNBkNDN0Oh\nUCiaFPv27ftNStmtonKNVvgbDAb27t3b0M1QKBSKJoUQIrPiUkrto1AoFC0SJfwVCoWiBaKEv0Kh\nULRAGq3OX6FoTOTn53P69Glu3rzZ0E1RKABo06YNLi4uODpWKqBqKZTwVygqwenTp+nQoQMGgwEh\nqhwIU6GoVaSUXLx4kdOnT+Pu7l6tOpTap5kQHw8GA+h02nd8fEVbKKrCzZs36dKlixL8ikaBEIIu\nXbrU6E1UjfybAfHx8PTTcMOcwiUzU/sPMKW2YlsqlOBXNCpq2h/VyL8ZMH9+keC3cOOGtlyhUCjs\noYR/MyAry/7yzMziqqDnnlOqoabOhg0bEEJw5Ej56Rc+//xzzp6tfrK67du389BDD1V7e0XjRwn/\nZoCrq/3lQmgPACm1748+Kv7/6afVA6CuqCsbzMqVKwkKCmLlypXllqup8Fc0f5TwbwYsWgROTsWX\nCaEJ+fK4cQOmTgUHB628ehuoHSw2mNp+0F67do3k5GT++c9/smpVUdbNd955By8vL3x8fJg3bx7r\n1q1j7969TJkyBV9fX3JzczEYDPz2228A7N27l5EjRwLwww8/MGzYMPz8/AgICODo0aM1a6SiyaAM\nvs0Ai1F3/nxNBeTqqgmcylJYqH0rQ3HtUJ4NpibndePGjfzhD3+gT58+dOnShX379nH+/Hk2btzI\nf//7X5ycnLh06RKdO3dm6dKlxMTE4O/vX26d/fr1IykpCQcHB7Zs2cKf//xnvv766+o3UtFkUCP/\nJkJFaoQpU8BoBJNJexOoriOAMhTXnLJsMGUtrywrV65k0qRJAEyaNImVK1eyZcsWnnjiCZzMr36d\nO3euUp1XrlwhPDwcT09PXn75ZdLT02vWSEWTQQn/JkBV1Qjz51es8imPkkJKzSGoGmXZYMpaXhku\nXbrE1q1befLJJzEYDCxZsoQ1ayqff93BwQGTyQRQzDd8wYIFjBo1irS0NDZt2qRmMLcglPBvAlTV\nlbOmI0xbIVVX+uvmjD0bjJOTtry6rFu3jmnTppGZmYnRaOTUqVO4u7vTqVMnli1bxg1zB7l06RIA\nHTp0ICcnx7q9wWBg3759AMXUOleuXKFnz56AZiRWtByU8G8CVFWNUJMRZkkhVZUHj3pD0JgyBT75\nBNzcNPWbm5v2vyb6/pUrV/Lwww8XW/bII49w7tw5wsLC8Pf3x9fXl5iYGABmzJjBrFmzrAbfhQsX\n8tJLL+Hv749er7fWMXfuXF599VX8/PwoKFB53lsUUspG+Rk0aJBUaLi5SamNu4t/3Nzsl1++XEoh\n7G9T3sfNTdvWlrLqEUIr6+am/e7SRUpHx+JlnJxK19dUOXz4cEM3QaEohb1+CeyVlZCxtTLyF0J8\nJoQ4L4RIK2O9EEJ8IIQ4LoQ4JIQYWBv7bSlUVY0wZUrVdf5CaPWVHJ2W9RbRuXNxddDFi5CfX7yM\nMh4rFI2X2lL7fA78oZz1o4He5s/TwEe1tN8WQXXUCF26VG0fUtrX5Zf14IHS6iB7ZGUpdZBC0Rip\nFeEvpdwJXCqnyFjgS/NbyW7AWQhxR23su6Vg68ppNFZff6wr54rbG6mX9eC5VN7VtqHkG4IyGCsU\njYP6muTVEzhl8/+0edk520JCiKfR3gxwrYnVUsHFSf1ok9uat7cKIoypPBg4nH0uNzh/1484XunB\nXZk9udjhJl1y2tD7N8FWz4v0+OV3/EJ3QhcNJHH+XGtdU6aUftjMn1/xRDJHR7h8WXtg2VIbE54U\nCkXNaFQzfKWUnwCfAPj7+9fAU71l0e1PoYica3Q5D6CdNkeXAm72OkrU43qij/nzW7fjmG47C1JP\nftcMjjifBod8LhS24og+D/Jbc+x3RjA5cH9u+bNCQVMH2YaRtocQpQW/hZq6oyoUippRX66eZ4Be\nNv9dzMsUtUDAr9240CuZI94/cMR7P0d8/kt+9xNgEiAKOd9nryb4TQ5wqx2dTwwEh3xtvT5Pq0QH\nmByIXeVOxPKlFe7Tog6y8Roshl4PeXllb295sVP2AIWiYagv4Z8APG72+hkKXJFSnqtoo5aCrQDs\n2lX7VEUYblwbT9jmIHDMA/1NTbAD6MwvT0L7BCcHEvyDH5fu3o/uSg/QS+s6HG4R/F8/IoypxYbl\noYuiiXstppiEjnsthtBF0UyZAl98Yd8gbIkXZA9HR+3NQU0gqxqnT59m7Nix9O7dm7vuuouXXnqJ\nvDKesGfPnmXChAkV1hkaGkp2dna12vPGG29Y5xXUJbb7ef3119myZUuZZQ8ePEhiYqL1f0JCAosX\nL67zNjZFasvVcyXwPdBXCHFaCPFHIcQsIcQsc5FE4CRwHPgUeK429tscKCkAL17UPlUShoWFbNyd\nRMcsL9CbigS65SMFSEgK/J6kIQfofHwgpk5noVBbjgQKWpN0zwHiDD7F/Dvvz9URlbeYOOEMUhIn\nnInKW8z9uVrXKcsg7OZWdnM7dtS2K28CWZN/I6jlA5BSMn78eMaNG8exY8f4+eefuXbtGvPt+NIW\nFBTQo0cP1q1bV2G9iYmJODs716ht1aG6E8refPNN7r///jLXlxT+YWFhzJs3r1r7avZUZjJAQ3xa\nyiSvLl0qN/mqXPR6GTY0WLJQSBboJAsp/vmzk2z/hIf2+3W99j3fUft+rZW5TGvJn50k8zrK2PlL\niup2c5OxBh8p5nSRwaNGSDGni4w1+JTZKMvEr4qOqaJJaE5OjWvCWJUmeS1fXusHsGXLFhkcHFxs\n2ZUrV2Tnzp3l9evX5bJly+SYMWPkqFGj5PDhw2VGRob08PCQUkp5/fp1GR4eLvv37y/HjRsnhwwZ\nIvfs2SOllNLNzU1euHBBZmRkyH79+sknn3xSDhgwQD7wwAPyxo0bUkopP/nkE+nv7y+9vb3l+P
Hj\n5fXr16WUUi5cuFAuWbJElmT69OnymWeekYMGDZK9e/eWmzZtklLKUm2UUsro6Gjp7+8vvby85Ouv\nv26t4+2335a9e/eWgYGBctKkSdb9TJ8+Xa5du1ZKKeUPP/wghw0bJr29veXgwYNldna27NWrl+za\ntav08fGRq1atksuWLZPPP/+8lFLKjIwMOWrUKOnl5SXvvfdemZmZaa3zhRdekMOGDZPu7u7W+s+e\nPSuDg4Olj4+P9PDwkDt37qz29asrGnySl6J6xMdro/yKyHQpW/UCMDZ8CgkhyZDfCgrbQIGjtqEE\nCtqAhGs9jtM+ywMKHHD8zZ1+qffQ7Wd/+h0cQtjmYNqdv4t+aUPod2k0W9raWGmzsogwphK015Ok\nETsI2utZSjVkezyWt5iKKG8Sml7fxNNS1kFezfT0dAYNGlRsWceOHXF1deX48eMA7N+/n3Xr1rFj\nx45i5f7+979z2223cfjwYd566y1rjJ+SHDt2jOeff5709HScnZ2tMYDGjx/Pnj17SE1NpX///vzz\nn/+ssL1Go5EffviBb775hlmzZlkDxtm28dtvv+XYsWP88MMPHDx4kH379rFz50727dvHqlWrrKP4\nPXv2lKo/Ly+PiRMn8v7775OamsqWLVto164db775JhMnTuTgwYNMnDix2DYvvPAC06dP59ChQ0yZ\nMoUXX3zRuu7cuXMkJyfz73//2/qmsGLFCkJCQjh48CCpqan4+vpWeNxNiUbl7dPSqJQsCIzGl3Si\n8r4B4UKEzGTsHa4kyIXE5v4FgJTuF+h2KqiYt4/OBJk9LtHrl84U4sAt5+549CvuwlkpXF2JE84k\n+6cRvGMEyf5p9G8XBDoHnvIMJCI9BYA4jwDevrsXN/wGwq4q7sMGJ6eyPYiajIdQXcV0roAHHnjA\nbkjn5ORkXnrpJQA8PT3x9va2u727u7tVwA0aNAij0QhAWloar732GtnZ2Vy7do2QkJAK2/Loo4+i\n0+no3bs3d955pzXtpG0bv/32W7799lv8/PwALVnNsWPHyMnJ4eGHH7aGqQ4LCytV/9GjR7njjjsY\nPHgwoD0IK+L7779n/fr1AEybNo25c4v66bhx49DpdAwYMIBff/0VgMGDBzNz5kzy8/MZN26cEv6K\n2qMyssD/nI59j3zDmKQBRIUfZunxADK8kwnbHETEuaXwdhQX3kskPt6+770l06ubGyR+VfU2xk2d\nTVTeYmLWuhBh3EFchg+REw+BvoDI/g5w3QeAyDFpINPwW+3PATv1VJRZTAjN1LBoUdlzCJrM1I+y\nsunU4AAGDBhQSod/9epVsrKyuPvuu9m/fz/t2rWrdv0ArVu3tv7W6/Xk5uYCWpC4DRs24OPjw+ef\nf8727dsrrEuUSChh+W/bRiklr776Ks8880yxsu+99151D6Ha2B67NHfU4cOHs3PnTr755htmzJhB\nREQEjz/+eL23ra5Qap96pKQNsKy8G0IUGU+3XVlKzFoXNgUfpsNvPcnwScH90DA27k6yPj0qo26p\n7qBzS1sTMa3mESGzQQgiZDax//ak36GBICByWjqR09JAFBC72p19xjn8xRBIh4cmQaCmlnJzg6++\nKtst1M2t+MzlugiJXK/UwQHcd9993Lhxgy+//BKAwsJCIiMjmTFjhnWEXBaBgYHW2P+HDx/mxx9/\nrNK+c3JyuOOOO8jPzye+kobrtWvXYjKZOHHiBCdPnqRv376lyoSEhPDZZ59x7do1AM6cOcP58+cZ\nPnw4GzZsIDc3l5ycHDZt2lRq2759+3Lu3DmrSignJ4eCgoJSoaxtCQgIsKa/jI+PJzg4uNxjyMzM\npHv37jz11FM8+eST7N+/v1LH3lRQI/96wiKgLSqNzExo1Upze7QGRAuMxv+cjm1XltL+UhbgSmwH\nZ77r2QnDsXZk+KbQMdMb491HiTP4aAIZ+yrmklR30GlVE70dZV0WodMRkS4Zfn0ESSMs+uVb7Li9\nI+DDwolpINLw3N6aw8HRhHrO5emn7bt/2pOJ9tJS2gs612ipgwMQQvCvf/2L5557jrfeeguTyURo\naCh//etfK9z2ueeeY/r06QwYMIB+/frh4eFBp06dKr3vt956i3vuuYdu3bpxzz33lClcbXF1dWXI\nkCFcvXqVjz/+mDZt2pQq8/vf/56ffvqJYcOGAdC+fXuWL1/OwIEDmThxIj4+Pvzud7+zqnZsadWq\nFatXr+aFF14gNzeXtm3bsmXLFkaNGsXixYvx9fXl1VdfLbbN3/72N5544gmWLFlCt27dWLZsWbnH\nsH37dpYsWYKjoyPt27e3PnibDZWxCjfEp7l5+5TlAdOlS1FY5CCPJUXeNGD1svEIDZIsFNJ9XIAU\nc7rIsKHBWjmzV05lPGdq1VPG7AHEvI6SP7eVvNa6yLNogaNkXkfN++iVjrLz48GyzX3v2G2XXt90\nQj435ZDOBQUFMjc3V0op5fHjx6XBYJC3bt2qs/3ZeuQo6hbl7dMEKEvtcrFfNC9OjcHkaiApfQ4x\na12InJhB58d9iQo/zZikAaT77idscxAnN6RYVUAeSQ9avXLKG9XXRiKRksRNnU3kpAyQELuiD7HL\n+4HJUZtToM+ndXZ3Ekakgr6AS3ek0v+E/W5mMjWh0XwT5saNGwQFBeHj48PDDz/M3//+d1q1atXQ\nzVI0MErtU0/YtQFODsU120RU3l7Nk4dMPhvQAVpf4/KdBwneMYJ8vSRmRW8eMWZhQjDemM3ytfP4\n6S4TP5pVMvbi7Dg51b7Qt/DlZRMds0YTmXaKl40pvGvw0dxLRSEIE7duP6aFjshvS+xqd142ziGT\npYw3zOZAT5PVG6jJGHCbOB06dGDv3r31tj+VDrJpoEb+9YQ9G6DnyW5kDf6WAen9iQo/TZepA0kf\nnAyFjla3yvvPXCE8Mxt3jOgx4Y6Ro+ej+McTxaNu2s6qtfjJW2bK1jbZ38zl6r9XsdC4i0GGJURO\nzADpQNi3gVBgHlHqJDjcZMftHRHAeoMzB8MX41mYDoHRCAGhobXfNoVCUTmU8K8nigVCmxyK59Dp\nHNr9JWGbg0gfvAupK+DS3fvB5EhsfD92bttBzFoXosJPs+Sx2RUmcrH1krEYVusqVo6tCutATxMd\n0kczZruPpuqRDubJZjrQmUgIScJxthuRkzI0FVbwN4wu/F/aPziJ/0mLtratyYdzUCiaGErtU48s\nSYnGafR+emZfJy3kfxlHMCN+uUpCfmtoewXy20BhkS42QmZDq3lsGWDCWAlhWN7E0tpU/xRTYe2a\nSw6w/aFJoN8HhQ7Eru4NQOTkY6DPpaBrJhTqSBiZSth2HxJG7gMJfqv9rRPdSnpCPf209lvZBBSK\nukHI8mbeNCD+/v6yPvWUNcUyycqeZ1/oomi+y1rNnafbccQ7FSR0N/bh1342x3fDGZyu4LEnkMMe\nP2m+9TbulZVBp7M/kaq8uPrVoaTbKgCB0XS4bT9Ra
adYYNRm/Y4bGkzCfT8gbnZEdrgAJrSHW0Eb\nYle7s/32TnzjkUMr4yRufld6VrCbm+b73xj46aef6N+/f0M3Q6Eohr1+KYTYJ6WsMCmHUvvUAhWF\nJu64Q0delyMcGZyMxyFv0BcUCX4B3HBGLsm2qoAGZPgXj69TScoyoNa2YbVkJE9AewMw2wF0SAYZ\nlrAp+DBh3w0BnQlx9Xdab3PM47Yzd7Hj9o5sCtmJqftR7r6ebp0QZktmplIB2dK+fftSyz7++GOr\n//nnn3/O2bNn67tZxVDhl5sOSvjXAmWpW156SYvNv/j/lhK7sjfktdUMug65ReGWczuB0xXGDg1m\n4+4kwo5O49eeuqrH4KF+Z8ba5hS2F775QE8TPikPsin4MGOSBiBb3bSGkL581wESQpIg34mwrYNI\nD/4GvzP2u2JTjPEfHQ3bthVftm2btry2mTVrljXkQF0JfxV+uXmihH81KGmcLCuswsWQUG79fjhf\nG5yJMKYSvHuwJvB1Uou/VugISDz2BJIQkszYSY+zceUXXHgv0X6FFVBWbP261pvbfegcmIv08WDM\nqQdJGKmpumK/8kZ/uZf1wSdutSNhZCoxa13YZ5xDBgb8DDGl3gKaVERPYPBgePTRogfAtm3afzsT\nVWuMZaS9bt069u7dy5QpU/D19SU3N5d9+/YxYsQIBg0aREhICOfOlc6fNGPGDGbNmoW/vz99+vTh\n3//+N6A9SMLCwrj33nu5776VBShvAAAgAElEQVT7AFiyZAmDBw/G29ubhQsXWutYtGgRffr0ISgo\niKNHjxar2xKPaM+ePQQEBODj48OQIUO4cuUKr7/+OqtXr8bX15fVq1fz+eefM3v2bECLCnrvvffi\n7e3NfffdR5bZy2DGjBm8+OKLBAQEcOedd1rrP3fuHMOHD8fX1xdPT0+SkpJq/2Q3M5TwryL2VDwl\nYlhZ8TzZjWt9komafIyxQ4NJCkouSp4CeOy7BwSkex3CI/33pHS/UOP22Y7ILbFy6pqyHjoHP5xL\nvqcH/S6NJma1Oztu70jhbafApAeJZgfQ3+RTzw68a/CxuoP+5czGUg+BJhPRExg1Ctas0QT+669r\n32vWaMvrigkTJuDv7098fDwHDx7EwcGBF154gXXr1rFv3z5mzpxpN/ELqPDLLRXl7VNF7Kl47NrM\nA6OZfuYQSZuDSAhJ0tQcABI89gaR7rOf9MG76LcniF8de+DQcyAX3qt+KOSGZsoU+w8ai/pq7GPT\nSej7pVXVk3DvPnC8AQ55HPHeQ6RvIRS20tRjXNfmBCQ9SFpgNOya2+QmhI0aBc8+C2+9BQsW1K3g\nt8fRo0dJS0vjgQceALRAcHfccYfdsir8cstEjfyrSGVHoH5ndMwNP8WIX66iz3axqjrcfwwgLTGZ\nmJW9af9zEEec23N54yqOfT63Sem1q4qWcyCYsK2D2BR8mNgVvQnbHIzjBXct9aRDAbS6wVK/dkVh\nLcy2gCYV0dPMtm3w0Uea4P/oo9I2gLpGSomHhwcHDx7k4MGD/Pjjj3z77bd2y1Yl/LKlvuPHj/PH\nP/6x7g6gHMoLv9yzZ09mzJjR/IKw1QFK+FeRyo5A1xu1UMyRk49R6Hzaqu7J6H+QOIMPjxizubZy\nJ6zQ9PtNTa9dVS68l8j5f+4k/6GHeP7MPB49lc2G3UksTuwIeW257aQvFLQiwyeFVjmd2RR8mJi1\nLvxgnMfMcTHEG+vAWlpHWHT8a9bAm28WqYDq+gFgG864b9++XLhwge+//x6A/Px80tPT7W6nwi+3\nTJTwr4CSxt3QUPseNV26FF/mSpYW4tjxhjbiTw0gbHMwOOYSOfkY4w2zS+2rKem1q8sUw1w+2xBF\nr0IjgwxLiAo/Texqd17bKbUUlIU6bt1+jDbZ3YgwpvKBwZMPey7GMS3dmraysbNnT3Edv8UGYEcd\nXiVu3LiBi4uL9RMXF1dsvcV46+vrS2FhIevWreOVV17Bx8cHX19fUlJS7NZrCb88evTocsMvT548\nmWHDhuHl5cWECRPIyckpFn559OjRFYZf9vHx4YEHHuDmzZuMGjWKw4cPWw2+tvztb39j2bJleHt7\n89VXX/H++++Xe262b9+Oj48Pfn5+rF692pq5TFE2apJXOdibzOTkBNOnQ2Ji8QldAFM/isbvjI71\nxqW4kUmHPw7geo+fafvrXdzs9Bsxa13YfnsnEj2vUfjTxFLpDhvTpKa6oph3VKB2vsbxL94IP8qY\npAEkjExFd8sJU8dfaHu2HzedLzAmaYD2JlCNiW+1RXOd5DVjxgweeughJkyY0NBNUVSDmkzyUgbf\ncijLfz8xsbSQDl0UjWdhOgfDv2H9WhfAmevdj4FJz9vftgG0OD2+a6fifCyK3Fywrbop6rWrQ7G3\nm11zOQAcCDThmZTOpuBviF3lToQxFaen+pHb8witf+ltVQFFyKXFksooFIrqo9Q+5VCVPNz35+pI\nD7bk2j3N/N/fBMdcwr4bwsvGVMYbs/FdO48DPU1cutQw/viNAbs2k11zSdN74Lt2Hi8a04gz+HDT\n+QKtf+nNrduPYTjelwhjasvQi9Uzn3/+uRr1t1CU8C+HyoZL6PanUHZsTyyWa/dmj6Pocn7Hxt1J\nSATuGDlgjIJdc9HpYNo0bdtZs7TvadNaRiiDsmYhdzkylwPGKIYYFlu9fW51PsNtx/3I8P6esUOD\nQacj7rWYJqP7VygaM0r4l0NlwyUE/NqNhPu3s+P2jhiO9eWq2yGQYGqdQ5zBhyyKPy0KC4smiH30\nUdkxgZojZU0Ie/997dwe6GnCI0kLCxG2dRDZd2RpM6Dv3cfYwQFE5S3m0pc6a5L7rl2b9/lSKOqM\nyuR6bIhPY8nhu3x5UY5dN7cycs7q9VrOWkse24VIXnWy5toN8lgihdBy1paXa9fycXOr32NsLFjO\nNYHvyCCPJbJQp7fmMXYfFyD5s5OMNfjIDNyKna9Wreo+F3BTzuGraL6oHL51SKXCJRQWMuKXqyB1\nIKBjljexKzVD5ZhTD9LhMRMmU+XDKrdU1bblXMvkuSSlRaGTJiKMqQTt9STDN4Xg7wcTYUzFleIn\nKC+vec+RUCjqAiX8awO9nsXDHEFIOmZ6c9X1R3bc3pGYda7ke3pYQxxUdoJYUwtlUFdc6+xKnMGH\nZP80a1pLe2o0aBkPzKYQ0tkedRW6eeTIkfWSm9h2P6GhoWRnZ5dZdsOGDRw+fNj6v6Kw1g1KZV4P\nGuLTWNQ+9hj99jsydv4Sqz4oLHCUZKGQ3R7zlxLMKiAhwyY9Xmy75culdHIqX+Xj5FT3KoymQpDH\nEinmdJGxBh8pwaoC8jMsqXdVWVXUPu8kvyO3ntxabNnWk1vlO8nv1KgN7dq1K3f9iBEj5J49e2q0\nj7pg2bJl8vnnn6/1emtyvPn5+XWyn+nTp8u1a9dWq03VQal96pHQRdE4pqUTlbeYOOEMUvIfXyMU\nOjBvt5Y8
d+OeFMKOTisVpdOesfPZZ1umy2dlSHY24bt2HuON2ZgQLApoTa90f7r3/AYTgnwc8Bw6\nHSaHNqo5EoN7DObRdY+yLUOL57AtYxuPrnuUwT1qP6ZzTUM6Z2RkWGftvvbaa9a3i+3bt/PQQw9Z\ny82ePZvPP/8c0OL0Dx48GE9PT55++mlrfJ2RI0fyyiuvMGTIEPr06UNSUhJ5eXnlhm729fW1ftq2\nbcuOHTu4fv06M2fOZMiQIfj5+bFx40YAcnNzmTRpEv379+fhhx8mNzfX7jkxGAzMnTsXLy8vhgwZ\nwvHjx4GiGdD33HMPc+fOrdZ+DAYDv/32GwBffvkl3t7e+Pj4MG3aNFJSUkhISGDOnDn4+vpy4sSJ\nYmGtv/vuO/z8/PDy8mLmzJncunXLWufChQsZOHAgXl5e1sB6O3bssJ4bPz+/MkNhVJvKPCEa4tNY\nR/6x87XRqMWY6/5wgDbKHxrcci21dYSbW/HRvefQx4vOtc0bVsADj1dYV02pqsF368mtsmt0V7lg\n6wLZNbprqTeB6mBv5L9w4UK5ZMkSKWXxEWpeXp4cNmyYPH/+vJRSylWrVsknnnii1PZjxoyRX3zx\nhZRSyqVLl1r3sW3bNvnggw9ayz3//PNy2bJlUkopL168aF0+depUmZCQYN1/RESElFLKb775Rt53\n331SytIjf3tvAgkJCTIoKEjm5eXJV199VX711VdSSikvX74se/fuLa9duyZjY2Otx5Camir1er3d\nEbmbm5t8++23pZRSfvHFF9bjmD59unzwwQdlQUGBlFJWaz9ubm7ywoULMi0tTfbu3VteuHCh2Dkp\nOfK3/M/NzZUuLi7y6NGjUkopp02bJt99911rnR988IGUUsoPP/xQ/vGPf5RSSvnQQw/J5ORkKaWU\nOTk5dt9W1Mi/HolYvrSYP3+GTwruh4axcXcSpswsawwg5X5Yc0q62h7YHU+3nweREJJEpye8SQhJ\nJmxzEI8cO9TofP9HuY/iWf9neWvnWzzr/yyj3Os3prNtSGdfX1/efvttTp8+Xarcrl27eOyxxwAt\ndHJl2LZtG/fccw9eXl5s3bq1WMC48ePHAzBo0CCMlYxVcuzYMebMmcOaNWtwdHTk22+/ZfHixfj6\n+jJy5Ehu3rxJVlYWO3fuZOrUqQB4e3vj7e1dZp2WY3rssceswe0AwsPD0ev1ADXaz9atWwkPD6dr\n164A1tDXZXH06FHc3d3p06cPANOnT2fnzp3W9fbOW2BgIBEREXzwwQdkZ2fj4FC7ARlqpTYhxB+A\n9wE98A8p5eIS62cAS4Az5kVLpZT/qI191ztZWUTITJYeCyDDN4WOmd4Y7z5KnMGH8cZspI2/PigV\nTk2wnLv58zWDrl4WMu/7fCLvduCq2yE6Znoz4perRIWfJia3cY1jtmVs46O9H7Fg+AI+2vsRowyj\n6vUBIKUW0tlW8JVFyZDOAA4ODphs3NMsCV5u3rzJc889x969e+nVqxdvvPGGdR0UhVvW6/WVSv94\n7do1Hn30UT799FNrvgEpJV9//bXd6KKVxfaYbH+XDFNd0/3UFvbO27x583jwwQdJTEwkMDCQzZs3\n069fv1rbZ43vGCGEHvgQGA0MAB4TQgywU3S1lNLX/GlSgt82sudpnStjhwaT4fM97gcDyOl6xhrS\nwTZSZ3MP0Vxf2LraCr1ei5SqK4BCB666HiJy8jEt7s/ypQ3dVCsWHf+aCWt4c9SbrJmwppgNoK6o\nTkjnwMDAYqGTLbi5uXH48GFu3bpFdnY23333HVD0EOjatSvXrl2z6rMr266SzJw5kyeeeKJYyOaQ\nkBD+9re/WW0JBw4cALSY/StWrAAgLS2NQ4cOlblPS5TQ1atXM2zYMLtlarKfe++9l7Vr13Lx4kUA\nLl26VO6x9u3bF6PRaLU/fPXVV4wYMaLM9gOcOHECLy8vXnnlFQYPHmy1BdQWtTFcGgIcl1KelFLm\nAauAsbVQb6PA9/lonls1iZkikEIpWN3LmYT79tDu1AD6XXCwqoA8kh7kQM/ijvwtwf2wPhkbPsWs\n6gnmtkwvLUGO4w3tgZCZ2WhCP+w5u4c1E9ZYR/qj3EexZsIa9pytWUznugjp/P777/Phhx/i5eXF\nmTNnrMt79erFo48+iqenJ48++qg1g5ezszNPPfUUnp6ehISE2A3hXJKyQjdnZmaybt06PvvsM6th\nc+/evSxYsID8/Hy8vb3x8PBgwYIFADz77LNcu3aN/v378/rrrzNo0KAy93n58mW8vb15//33effd\nd+2Wqcl+PDw8mD9/PiNGjMDHx4eIiAgAJk2axJIlS/Dz8+PEiRPW8m3atGHZsmWEh4fj5eWFTqdj\nliW2Sxm89957eHp64u3tjaOjI6NHjy63fJWpjGGgvA8wAU3VY/k/DU2tY1tmBnAOOASsA3pVVG9j\nMfjeFjZR8mcnySsdZazBR/Z7KEgyv41kfmsZY55t6mdYIgl8R83UrWO6vjRahgWOkrEGH8m8jpI/\nt5XMbyPb/XGA1Q00dv6SYttUaoZ2JWgpM3wrcidtCliMsi2BpmDw3QQYpJTewP8BX9grJIR4Wgix\nVwix98KFmiczrymhi6IZev48FDqAvoDIyT9zxO+/4HCTsO+G8Igxu1jANltaSojm+uTCe4mMGBmq\nJYBZ5U6/Q4NA6rjePYPIiRnErHWBDf+yjv4t+RhaUuwkhaKy1IbwPwP0svnvQpFhFwAp5UUp5S3z\n338Adt/XpJSfSCn9pZT+3bp1q4Wm1YyMrP38Z+Q+wnb4ABJa5YI+H4eLrmzcnVQqzIBer/z165ot\nbU1aUhdjKk+l5WC5LreduxOAqNCj3G82/paVj0HZYsrGkqaxKWM0Gq1eOIqyqQ3hvwfoLYRwF0K0\nAiYBCbYFhBB32PwNA36qhf3WOiVTNj6x/RRISLh3HzjkaYUkFHT4rVSYAScn+OKLCmIAKWpM4vy5\nWjYvNzdtgckR8tpy2eWodfT/9Mea8bcq+Rgqg5SNM+udomVS0/5YY+EvpSwAZgOb0YT6GilluhDi\nTSFEmLnYi0KIdCFEKvAimg2gwSgp5OPj7asI5vycoo36HW+ArhBMeshzAgGREzOY1Ge2Guk3EHFT\nZ1vVP8HfD7G+lQE4XdSke2XzMVSGNm3acPHiRfUAUDQKpJRcvHjRbr7lylIrfv5SykQgscSy121+\nvwq8Whv7qikl8/Ja9MBt25ZWEQDsdreZQl7QirBtg0gYkUr7LB86P27CpFQIDcKWtibeiO8LXLcG\nfksacoBPPTsw3ujKSINNrmAbHB2rZ4txcXHh9OnTNAZblEIB2oDExcWl2tu3uBy+ZemB7Qn+twwB\nnL9zP+Q7Efz9YJKGHCBhZCqjtw9iW7s/kLhibumNFPVC4vy5BK/UsSt0sebnb9xBXIYPUeE/MT5t\nHplG+9tVNqx2SRwdHXF3d692exWKxkbjmhZZD1RF3xvj2QsKWhG7o
jc7t+0gdrU7SEjp+jv+8YQS\n/A1NzggTg74uCvxmmye5LAoL4aWX6rGRCkUjpcUJ/7L0vY4jo/G/M4YMDBSiIwMDAnA7O5qnc4qE\nS9C/F2BwHaj0+42AOQFzybgShTtG9Jjw72Lf7bYk5kmZCkWLpkWpfeLjwZ4nmxDQpft+9g79D+tX\nuRNhzGS9wZmrnv+hx6XRtP/NCIABSKrPBivKpKTtBiA3V3uIexp1rDcuxZUssnBlvGG29jZQwUNB\noWhJtJiRv0VY2Bv1SQnP/ldz64yclMHwUSOInJQB0uzuqWh0lGW7GXhWx8Hwxaw3OKNDst7gzMHw\nxfidKerqXbrUc2MVikZIsxX+Jd05X3rJvlGXwGj8DDEsMKZoOn1dPkkjdoDDDWJXuzPn55RSbqGK\nhqcs282qn7WQ21Hhpxk+aoQW8XOtC+uNmu9/q1bw/vv12FCFopHSLIW/PZ/9svS8fme0keK7Bh9t\ngdCycSGLTo0KD9D4KNOHnyxr0vekETsI2utpTfrepQt06ADTpqkHuUIhGuukFX9/f1nd5MwGg30f\nb3tkYGC9wVlT8zjcAn0e7oeGkdEnDQT8ZZUnC427im3j5qbN4lU0HPZ0/k5O8GtbA590cCYq/DRB\nez1J9k8jZq0L4VnZuJqMperp0kV7E1AGfEVzQQixT0rpX1G5Zjnyr4o7p2Wk2PbSHeBwC/fUYZz8\nV4rVrTPGs1epbVSo5obHXj7kTz6BT2bNtqp6dg/IxOFaJyInZbDG1RkTgn6hw+FPLhCoBX+7eFG9\nzSlaJs1S+JelEujSxRwS5vl+uIb+ARMCgSTO4ENu9xNwswPG3lpWrheNadzzrwXkXB5Y6foV9Ytt\nohdLPCVr4DeZTZ8MF/J/dxIccvmHZwe8QoM4MjgJ2v1WzAB84wZMnapUQYqWRbNU+5SlErDE33F/\n6A8Y/TfjsSeImYdziJxyFBxuWv9HhZ8mptU8uvePKrceRSNHCDxDg0gfnAxSgJBQ0JrY+H6MN4fj\nLom6voqmTotW+5SlEpgyRYvRPzv9Fzz2aEIhcmIGONyk3SkPfkxMJkJmE9NqHlvamsqtR9E0SEtM\nRnelB+gkCAhOGWo1ANtDhXxWtBSa5cjflvj4ogTgrq4wJjCGD3tq8WDmjLuAyfksmASxX3rzsjEV\n0UjPh6IaVGPkb96s2jGAFIqGprIj/2Y9w7eY+icwms5ndLy7Yh7uBk8ipxzRvHskICSfDejAy8YG\nbrCiVvGcEEK6x2YoaE2/1MFcbneTX/vtJXLKEW6sGISfjLE781fZdBQtgWap9rFgOwvU4s//gcGT\nzwZ00AS/APfUAKsKyBAa0rANVtQqP7saaXfWm9j4fjyVlsN51wy6H/FHd70z8R5af/A/V/wWsE2/\naS/vg0LRXGjWah+dTpucBUX+/FHhp5H6W9D6Gu6pARh7HyVmrQufDujIkTvPI/92pBZar2h0GAzE\nidL+/0/nZOPZ3khmppaGs7BQs+2EhmqZ2ZSxX9HUaNEGXwudOxf9tvjzG473hTbXcD8UwMkNKdZQ\nAO2PhLF8qBL8zZYs7fo7/+pabOZvu4uZ3N46BgKjKTRP7s7MhI8/Vvl/Fc2bZi38r3ppcXu08Mya\nP3/GgH20PdsX491F/vyBifPI/4NJjeiaM66uxBl8uNzjBOS1JemeA8QZfHjX4MMPY4sHfoOiN8aS\nqAl+iuZCszP4Wrx7Ml2iYdgSDgReY318X8CZyMnHQF/ALaer1hE/rRaT9HZUQzdbUcfETZ1NVN5i\nYldp2bgiJ2YQOflnKHQkdrU7441LcafifqCMwYrmQrMS/rbePX5Cx4HWOeBwi8gpR2l7wU1LxA70\n+/kuIuQpMPvzRzRwuxV1z5a2JmKYR4RxDgCv3HCloEsWt53sS4TxIBLwHDqdtDsvwAotHbUQxd8A\nbI3BCkVTp1kZfK0B3QKj+cuZjbTnepFLpwAkeOwJ4sfEZOXP31IxG34jJx/TBgOFjsQuH8CO2zuS\nEJJM5yNDuaR3xmlDItOnQ2Ji0RyRRYuUsVfR+GmRBl+LPtbvjI43wo8C4H54kCb4ARDMPJzTIG1T\nNA7ipmqB32JX9Kb7EX/Q5xM57RAJIUl0PzKIS/12453RjU8+gb//vXTsIIWiudCshL9FH7veqCX0\niJx8jAzvFG0ilwlAEjlFM/QqWibWwG/GVH5ZvRf9ZRfQazN/f+23j7DNQaT+EK8EvaLZ06yE/6XR\noXgPnY4b5mD+DrnaqP9mB2K/9IGC1uBwkwX3K5VPSyVx/lwi3o4CvZ6xQ4MpvO0M5LcGx1voL/dk\n4+4kKCwk7rUYQhdFN3RzFYo6o1kJ/1HZ3TgU8hXjhgbzqWcHbaEE9JoDd0x8P9qe9qag462Ga6Si\nUTA2fAoJIcl0PzIIHPLApKPwttPcPtGfOIMPUXmLuT+3Wd0eCkUxmpXBFwcHxg4OICEkCUwOoCsg\nbHMwI365ag3THKHcOhVAtz+Fov8tm1/v3k3Y5iBG/HKVyKmHQZ8PeU7EruxNhMxWKdsUTY4WafCV\nhYVs3J2EPtsF9AV0zPJm4+4kXjamWsM0KxQAF95LpLCrM2FHp7FxdxIRxlSCdwWAAIdrXYkwpqoZ\nXYpmTbMS/oWY9bjOZ+iY6c1V1x+1/+iJeDuKxPlzK65E0WJ4b3Aiqd9/gRE34gw+JA3bg/vBAArb\nXNecAnQ6pftXNFualfD3G6rpccM2B3Fl2SHCNgeREJKM31DluqEojmVCYGYmjDdo7p9hWwdh7H2U\nMUkDiAo/zdjBAUr3r6hX6jOSbLPq1Yd7X8Bz8zS+3p2CBL7enYLn5mkc7n2hoZumaGTYhvs+0NOE\n79p5fL07hb4/9SdhpBYAMOHefcSsdYEN/1Kjf0WdYzsgkVL7fvrpunsANCvh/2VIIicPfYEjBeiQ\nOFLAyUNf8GVIYkM3TdHIKKbO3zWXA8YodJh4Ki0HdPlk+KQQ/P1gAKJCj6rRv6LOsR2QWKjLSLLN\nqkernLuKymIvQFsW5oUmRy3y57AfiJyYQcxaFx5ZtBQhwMEB7r9fJXlR1D5l+RfUld9BsxL+oAl6\nNSVfURGLFmmB2mwJv9Mc+mGVO8HfD4FWuZrrJ9DLnPC9sBC++67+Xs0VzRtbHb+uDGlcV5Fka0X4\nCyH+IIQ4KoQ4LoSYZ2d9ayHEavP6/wohDLWxX4WiukyZAtOna9m7QPvO8DTxxtq+AFbPHwod+dSz\nAyZ0+Bm0pC8lKflqrtI/KipDSR2/JZmQLXUZSbbGwl8IoQc+BEYDA4DHhBADShT7I3BZSnk38C7w\nTk33q1DUhPh4LU2j5YYrLIRLm+aygYeLef6E7fDhaP+feGRoAAfDSyd9sWB5Na9vo52i6VJKxx9Y\nlHyqEB1GDIwJjCHeWDfOBrUx8h8CHJdSnpRS5gGrgLElyowFvjD/XgfcJ4QQKBQNhD3jmpTFPX9i\n1rqwKfhwMc+f9calduuz
vJrXt9FO0XQppssPjKa96785MOkt1huc0SF5cagrq4csIP3I/jrZf20I\n/57AKZv/p83L7JaRUhYAV4AuJSsSQjwthNgrhNh74YJyz1TUHWUa0Ww8fyKMqehvtrN6/kQYU3Ej\nE8+h02FyqHUT21fz+jbaKZoutrp8vzM6rrmlgiggclIGd44LICEkGUw6ntp1quxKakCjMvhKKT+R\nUvpLKf27devW0M1RNGPKMqJZ3kez0HL+FrT/DSQkBewmzuDDuKHBpIV8heGKCQKjS3mUlVWvSv+o\nKMmiRUX9bb1xqZZiVDqA4w0yfFOgwJHYlb2Zn5FSJ/uvDeF/Buhl89/FvMxuGSGEA9AJuFgL+1Yo\nqoU9bx8nJ5g1S3MRtsz6jV3ZG489QVo60MfTSAhJxmNPIJkD9hI7UlfKo6yselX6R0VJpkwBGaDp\n+d3IJMKYivtRT9AXaAWkvk73XxvCfw/QWwjhLoRoBUwCEkqUSQCmm39PALbKxhpOVNEiKGtOiCV7\n1+1PFiV9SUtMpvWvvUFXCDc7cNjjJ2LWuhCxvLT+X801UVSW0EXRdDDr+d81+DB2aHBR8qlCPUhB\n5KQMlvQJqJP910pIZyFEKPAeoAc+k1IuEkK8CeyVUiYIIdoAXwF+wCVgkpTyZHl1Viuks0JR21jD\nhCfDzQ7Q9iq3Hffj0vIDAMTNX8KWtiYVNFBRZfo/M4kjXTdpwl4ADjdBmMDkQNj/DSNhpGYD6Hdx\nDD/9z6pK11uvIZ2llIlSyj5SyruklIvMy16XUiaYf9+UUoZLKe+WUg6pSPArFI0FS9IXjz2B2oJC\nRy7fdQDP0CCV9EVRI57adUrLOyLQsg7qtJDzYf83jA27k3hzlSdup8fg7jqwTvbfvJK5KBS1TLc/\nhdL9jInD7nu1IG9A5JSjoM+DvPbErnJnvDGbkW5GFi1S6h1FFRCCOIMPkVOPgIM5u2B+K2Lj+/Oy\nMRUdEje3qucTapHJXBSK2ubCe4m4+t5LTKt5vGxMZUvPTrinDwSdidvO3kWEMZWvDc5kukSryVyK\nKrPj9o6gNwv+QgcwORA5KYO3DJqevy5dhJXwVygqIHH+XLr3jyILNxwLJRk+3+N+MIDs7lmMHRrM\nnPDT+J3RqclciioR5xFAwv27AbRQIvlOmgpIFBDjqTlQ1qWLsBL+CkUFWEI2PGyYzabgw4RtDsLY\n+6g28zckmTFJA6wzf9VkLkVZhC6KJu61GGvgp0/vagVAt5/9ObkhRfPzNznQ3jiInMsD69xFWAl/\nhaICLCEbDvQ0sWStC0IxldEAACAASURBVBt3J+H8qysZPim4HxpGvl7gRiZ+hhha36uSvijsk5G1\nn8iCt4gTzlosEVMBFDrinNMeE4Lxxmz8Vi3gWtZD6HfPrXMXYSX8FYoKsI7md83lEWM2cQYfLvc4\nAXltyeibxv1nrvCuwYeD4Yt5sru6pRSlCV0UTZ/08yAgcmIGw0eN4IjXftAVMi0tDz0m3DFywBiF\n04G5fPFF3TsPqJ6qUFSArd7VOvN3lTuxK/qAhMjJP1uTvvxtl/3Ab4qWzf25OjYFHCJsuw/o80ka\nsQNa3SDsuyG8ZkxpkEmBDnW/C4WiabNokabzt6h+/NbO42XjHASw4YcRJI3YwW0n+xJhPFgUrEWh\nsCFi+VIQLkROSrUmCKKgFSN+uYqg6u6ctYEa+SsUFVAsZEPKXC7JKK53cSPO4EOyfxrBO0aQ3f0U\ncQYfFcFNYR+L7lB/E/QFdMz0hsI2RE7MIM6jbsI3VIQS/gpFJSiZHvSTWZr6J2atCzu37SBmrQtR\n4aeJmzq7oZuqaIy4urJ4mCM45ON+MICcrmc0FZCA9/16Vbx9HaCEv0JRDba0NQd+k9kgBBEym5hW\n89jS1tTQTVM0QuKmzuaCu+YmfHJDUaIgz+3jyMoY2CApP1V4B4WiDoiP11xEs7I0TZAK/dCyCV0U\nzS//0LHeuBRXssjClfGG2RzoaYJdRUEBnZxqbvCtbHgHJfwViloidFE09+fqePrjpThdLH6DOx2o\ne79tRePGwcF+kvaSVCeejy0qto9CUc/cn6sjKm8xn3TQcrCuNzhbk76r0A+Kygh+qL9Z4kr4KxS1\nRMTypfT9qT+RU45w58MBVoPwZFbQ4aFJZLpE17teV9F4cHOrXLn6chhTwl+hqC2ysngqLQeQZPik\nYDjeF4A5E0+Q4/kf/M7oyMyEqVOha1f1EGhOxMdbQ/aU+YC3l+KzJPWZ8lMJf4WitrAM2QrbQH4r\nMrxTiJzyEwiIXeVuDf4GcPEiKgR0MyE+Hh7/n2g6ixhOSgMnM3UETzPwwpQYQhcVxXqyl+Lz2Wcb\nLuWnEv4KRS0RN7Uo9ENwyjAtPK9jHu5HPIkwpuJKcWWusgM0D574RzQD8tM5GL6Y9QbN3vPCPa4s\nNSwsleWt5HwRS85oy//6dAhQwl+hqCW2tDURk6ipepKGHIC8ttobgMd+4gw+ZFFamatCQDdt4uPB\n06gjPfgbxiQNICr8NHeO03I+h20dpIV1aKQo4a9Q1BKJ8+fCuIeJnJihqXpW9CE2vj8UtCJyUgbj\nDaVn/+p0SvXTlHlyWTTj+Jd10laH33qS4ZtC23N92Lg7qdjTvTJ2gfpECX+FohbZ0tZEv8ujid2k\nqXr+X4AD/X70pW+aN7f33EQ+DngOnQ6TQwHN/W/mzIYXBIrq0f+EjjfCjwJgON6Xq26HoNCB3NvO\nFYv1ZEkIlJmphfLPzGx4m48S/gpFLTLFMJfczauIOrwLg5ukR4YHRwYn0fc3QeKunTwyNIC0kK/w\nPNnNuk1eHrz0UgM2WlFtEk4tJWatC5GTj5HhnQIFrSHPibAdPsViPVkSAtnS0DYfFdJZoaglLKM7\ny02emQn5mfE8IoNICEmmU39vrromE7Y5iK93x+PIF9ZtL15soEYrakTPwizAGfR5ICB411DGZWQT\nFX6YMaceZIuniQjKtu00pM1HjfwVilrC3uhOTyEbdyfRMcuLq26H6JjlxcbdSeip5HRPRaPDNhev\nQPKpZwcobMVtJ/1I9k8DICaxL/meHpodiLInbjVkBHAl/BWKWsLeKK4QPWOHBnPV9f+3d+/RUdVZ\nose/uyoBEgQjEBEIlQqIIIk8BDWGlJGW7jRRiD29aBkjcKdv6+2eca49gWG4g3fZvZS1EEPWONfp\n26O2LjSo04zdEjTd3KZbMYFBQXmYBFAkIYDKQ4iAiUKqfvePUxXyqEpSlZB67c9atULFU6d+p5B9\nTu3fPvu3Dzl3LeccH1GY7QLAUfB9+LtJAAwf3p8jVb3ha+NRKimUOqdyIMvK8z/6rqe1tTf3/qA1\n8IP/G7z684YufzTto1QfcTisVE9b07OLqM5/masaMrngqOGqhkzK86sYemMmFxybcezM5/NEePrp\n8IxZ9UzbLq0NtmdgrBXkU06MBQNr/yOD4vq91p1a3tbexW1e76vfj6hOr8aYiHzMm
DHDKBVNysqM\nSU42xqrn8D6K5prZ9yw2HjCZBbmGxzD88yDDY9ZzD5jZ9yw2Ix6Z63d/6enGiFg/y8r6/ZCU6fz3\n6kaMAeOanWf4hfXTgPUXFQGAXaYHMVbTPkr1EX+375fNreAvm6yJ3eqKKvhmKAz4Br4ZSnVFFfdm\nu3h7xsuk7UptV/8diaWB8eonL67B/p2F/NI5Cw+CYCjMdlGZ815rnj8al/DUfv5K9QMjwk0FudTc\n4j0BDDqHnL8WM+SUt/pnO4m0AFYuOCnJfwVQb3u9q+DdnFHC7oWPt6Z3tl43lPL8KriYxNpXJwBY\nHVwHrKD4iWVhHq3281cqooy7O5+aW6rI3JmLedIb+IeeRM6ndqr+aWoKXPrZcU5BXVmpPy/guusq\nWPtaBggsvf9jyr+3DTzC2lcnUFy/N2qX8NTgr1Q/qHfW49iZz0cVVRRmuzBDTiHnrsUMOUlhtgs3\n9h7tx96zzVQfyTmRyh/y32HrdUNxvTcdBjSDzcOwumnWBK8I1NdT/MSydtU90UCDv1L9IP3NAzRU\n/JEp2Yutpl+bc/GUnmT+Zhfl+VVMz+5Z2UdPV4NSvZf68wKor2P+5lzK8yupdFWBAYxwZswhSp1T\nqTeOiOjTEwoN/kr1A1+dd/W4U2RtXsTrO7ZjgN+9t50pmxdRPe5U67bJyYHr/nu6GpTqvZwTqZTn\nV/HpMAPGDjbrzJu5c5aVArrPatbnm4z/27+NrMZt3elV8BeRYSLyJxH5xPvzmgDbuUVkj/dR3pv3\nVCoatVYCbaug5r11XJ/ewitlBrunheUPryN9W0W7BT2efjrybgqKNxs3rGf+Zu8kvbitq35g/Bmh\n5LUMhtTMZfcYK8/f1AS//nV0VWf19iavFcCfjTGrRWSF9/k/+dmu2RgzrZfvpVRUKyrqfFNPwao1\nzGm2UX3hGZJNAw1HHPzVow9T7fTwkyXLqaiIoJuC4oDv76O47Jn2OTaBoUemcOf+qynPr+Lw5kWc\nf3Ndu9d2LJz0NW6L1L+z3qZ9CqG1O9U64N5e7k+puOJrFfDsEGsFqN85U9izYDVZ9Taef94K+OFY\n5SletW3dAPBm9iEABn4xgXOOjwCYt9nVLk3XlUherKdXdf4i0miMSfH+WYCzvucdtmsB9gAtwGpj\nzBsB9vcQ8BCAw+GYcUTr2lSsczq58aaxHMjch+t964ahkg1pXGAwj40pJP3Ycq3r709OJ6WSwrIF\nx7jmcwdnxu8mc2cu1d4qrfL8KrI2L6J6R/urfpHOV/4Qnvsy+qzOX0S2iEi1n0dh2+28txUHOpOk\newdzP/AvIjLe30bGmGeNMTONMTNTU1P9baJUbGlo4MHq8zCgmcq8reTuygLgFwsOkuWu4Ujamm52\noPpUQwPF9XvJ3ZXFmet3M+zT6VRXVGGA13dstwL/uFMMH97+Tu6f/jT65mi6Df7GmDnGmCw/j43A\nCREZBeD9eTLAPo57fx4G3gGm99kRKBXNfC0B3IlgoHLWDpYurGNe5WRqXG+R2xhaZjbSlgyMdK1t\nmm02Sp1TqZpZTcaeHM44DlLqnMoR0kmkxbrif6UCaJ+S+9WvOrf2ePbZyE7V9TbnXw4s8f55CbCx\n4wYico2IDPT+eQQwC6jt5fsqFRNKH3iYZQuOsfaVCWTszYGEbyGxifK8vZRsSOMPXwS/ALj2BQqe\nL9dfeEsOyxYcY17lZOonHGT+X2awbMGxTusvf/ll58+0qMg6EUTLHE1vg/9q4Lsi8gkwx/scEZkp\nIs97t7kR2CUie4G3sXL+GvyVwlrzt6RiIgD1Ew4y9MgUsLcw6Owoiuv3ctWZ4GcMI3HJwEhXXGYt\nx1j+nQ9wHprIJlctJRvSeH3HdqZtWNFa0tlWtH+m2thNqTArfbSEZRdXM69yMptctTgPTaRuyn8x\nf3MuGz9vCHrG0GbzP/koYl2VKj+8H9ods/OozNuKa2se7769FQ+CncAfWiR+ptrYTakosSXJw7yj\nd7debR7+/XZSP55B+V3vU3rVmNbEfemjJRSs6n4COBKXDIx4Dkdrrt+1Na+1TXMDXX9o0fyZavBX\nKswqVi7nUlam1RLYNIIIw5uuAo+d5zKsK9JSSWFpy+PUNXzY7f4iccnASOebeynZkMa7b29tXY6x\nY66/rWj/TDX4KxUBKlYut3rBe2cMHzx0EUwCBzL3ccfsPJbeVwcCD2472u2+/C0qE+mVJ/2p7QLs\nvm9Vz53axcSv5lBsGjEIPzraGDDXD7HxmWrwVyoCFddst3rI2y9RmbcV7JdY+1oGxTXbe1TGGW2V\nJ70VTGlru7t4vd+qDl69hTkXZuKkHrt4yE2rZ3f9MtjWuU2zt4tz1H+mGvyViiIGLePsyF9p6wMP\nwIgRnT+XglVr4I3ft6Z17pidx9KFdUzcfyP/+Ooz7fYh4v/9ojnP35YGf6UiUGlmjpXq8STi2poH\nnkSW3lfHqowcLePswF9pK/ivxZ/TbGNZwUEAcndlWd+qbJd4sPo8aaZ9Wa0xnU8A0Z7nb0uDv1IR\n6LlZY0Fg7WsZvPv21tZlBNdkjvW7fSQ3ELvSujr2jidGXz3/0vvqqLz9fbiYBJ5Eaz9+KnuMid25\nk962dFZKXQEZjpt5sHkmxeYZELGqgOz/m5XN/icgYyUVEQqHo+u1jdudHBoaID0F7JdgQDOurXnc\nW9fIsgXHKNuwAurbvzYcjdn6i175KxWBOlb/+NaJnTgRZo4roQ4nbmzU4WTmuBJS7o7fBnD+Slvb\nandidDh4LmsIuBNb6/kBVm+cyN6x7U+ssZTi8UeDv1JRZPE1Nj744Wp+57zc//+DH65m8TXx+0/Z\nV9rqb+nLjgG89IGHOXjjftb+R0a7ev6EBT/gpf+xPGZTPP7E7/8xSkUhX87aV6niuzGpuCz4BnA9\nES3dQYuK4PRpKCtrn6Of8N/WcGL/5Zr+LTv+zLyjd7Nl/KjWdFrJgBVsSfLEXXksxpiIfMyYMcMo\npToQMQaMa3ae4RfWTwPGiJiyMmPS061N0tONKSvr3VuVlRmTnGzt3vdITu79fvvT2pVPGfnH4Wat\nc6oxYNY6p1rPVz4V7qFdMcAu04MYq43dlIombVaayt2V1bry10PnGxnZXN+u5DE5uXepC6fT/0Rq\nVE2CBvi8ik1jFB1EcLSxm1IxKFAPmrnXPdzj+v+epnIClVBGVVlpm5W5fCulFdfvjbKDuDI0+CsV\nRbYkedo1gPPlrKtS/JeAdoxxwSz0Es7uoH021xCgW2dc18b69CQ3FI6H5vyV6rlBdz1ppjufMnWk\nGzdi6kg3051PmUF3Pdluu+HD2+fwfY/09M77DFfOvzfvO/eJJ83D9z9ljtqtz+HxjBzDPw01k+7J\n1Zx/h4de+SsVA34y0sbu+x/jkWxHawnongWruT2ppnUNgPXrrZYH/vjLgoSrO2iglciWLOn+G0Dt\nwQ95Zuzj/HasVQq7PtMG9haMJHSq7ol3
eoevUjHg/2x7hobDMyjPr2LcqBzqrz/oXRnsLUqabwK6\n7v8TKAtSVNT/JY+B0vFut5WiAv9jKli1hsmfnOTIGFh6Xx1vvJ/HgZt2gngo+uhi65Jbxd5HvNMr\nf6ViQUMDG3dUkrHvduqmbmfI6TGtK4P57gHoqgXClbiTNdS8fVfp+K6a2M1ptvFH1z7mvzP1civs\nAU3M//MtPFq/Pdjhxzy98lcqFjgclEoK9dcfhKYUzqXvI2NPDsXeoPedeUvg/lPwSkWnlw4f3ndX\n9+vXW8HZ1xLZV0num1iG7t9r1SprW3+dOqHzN4PUnxeQcyKVja+9BM6pLF24FxK+tf5jywDyvjgX\n+gHFML3yVyoG+EpA51VOBvtFMFA3dTuF2S4Ks128PeNlsg6ndnqdCDz9dN+MoW0lEXReRD6Y1tNJ\nSYH/W8dvBjknUimf+DKF2S7rF4lfg82D/UwauAex9L46nrohp2dvHEc0+CsVA7YkeZi63VoEfu2r\nE5i/2QqE5d/bRnl+FfM357J7R+e8izF9d9UfqK9+W4Hy+b4UkQgsWhR4Ytpfs7WNG9Yzf3Mu5flV\nLP3REbC5STo+Cc/AZisFJPDinf5bYcczDf5KxYCKlcvZQybTNqzgH+r3snFHJUMbplhXwI2j2bij\nEjvuTq9LTw/9PTvm9LuaU/Dxl8/v7huDT8BqI7ebjTsqGXjiekhuxH42jabnDlCyIY1NrlqmvHMv\nAxNuDuLI4oMGf6ViRPqx5eyuX4YbO4XZLs45PmLokSm4Uz6jMNuFG3u77XvTstjfzWLdCfR+PfnG\n0OW6uXbreL8d+QkDv5iA+5rjFGa7+If6vUzbsIJ99kwa32q/Fm+0NKy7onpyM0A4HnqTl1LB8d0c\nlZW92PCYmPnZLmPAzM92GR4TM/uexe0av/3sZ6E3gktP93+zWKBHV/v39qrr9vWBzF/o/3izshe3\nvl6k8+cUzQ3rukIPb/IKe5AP9NDgr1TwysqMsS2aa7KyF5tL2I0HjLHbzfyFi82IR+a22y7UAFhW\n1vOgP3z45dcEOtF0dyLpblwjHplr5i9cbIzdOt5L2K3Af/9cvyePQO/X1QkmmmjwV0q1mvvEk1ZL\nA28EPmq32j8w68mgAqC/k0ZXD5HuTzQ/+1nnq3/fc9+JouP4TXq6WbvyKTP3iSe7HV/Hk0dXY40F\nPQ3+mvNXKgZ1zGmPr7Wx1DxG4SgHGMNvx1rtH7LcNTDr8hKQ3TW77El+vi2HI3C7hpUrrXGuW9dh\nknfWGn40p4S5d83lf8o0ipYkkPjmm63jL8hxWW2aL65mTnP7ENZdS4r1663fBxprPNF+/krFGN9k\nbNuAe0Sc/P1tDsrzKxn26c2cHXXE2/6hlszKu6m2Z8K25djtVhcEh8OanO04wWqzBa7GSUyES5cu\nP/etJ7Bokf/XiHgXX59VQNbhVL4YV8tX13zJhLox1E77kMTGkVxKOQEXk2DQOTI/vI2aW7aRse92\n6q8/GFJf/kBVSSLw8suxsXqX9vNXKk75u9JOM1b7h2Gf3syZ6z/E9m0ym1y1zKucTI3rrdZvAG73\n5eqdH/+4cxVMoKvj9HR48UX/V9ydXjNrDUPuWUjSf8+iKecWJjeepzr/Zc6M+IxLI+qovaUKEpu4\nNOw4JDbB4C9JOjGe2sz9re0rOvbl7+0aBaYP73eIFhr8lYox/gJcA1Zf+7OjjmA/Mxb3sKPYvk1q\ndwKYfrx9OLh4ER55pP1+Vq2yrujbSk6GggLrpNPQ0Plbw6pVYHOtYbqzhDqclBx/hfNZf6Dpuo85\ndcMuaqftBHcCnms+a79j+0UQsJ9No3nMAZyfTKT++oOd+vL3xRoFvbnfIVpp2kepGOMvtTHdWcKe\nBatbUz1GWiD5K+TctWB3k3xmJOmfD8Ntg3FnbZRv28b07CKqx53CrG/fD8jXv8cX6AsKrLx9u28b\nRQUkpHxCwQejuWSHBLdh0+wPsDVdzbBToxnZmETNLVXgsYOtzc1nAlxMhgHenV0YAYO/ZNin0zkz\nfjfzN+eycUclpc6p1opmA1bwr2XLerzcpL+UWG+Xu4w0PU37aGM3pWKMv8Zoe8d6uHP/3WxyvcW8\nysmU37kXPGCGniTp+CScnw+zgnHLQB5aP4kfZudQnf8Sjp354HRSkOZkTuMlnhs/ADwtfD3rG+QH\nn9F0YjQvXWri0t80Y0v4Fk9yI7bmq0n+ahgXRhyiPP8QfD0ckhrB5sYzoImvm66mZsKHVkC/fnf7\nwTdfDYO+At816eDTjDwwkxPja8ncOYtNrlpKv5hq5fq9ffm7W26y48lqyRKoqPD/LSWe9OrKX0QW\nAL8AbgRuNcb4vVQXke8DTwN24HljzOru9q1X/kqFrmPAW7UK1tevIbG6hk1j25wAEr+2rrzdCWDs\nkHCRoQ03cc7xEZk7Z1GbuZ+SDWmA1SMfewu4ExhZfwMnJnn/fboTwd5mptcjYDOXf7ZhP5uGe9gx\nrjk0nbNpn0JCk7VPPxJPjuPStYcBmLwzlzFNV9F86i6qUjykH1veGrS7Wmje34kw1q70O+rplX9v\ng/+NgAf4d2CZv+AvInbgY+C7wDFgJ/DXxpjarvatwV+pvlew6vIJoGRDGm9kpFh974GMvTl8mXKB\nc+n7GHpkCl+9uK81vZK7K4vK23aDgYyDWdRN/S+SPptI8+gDHd5BuHzZ3uZXgFwYgbnqNAO/mMC3\nIw9ZJxz7JeskYRJAWqyThTuRyR/exsmUZm4/PJg/zDhOkvsG3C9V+A3iEDjA+9pLd+QvJRQr+iXt\nY4zZ732zrja7FThkjDns3fY1oBDoMvgrpfpexcrlFKxaQ8mrh4Cvqbx1t1VKKW7qst4Hm5uhR6Zw\nzvERhdkuNu6o5I1deVTmbcW1NQ+Ayryt1jbp+7A1jsaT4p2o/WYIDDrf+U295wIz+DRJxyfRnNrA\n4KOT+XrMAQYfzeSXf7HC0PNZQzg95FsaB7tJdM7j1L9d7sfjdMKRAPcK+IK4vwnnRYv8fw7d3c8Q\nD/oj5z8GONrm+THgNn8bishDwEMAjni740KpflKxcjmlzTaWtjwOAmtfuYEXJg9pzfk/9raw9Tqr\nRXLWsFxqM6txbc27fOW/J8e68j8+ybry913oDzwPxs+VvzsRxAM2N82jD5C5M5fazP1k/amIansm\ny+q9Qb7e+mG3wz4PON+6HMS7y+sHWm7S4fB/5a/hpQelniKyRUSq/TwK+3owxphnjTEzjTEzU1M7\nLzyhlOobW5I8TDo7l7WbrHr5EynNZO5yMWnvLWwZczUbd24ns+Z71Nz8HiUb0ri3rtGK6fYW6iZW\nM/LAjMspH3dimz17c/1tiQH3QCbvzCXxdAafp1xk2oYVrTeWddT2XgNfyWagYN1dEA9Umnollq2M\nNt1e+Rtj5vTyPY4DbVdSSPP+TikVJhUr2wfdU362caxaw4+b51BsnqEgzcnaN7Naq31OD/0G21ej\
nGX5iNE2Dm7g4sBl3m2qfSR+P58ioMyQ3JfP14GbsX2RR+9XN8G/LOQOcgdYr/a74UjuBJm67C+K+\nbwOB7kGIZ31S5y8i7xB4wjcBa8L3LqygvxO43xhT09U+dcJXqcjStoKot2Gj7fq+PWGM/womDeKd\n9Ut7BxH5gYgcA24H3hKRzd7fjxaRCgBjTAvwMLAZ2A/8trvAr5S6soJdzKTjXbS9YbcHtw+7dw2a\noiJrctfj6WJhF9VjeoevUnEmlLtce7pMY3eSk4PrCuoToWEqImljN6WUX121WA6kL0ojhw+/3G7Z\nH7vd/+/jse9Of9Dgr1Sc6a5s0p+uGqKVlXWuqBkwwAr2vg6fZWVw+rT1zSJQBc5DD2llTn/S4K9U\nnAmlbLKrkkl/C6i88IIV7P3l59tuD9YVf1OT1W9nyZLAC7GovqXBX6k44y+QA1y4EHjit7sVsoKd\njC0qsrqBilh1/WDNKaxbZ41PJ3WvPA3+SsUZXyAfPrz977/8MnAffN/rugvwPa0iWr8efv3rzhO5\n3c09qL6j1T5KxamuumGG0vQsmCqirqqHRKwTjAqNVvsopboUysRvV4KpIgplcln1LQ3+SsWpUPvl\nBBIooB850jkFFOg9RLS6p79o8FcqTvV107OuThod19X1994i8NOf6iRvf9Hgr1Sc6q6CJ1iBqoh8\n2qaA/L33yy/Dr34V2nur4OmEr1IqZP4Wc6+o0MnccNIJX6XUFdWx2VvbOv1ALRl0MjdyaPBXSoWk\nq+oeXUQl8mnwV0qFpKtS0b6eT1B9rz/W8FVKxaDu1scNtK6uigx65a+UCommdqKbBn+lVEg0tRPd\nNO2jlAqZpnail175K6VUHNLgr5RScUiDv1JKxSEN/kopFYc0+CulVByK2MZuInIKCNAeqpMRwOkr\nOJz+EgvHoccQGWLhGCA2jqO/jyHdGJPa3UYRG/yDISK7etLFLtLFwnHoMUSGWDgGiI3jiNRj0LSP\nUkrFIQ3+SikVh2Il+D8b7gH0kVg4Dj2GyBALxwCxcRwReQwxkfNXSikVnFi58ldKKRUEDf5KKRWH\noj74i8j3ReSgiBwSkRXhHk+wROQFETkpItXhHkuoRGSsiLwtIrUiUiMij4R7TKEQkUEi8r6I7PUe\nxy/DPaZQiYhdRHaLyJvhHksoRKReRD4SkT0isivc4wmViKSIyH+KyAER2S8it4d7TD5RnfMXETvw\nMfBd4BiwE/hrY0xtWAcWBBG5A7gAvGSMyQr3eEIhIqOAUcaYD0VkCPABcG80/T0AiIgAg40xF0Qk\nEagCHjHG7Ajz0IImIsXATGCoMeaecI8nWCJSD8w0xkT1DV4isg6oNMY8LyIDgGRjTGO4xwXRf+V/\nK3DIGHPYGHMReA0oDPOYgmKMeRc4E+5x9IYx5nNjzIfeP58H9gNjwjuq4BnLBe/TRO8j6q6ORCQN\nuBt4PtxjiWcicjVwB/AbAGPMxUgJ/BD9wX8McLTN82NEYdCJJSLiBKYD74V3JKHxpkv2ACeBPxlj\novE4/gVYDnjCPZBeMMD/E5EPROShcA8mRBnAKeBFbwrueREZHO5B+UR78FcRRESuAl4Hfm6MORfu\n8YTCGOM2xkwD0oBbRSSqUnEicg9w0hjzQbjH0ku5xpibgbnA33nTo9EmAbgZ+L/GmOnA10DEzEtG\ne/A/Doxt8zzN+zvVz7w58teB9caY34V7PL3l/Xr+NvD9cI8lSLOA+d6c+WvAd0SkLLxDCp4x5rj3\n50ng91gp3mhzDDjW5tvjf2KdDCJCtAf/ncAEEcnwTqYsBMrDPKa4450o/Q2w3xhTGu7xhEpEUkUk\nxfvnJKxCggPhaVF4kAAAANlJREFUHVVwjDH/yxiTZoxxYv17+Isx5oEwDysoIjLYWziAN03yPSDq\nquGMMV8AR0VkovdXdwERUwQR1Qu4G2NaRORhYDNgB14wxtSEeVhBEZFXgTuBESJyDHjMGPOb8I4q\naLOARcBH3nw5wD8bYyrCOKZQjALWeavIbMBvjTFRWSoZ5UYCv7euKUgAXjHG/DG8QwrZ3wPrvRen\nh4G/CfN4WkV1qadSSqnQRHvaRymlVAg0+CulVBzS4K+UUnFIg79SSsUhDf5KKRWHNPgrpVQc0uCv\nlFJx6P8D4Obclx42P3sAAAAASUVORK5CYII=\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX8AAAEICAYAAAC3Y/QeAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXlcVUX7wL/DBUXcMDUXFC6aKyCY\n4G6KSySZW6ImmUtWar71/sxIM7Nces2tLEvfNjVFRazUSl/KxF0LTEw0zYULgktq4gYKXOb3x7lc\nL3jZZIf5fj7nc+85Z87Mc+bMeWbOMzPPCCklCoVCoahY2JS0AAqFQqEofpTyVygUigqIUv4KhUJR\nAVHKX6FQKCogSvkrFApFBUQpf4VCoaiAKOVfwgghAoUQP5W0HBkIIaoIIb4XQlwXQoQWQ3rHhBA9\nijqd4kAIoRdCSCGEbR7CjhZC7C0OufKCEMJZCHFLCKEraVmKAyFEDyFEfBHEW6qea06UG+UvhBgh\nhIg0FeALQohtQoiuJS1Xbkgpg6WUj5e0HBYMAeoBtaWUAUWdmJTSTUq5s6jTUeSMlDJOSllNSmks\nSDxCiJ1CiHGFJZdFvHmuWBV5o1wofyHEZOBD4D00xeUMfAoMKEm5cqOUFmQX4C8pZVpRJlJK712h\nqDhIKcv0BtQEbgEBOYSpjFY5nDdtHwKVTed6APFAEPA3cAEYCPgDfwH/AG9axPUOsBEIAW4CvwOe\nFuenAmdM544DgyzOjQb2AR8AV4E5pmN7TeeF6dzfwA3gKOBucZ9fA5eBWOAtwMYi3r3AQuAaEAP0\nzSE/WgE7gUTgGNDfdPxdIAVINeXp81muawgkAw9ZHGsLXAHsgKbADtO9XQGCAUeLsAbgDeAP4C5g\nazrWOw/PyZxPFvFJ4BHTf39Tft8EEoAp2dy75TNIBM4CnU3Hz5nyflSW8pVdvutMeX7FFM/LJpls\nLa79Eq1MJZiety7r/eT03K3IPwb403SfZ4GXspwPMqV3HhiXJY+eBA6b0jgHvGNxnT6L7DuB2aa8\nugn8BNQxnbMH1piecyIQgdbomgsYgTto5WdpNvcQClwErgO7ATeLc1WARaa8vo5WrqsAcSb5bpm2\nTmjv4poc7iHbvML03mcj3zJgYZZjm4HJeXzH91qTxyJfx1nsjzXJeA0IA1zyWyYeWHcWZmQlsQFP\nAGmWGWwlzCzgIPAwUBfYD8y2KARpwNtoCuwFtBd9LVAdcENTeK6m8O+gKcchpvBT0JStnel8AJqS\ntAGGAbeBBhYFIw34F5riq5KlsPgBhwBH08NvZXHt16YCWN1UqP7CpJxNcaSaZNcBE9BefmElL+yA\n08CbQCWgp6kQt7C4vzU55OUO4AWL/QXActP/R4A+aEq8LtqL/aFFWAMQBTQGqlgc652H52TOJ4v4\nLBXbBaCb6X8t4NFs5M94BmNMeTUHTbF8YpL7cVN+VMtDvo8HTpju5yEgnMzK5zvgv0BV0z39hkkB\n5fW5W5H/SbRKVgDdgaSMe0V7Fy6ilVkHNAVtmUc9AA+0stkGuAQMtKao0JTUGaA5WjndCcwznXsJ\n+N6Uhg5oB9SwuG6cNdkt7mGsKT8zKvsoi3OfmOJwMsXd2RQuk3zWyqqVe8gpr3qQvfJ/DK1yFBbl\nKRlomMd3PE/KH80ycdr0vG3RGhb781smHlh3FqViLo4NCAQu5hLmDOBvse8HGCwKQTL3WmTVTQ+s\ng0X4QxYvyTvAQYtzNlgoHitpRwEDLApGXJbzloWlJ5py6YipdWk6rkNrkbe2OPYSsNMijtMW5xxM\n91Dfijzd0BSEZfzrMLUCyV35jwN2mP4L00vyWDZhBwKHLfYNwNgsYQzcU/45PSdzPlmct1RscaY8\nqZFLWRgNnLLY9zDFU8/i2FXAKw/5vgMYb3HucVNctmgt4buYKjnT+WeA8Lw+9zyW/03Aq6b/XwH/\nsTj3iGUeWbn2Q+AD03899yv/tyzCTgT+Z/o/Fq1ibmMlzp3kovyzhHc0pVsT7V1KxuJL2iJcJvms\nlVVrYXLIqx5kr/yFqTw9Ztp/AVOZzyZ81nc8r8p/GxZf16b7T0IzvT5wmcjrVh5s/leBOrnYkBui\nfUZmEGs6Zo5D3uvoSjb9XrI4nwxUs9g/l/FHSpmOZjZqCCCEeE4IESWESBRCJALuQB1r12ZFSrkD\nWIrW+vlbCPGZEKKG6Xo7K/fgZLF/0SKeJNNfS5kzaAicM8mdXVw58Q3QSQjRAK2FlA7sARBC1BNC\nrBdCJAghbqC1POtkuT7b+yf355QTT6OZfmKFELuEEJ1yCJv12SKltPa8c8v3hmS+H8twLqZrL1iU\nhf+ifQFkIofnfh9CiL5CiINCiH9McfpzL4+zynMuy7UdhBDhQojLQojraF8uWZ+PJRct/idxrzyt\nRjNRrBdCnBdCzBdC2OUQj6UMOiHEPCHEGVMZMZhO1TFt9miNgAKTS15li9Q08Xq0yhpgBJoJMyPe\n3N7xvOICLLGI5x+0iscpP2XiQSkPyv8AWgtrYA5hzqNldAbOpmMPSuOMP0IIG6ARcF4I4QJ8DkxC\nGy3jCESjPdAMZE4RSyk/klK2A1qjfXK/jmZTTrVyDwkPIPt5oLFJ7nzHJaW8hmb/HYb2Uqw3vSyg\ndbhLwENKWQN4lsz3Djnff07P6TbaFw0AQoj6WeSKkFIOQFOum4ANebmfXMgt3y9gURZM5zI4h1Yu\n60gpHU1bDSmlm7WEsnnumRBCVEarfBeifak4Alu5l8cX0MpiBo0zx8BaYAvQWEpZE1jO/c8nV6SU\nqVLKd6WUrdHMMv2A5zJO53L5CDRzR2+01r7edFyg5fcdNFPNfclaOZapTADmMpGHvMqNdcAQ0zvd\nwRQXeXzHLeUjOxnRyshLFuXDUUpZRUq5H/JWJgpCmVf+UsrraPb6T4QQA4UQDkIIO1OtP98UbB3w\nlhCirhCijin8mgIk204IMdj0tfFvtJf8IJptV6L1GSCEGIPWKsgTQggfU+vMDq3g3AHSTV8lG4C5\nQojqpgI4+QHv4Ve0VlyQKZ96AE+htXTyylq0l32I6X8G1dE6464LIZzIf2HN6TkdAdyEEF5CCHu0\nT34AhBCVTPMlakopU9E6yNIpIHnI9w3AK0KIRkKIWmgdgRnXXkCrJBcJIWoIIWyEEE2FEN2zppPd\nc7ciUiU0+/dlIE0I0RfN1JTBBmCMEKKVEMIBmJHl+urAP1LKO0KI9miKON8IIXyFEB6mOQE30CrI\nDHkvAU1yuLw62vtyFU0pvpdxwvQ1+hWwWAjR0PSV0MmkyC+b0rCMOwp4zDRHoSYwzeJcbnmVI1LK\nw2iV0RdAmJQy0XQqz++4lPIyWkPhWdO9jCVzxbYcmCaEcDPFVVMIEWD6n9cy8cCUeeUPIKVchPZS\nvoX2UM6h1cybTEHmAJFoo0yOoo3Qm
VOAJDejtXyvASOBwabW0HG0kQoH0F4CD7TREnmlBlqr4hqa\nCeEqWocqaJ3Et9FGLexFU7pf5VdwKWUKmrLvi1a4PwWek1KeyEc0W4BmaH0tRyyOvws8ijZK40fg\n23yKl+1zklL+hdYhvB04hZYHlowEDCZTwni0vqDCIKd8/xzN/HHEJGvW+30OTQkdR3umG4EGVtLI\n6bmbkVLeBF5BU/LX0JT3Fovz24CP0DqeT6M1SEBTtqDZ7WcJIW6iVawP+nVU33QvN9BGquxCMwUB\nLEFrMV8TQnxk5dqvTfeYgJYvB7Ocn4L27CPQzCDvo9m8k9BGE+0zmUk6Sil/Rht19wdav9wPGZHk\nlld5ZC3aF4q5gfMA7/gLaI2gq2gd8fst4vrOdH/rTeU2Gu29hDyWiYKQ0ZutyCNCiHfQOtCeLWlZ\nFIqcEEK0QlMolWURz9tQlD3KRctfoVBoCCEGCSEqm8xQ7wPfK8WvsIZS/gpF+eIltIlBZ9AmXE0o\nWXEUpRVl9lEoFIoKiGr5KxQKRQWk1DrXqlOnjtTr9SUthkKhUJQpDh06dEVKWTe3cKVW+ev1eiIj\nI0taDIVCoShTCCFicw+lzD4KhUJRIVHKX6FQKCogSvkrFApFBaTU2vwVitJEamoq8fHx3Llzp6RF\nUSgAsLe3p1GjRtjZ5cmh6n0o5a9Q5IH4+HiqV6+OXq9HiHw7wlQoChUpJVevXiU+Ph5XV9cHikOZ\nfSoQwcGg14ONjfYbHJzbFYoM7ty5Q+3atZXiV5QKhBDUrl27QF+iSvlXEIKD4cUXITYWpNR+X3wx\nbxWAqjQ0lOJXlCYKWh6V8q8gTJ8OSUmZjyUlacdzoiCVhkKhKL0o5V9BiM1m2kdcXM7XPWiloSga\nNm3ahBCCEydyXn5h5cqVnD//4IvV7dy5k379+j3w9YrSj1L+FYDgYMjuC9HZ2frxDLKrHHKrNCo6\nRWUqW7duHV27dmXdunU5hiuo8leUf5TyrwBMn66ZbKwxd27O12ZXOeRWaVRkispUduvWLfbu3cuX\nX37J+vX3Vt18//338fDwwNPTk6lTp7Jx40YiIyMJDAzEy8uL5ORk9Ho9V65cASAyMpIePXoA8Ntv\nv9GpUyfatm1L586dOXnyZMGEVJQZlPKvAOTUSn/11Zxbp3PngoND5mMODrlXGhWZojKVbd68mSee\neILmzZtTu3ZtDh06xLZt29i8eTO//vorR44cISgoiCFDhuDt7U1wcDBRUVFUqVIl2zhbtmzJnj17\nOHz4MLNmzeLNN98smJCKMoMa518BcHbO3uZ/9ar2m9E6BQi0WP024//06Vol4uysKf7AwlohtxxS\nVKaydevW8eqrrwIwfPhw1q1bh5SSMWPG4GCqoR966KF8xXn9+nVGjRrFqVOnEEKQmppaMCEVZQal\n/CsAc+fCs3lYcTijdZpVsQcGKmWfH7KrbAtiKvvnn3/YsWMHR48eRQiB0WhECEFAQECerre1tSU9\nPR0g09jwGTNm4Ovry3fffYfBYDCbgxTlH2X2KQUU9Tj6/Cju2Fitc7hOHTWc80EpClPZxo0bGTly\nJLGxsRgMBs6dO4erqys1a9ZkxYoVJJnsTP/88w8A1atX5+bNm+br9Xo9hw4dAuCbb74xH79+/TpO\nTk6A1kmsqDgo5V/CFNc4+tq18xf+6lXta0FVAvknMBA++wxcXLSK1MVF2y/I19O6desYNGhQpmNP\nP/00Fy5coH///nh7e+Pl5cXChQsBGD16NOPHjzd3+M6cOZNXX30Vb29vdDqdOY6goCCmTZtG27Zt\nSUtT67xXJErtGr7e3t6yIizmotdbNxG4uIDBUHjpBAfDmDHwICZdB4eCK6+yzp9//kmrVq1KWgyF\nIhPWyqUQ4pCU0ju3a1XLv4QprnH0gYGwYkXm1uiECdpvbqhJXQpF+UMp/xKmOMfRBwZqXxOrV2v7\ny5ZBfHzerlWTuhSK8oVS/iVMcY+jt+xjADAa83admtSlUJQvlPIvYQqjc9DaaKHsRhBNWjefpJ4v\nwbgO0HGxdnBcB2qNbY7/M4K+IwSp2OLecRSM8AfUpC6FojxSKB2+QoivgH7A31JKdyvnBbAE8AeS\ngNFSyt9zirOidPg+KMHBmh0+Y2im5WO0s9OOpaSYDnSZD13fo2GNhpz/fhyi+0ykXTLojNgZIU0I\npI0WQdUrjdBfseNYSwPOEY/z992eDB5oy7Va29kauDXf8pWXiWGqw1dRGikNHb4rgSdyON8XaGba\nXgSWFVK6FZKJE2HkyHumm6z1d2qqheIHSPABuyTOp/yJTe/XsT06FHSavSdVB9LC6dvtyqkcaxlD\nvTgXzrlF8rj9EtZemULML73zPB/BUr6M4atjxmjDRiv6mgAKRWmhUJS/lHI38E8OQQYAX0uNg4Cj\nEKJBYaRd0QgOhuXLs3fUBkCX+bTVLyQGPUZs+NawBLF9DqTbkK5LJ9XnKy2cMG02EuK6orvUEmpc\ngjs1ueRswF7cYov3eQhbwJiv/+bDtxcS6/ESsvP8bOcjZCdfaqo2d0CtCfDgxMfHM2DAAJo1a0bT\npk159dVXSclUy9/j/PnzDBkyJNc4/f39SUxMfCB53nnnHfO8gqLEMp23336b7du3Zxs2KiqKrVvv\nfaFu2bKFefPmFbmMZZHisvk7Aecs9uNNxzIhhHhRCBEphIi8fPlyMYlWdggOhlGjclH8QNsEG6IC\n5vGt3hEbJDF6A3SbD0eH31P4AjDagATSdeC8F2O9E9gkNoQq1yGlKskOd0Hq6H8xDtvG/yMyYDb2\n7qtomKDV29aGgObkQdSScj98tJCnbUspGTx4MAMHDuTUqVP89ddf3Lp1i+lWMjEtLY2GDRuycePG\nXOPdunUrjo6OBZLtQXjQCWWzZs2id+/e2Z7Pqvz79+/P1KlTHyit8k6p6vCVUn4mpfSWUnrXrVu3\npMUpUbLqjokTtdbyfaNzRvjj3nEUqdiSjtZZW7/+Vhz+qceUgHge8+3OlIB4xu9xxMZ9vabsMzab\ndDjxFBjtzNGlOyRS5bITVLoNtx8CYWTLcx/z2nN/YGObhG59KIk44t3PiyrP+BLbxT+TOPkZElpu\nh48WwbTtHTt2YG9vz5gxYwDQ6XR88MEHfPXVVyQlJbFy5Ur69+9Pz5496dWrFwaDAXd3rfstKSmJ\noUOH0rp1awYNGkSHDh3I6E/LcPVsMBho1aoVL7zwAm5ubjz++OMkJycD8Pnnn+Pj44OnpydPP/20\n2ZVEdmTMLvb29qZ58+b88MMPAPfJCLBgwQJ8fHxo06YNM2fONMcxd+5cmjdvTteuXTO5mR49erS5\nUouIiKBz5854enrSvn17rl+/zttvv01ISAheXl6EhISwcuVKJk2aBIDBYKBnz560adOGXr16EWcq\ngKNHj+aVV16hc+fONGnSxBz/hQsXeOyxx/Dy8sLd3Z09e/Y88PMrjRSX8k8AGlvsNzIdU1jBmu
5Y\nvvx+N8EA7mfrEu23mqc7dkYAT3fszDa/nfQ6Vpuuke7s6b4L99NOfN77LOm6dEi3gXMdzIZ+0Wwr\nJLrA3y3BaEfV25VJrpMAcZ2g8k240kwzC9lI0m3TuNN6M+nDhhDpeYLk5ruoF+eZSZ78DAm1sSmn\nfQBF4NP52LFjtGvXLtOxGjVq4OzszOnTpwH4/fff2bhxI7t27coU7tNPP6VWrVocP36c2bNnm338\nZOXUqVO8/PLLHDt2DEdHR7MPoMGDBxMREcGRI0do1aoVX375Za7yGgwGfvvtN3788UfGjx9vdiZn\nKeNPP/3EqVOn+O2334iKiuLQoUPs3r2bQ4cOsX79enMrPiIi4r74U1JSGDZsGEuWLOHIkSNs376d\nqlWrMmvWLIYNG0ZUVBTDhg3LdM2//vUvRo0axR9//EFgYCCvvPKK+dyFCxfYu3cvP/zwg/lLYe3a\ntfj5+REVFcWRI0fw8vLK9b7LEsXl1XMLMEkIsR7oAFyXUl4oprTLHNZ0h5RoQy+lDm42hOjhYPBl\nFKuZm1iVLX57sO9Qn7uOe+kf1pXuF28wJSCebru6s6dTBBirwFVnxLaP6WBw4KA+GfpOQta4wJNR\ndfkDT6olvk6s50r4dQCLDsazq6MtW3r9BkYd2GifHEafLzFKAUJSOWwOXx2K0FxU2ITj2DqCZ/yD\nWLXKekWVlYyvmOzcSZdZSmj5sz59+lh16bx3716zK2h3d3fatGlj9XpXV1ezgmvXrh0Gk3+R6Oho\n3nrrLRITE7l16xZ+fn65yjJ06FBsbGxo1qwZTZo0MS87aSnjTz/9xE8//UTbtm0BbbGaU6dOcfPm\nTQYNGmR2U92/f//74j958iQNGjTAx8cH0CrC3Dhw4ADffvstACNHjiQoKMh8buDAgdjY2NC6dWsu\nXboEgI+PD2PHjiU1NZWBAweWO+VfKC1/IcQ64ADQQggRL4R4XggxXggx3hRkK3AWOA18DkwsjHTL\nK9nqiLO9ofkP2LT5kqrD+zDeX/C6nySxqmabv1vrIjXiPMyKf2FoI3aH76Lf2pcgzYGHt81gu2Eu\n++nCL4Y5tFwWSs2PLvFY/z3E7V3K8eixJK3dzaCDLqTrd7Gl+xHsjRK71T/AkcDMHcTptrx3MZS+\nqZv5j6iP3ZBBJB7zYdUqrV8iJ7cR1paULFd9AEUwbbt169b3tdhv3LhBXFwcjzzyCABVq1Z94PgB\nKleubP6v0+nMdvnRo0ezdOlSjh49ysyZMzO5hM4OkeUhZ+xbyiilZNq0aURFRREVFcXp06d5/vnn\nC3QPD4rlvWcMf3/sscfYvXs3Tk5OjB49mq+//rpEZCsqCmu0zzNSygZSSjspZSMp5ZdSyuVSyuWm\n81JK+bKUsqmU0kNKqQbw58B9OqLLfKr3G47/xXgIW0i6XTq3K6Wz3AfNjCN0IIw4xD7KDeejzOtk\nx4LQxkyWiSAE38tvmZTwBlcax9OLcGyQ9CKcOAd3Pvm8MhYNIFxc4DuG8LrTcPTH2qNbvxE7UrFv\nEap9AUggXYBNGlOejabui40ZO/waVUJX8IthFm883IX154fT+oX5WDiPzER2HcLlpg+gCKZt9+rV\ni6SkJLMCMhqNvPbaa4wePdrcQs6OLl26sGHDBgCOHz/O0aNH85X2zZs3adCgAampqQTn0T4XGhpK\neno6Z86c4ezZs7Ro0eK+MH5+fnz11VfcunULgISEBP7++28ee+wxNm3aRHJyMjdv3uT777+/79oW\nLVpw4cIFs0no5s2bpKWl3efK2pLOnTubl78MDg6mW7duOd5DbGws9erV44UXXmDcuHH8/nuOU5PK\nHKWqw1ehYdYdXeZDv5dwNx7jpvs2tg77ks7sg2RHrfUtAF062N6hc9ggbq/4nf5hXbnc/BBrnNpo\njnzS08Fg4Pt9U0jfE5QpHWutbXPa+4Iw/PAzNUnEOGwId2wF2KSji3gem7tVIc0eqTNyteE57ugg\nkGCi9NeYOTyaay230TvZJs+uIzIoNy4kisCnsxCC7777jtDQUJo1a0bz5s2xt7fnvffey/XaiRMn\ncvnyZVq3bs1bb72Fm5sbNWvWzHPas2fPpkOHDnTp0oWWLVvm6RpnZ2fat29P3759Wb58Ofb29veF\nefzxxxkxYgSdOnXCw8ODIUOGcPPmTR599FGGDRuGp6cnffv2NZt2LKlUqRIhISH861//wtPTkz59\n+nDnzh18fX05fvy4ucPXko8//pgVK1bQpk0bVq9ezZIlS3K8h507d+Lp6Unbtm0JCQkxm87KC8ql\ncylk/r75/O/XM4RvbIHtY29hFHZ0+sOF/e2Om23vZiTYGCuxPTiFHgYwoqNtx0Cim1xGBt8b8mZj\nk32LO+txy9nDdJkPtc5Qpf5+0qOHs+3gdtDvZGInPSeaJsDtulDjAqTZaf0RRjsWhbgyWSZiG2+w\nWgHY2IC9feZ+gdLuNrosz/A1Go2kpqZib2/PmTNn6N27NydPnqRSpUpFkt7o0aPp169fnuYZKApG\naZjhqyhEzuz2Ye8/a7Hv/iaVdk3F1iaJ/T5HNcVvHqNfGSLGgxCkS8FTw3Ws19fDjjSiD67CZV9m\nVwzZtaqFuH+kTYb3TxcXYF8Q/PBfkr84SpeDHXAlBglcaJRA1TXf0OuDVdgcGQZ2KVApmVoXmvK5\ne3UWCUdSjYJ0BO/qu1C933CtIgFeeqnwFztRZE9SUhJdu3bF09OTQYMG8emnnxaZ4leUHVTLv4Sx\n9IFTued8xtWzYfAPe+n30PMkDXsWnV0SRhujZuYBSEdrYadVpvO6qeyv74BwX4/tRTecr9lxZt/n\nVlvRwcGaywVrjzu7hWMyhpxmbaH3DnyBnT/7s8nwEYf113hteAzo7mgyGu1M8whsaXlF0vx8dbZ4\n3AIBbdfPoMpgG2o+mj8/QaWBstzyV5RfVMu/lJPdZM+s4/lbnbHhE6d5HH7IwA+GxehO+mO0TdOU\nasbErLSqNP55Akhb9g9byFMX42j7xTKa/zydM/s+z7YVHRiY/47W7EzXXR75nNltqrNXn6IpfgkT\ngp+i3+pJkG5r+jpJ40SDO2zxOQ+2d1m03pVn67/O/uqvsX+rPlOe1O8YzjNL5xdGVisUijxSXOP8\nKywTJ2b2dWM5pj3reP5vDUv5NrQRUwLicT8tMbbZqSl8gHRb7A49h85jDfE9vqbvznbsrfMwPzZ2\n4us53nkymbi4WF8yMqeO1sDA+yuS4GB4cXtvktr+TvXoxkyJPscMgzYhaPF6T75wr86lh25xrUmU\ndoHtXZY+cZqYesCJp7jd9EtmfZJObKMmIHy49NhQvvtkA8G1lOlHoSguVMu/CMnOyVnGKJvYRtoQ\nznf1XUhH4IKmmW2THTjq+Yfmc+evJxFh70OKA7o2a7DZNZ0mx9uzTfcEDx1dz9cvBeVZYRbWCERz\npbUviJs/rGemYR82SGyQPG1I5IXom1xrcBbSKpu+W
AQx9W/DNVdsnffQPqoFfz3+Gb0fXsxDAb1p\nG/oGd0/4Us4GUygUpRql/IuQbJ2cjfDHrsF43k3YzE23bcwcHk1Xfw/qjdPz2jOnSK19Dq48AilV\n6X+gKb8c3IZDyBqM0c/g2+QUL3b5Gbk3CIMhfy3lwhqBmNN4/JFuk3htWAwIWLSmJZ1P1AZhMlvV\niqFGoiP7vc7S6EwLtnte4snIxpwzjKKr/h2utpxfvtw8KBSlGKX8i5DslGTts56c9vuMi/UvsSjE\nFWzust/nKJcbnINKybQKG8uLS6cwIaQP3wcE87v+Gmtjv8T5h+ns27oap/v8oeadjJE8puH/D7Ri\nWHZ9By4uUP2ZdFpe68ui792h/hH2t7wK5x/V+gGSa/GPkwGb6/WIb3qSBlG9WON9i84dn2ZvwFJq\nJTTB5IPrvjTLpQ+gfFKtWrX7ji1fvtw88WvlypWcP3++uMXKhHK/XIaQUpbKrV27drKs4+IipaYq\nM2/fMkBW7jhHMlPIxmOcJTORvKNtrgM7SwkyHWQMLrKtfoGky/uZrndwkHLNmuK9lzVrtHSt3U92\nMjmPfVTiP17av15N9hlUTzJTSF5upt1vkKNkmoN0HNRPOz7sKcnE5tJhxGOZ0qxUKXM6lSoV/71L\nKeXx48fzHPb996XcsSPzsR07tOMFoWrVqjme7969u4yIiChYIlZITU3Nc9iZM2fKBQsW5CnsihUr\n5Msvv/ygYimk9XIJRMo86Fgp86xVAAAgAElEQVTV8i9C/P3v92MjBAxkM9sObkfEdeKcS5zmoiGl\nCqRUIaZlNIv1nsTigisGDhumaGPtLSgJPzjWnM1lkJ35SJ5aiL3HWsbsqcf2R9KYENYE25qx6OK8\noUoiVEoi0fMHON8WWn4Pdf9i6Nmr5utffTXLimRo+6W9b8DHB4YOhfBwbT88XNu3MlG1wGS0tDdu\n3EhkZCSBgYF4eXmRnJzMoUOH6N69O+3atcPPz48LF+73pajcL1dg8lJDlMRW1lv+5pZyl/cl+h0S\npBRCygkTpPxFj+z4zENai3eGjWQmsrO/h1yk95S8UUMytYbW4s+mlZ0RV3EiRP7leH/v+3LH2R2y\n75z35aLpC6R0cZEL9Z7SocsMqR/mrn0BWGwTOjaV6WZXUNnfu0WQYiM/LX8ptZZ+nTpSzpih/Wb9\nEngQrLX8LVvali3/lJQU2alTJ/n3339LKaVcv369HDNmzH3Xjxo1Svr5+Umj0Sj/+usv6eTkJJOT\nk+WKFSukk5OTvHr1qpRSyrCwMPnCCy/I9PR0aTQa5ZNPPil37dolIyMjpbu7u7x9+7a8fv26bNq0\nqVmeUaNGydDQUHn37l3p6uoqf/vtNymllNevX5epqan3tfwt9/v16ydXrlwppZTyyy+/lAMGDDDH\nOWTIEGk0GuWxY8dk06ZNpZRSLly4UM6ZM0dKKWVaWpq8ceNGQbK6zFCQlr8a6llETJ8OSW3ng9EW\nu4BBrAq1Z5jhb96JcqTPszak664h/nqCh27ak5j+EPt9vqLNP014/zt35rVszGGndDBkH39x+8Fx\nds7/MNGgLtoXi+90X+3AnCnMqQXPMYTlnS/AJQ+ob3IyZrSFi14IzhSy5CWDry9MmACzZ8OMGdp+\ncXLy5Emio6Pp06cPoLl4aNDA+sqpyv1yxUSZfR6AvHRCxsYCCT5U6vYOdnsmMyHgH0YPksx+/Brp\n15phFzaXX9Ylc+WH7/h56xkqh83m2zZOBJ3cxz+b1yP2B90fqYkCOoh8IAprmOjSpfCdU2M4FgD1\njlqsKpbGsk5JhNcYYA5bu7b1OLI7XpoID4dlyzTFv2zZPRNQcSGlxM3Nzewu+ejRo/z0009Wwyr3\nyxUTpfzzSZ5X6Bvhj3v9ldiHrkJ0m8ftK4+y2hO40ZB+nywg5OCvuBJDOgJXYuhysAMpG+6twJRd\ni1qnKxk/OIU1TDQwEHT1GoPPcgAmhDVlUZjpZPNtvNn/3lT1JUvAzi7z9XZ22vHSTIaNf8MGmDVL\n+7XsAygqLN0Zt2jRgsuXL3PgwAEAUlNTOXbsmNXrlPvlioky++STnFbos1SEtc96Eu33Pv3DYvjf\nqX6keYWCBGH/D5P1/eluEOhIzxSPuH7v/9y51v3qlKQDNGuzfR8Ez8HbST3TkiGp3fn0wv8gTkAt\nRz70rsa15kcypQf3fB85O2v5UtpnAUdEaAo/w9Tj66vtR0QUzPyTlJREo0aNzPuTJ0/OdD6j87ZK\nlSocOHCAjRs38sorr3D9+nXS0tL497//jZub233xZrhfvnHjRo7ul//88086deoEaMNO16xZk8n9\n8sMPP5yr++Xk5GSqVKnC9u3b8fX1Zd68eXh5eTFt2rRM13z88ceMGTOGBQsWULduXVasWJFj3uzc\nuZMFCxZgZ2dHtWrVVMs/L+SlY6AkttLa4ZvXjs/vGCDtO87J3Kn5ZlVZueMcWfN1O7lWX+++OFxc\nMsexZo12TAjttySGOBaU8nAPUua/w7eskNEpqyibqKGexUhOK/TN3zef8Bjt234Am5l7MVRbKF0A\ncV2punYdtt3mkrrnLd5p9FSm663ZzwsyIas0kGcTWQHiVxPAFIoHQyn/fJJTx6dPQx+GbhxKeEw4\nAvio5yXNtcFFD3DeR6/6C9CFBuPb5BSPNvq83Puzz8lEBgVT3kVdsVQUVq5cqRZdqaAom38+yc4O\nnaCfz+71Pky7/QZDP+uNy7O1iG18Ec61p9dXc9nX8Ve2+M2g/3ZbunjuyLRubnklO/cWcXH3rxVg\n6e00L5VgXvteFAqFdVTL/wGwZo45s9uH1TcG8G5wU/r+2phDj1zTxvjveJfpzGXrwe3Yh83m8KPG\nCqH4IWcTWW5fBbmRU8WiUChyRyn/QsLtoBG79atIGT6c1Z3+RqTYQ6oD4/gCX3bi6xLD1p4dmNRo\nV+6RlRNyMpHlRXnX/bc/A54ZBba2mn3M1pYBz4yi7r/9c6xYFApF7ijlX0gM/nEcM3mXOzY6qJSM\nPPA6fUImEhrwDeGugMGA79zeFabVDznPDciL8u58qS5bWqxmgE9nAAb4dGZLi9V4nKmLabh5JipV\nKv7JbwpFWUUp/0KioTGOk+5HsEsHds3A1vsjfqU900I9iWjtWNLilRjZjVjKy4zhzaHB1P2rHVv8\n9lBzTBu2+O2lf1hX/KP/4GrL+5d9lKVzOepCoyy4dLZGUblu7tGjB8WxzrdlOv7+/iQmJmYbdtOm\nTRw/fty8n5tb65JEKf8C4B/sT+DSxej1EKJ/mOBWVUj9YxQNG2zGIXQFxoBA3mUm9rrQkha11JGn\nGcNGI1MPpEK6LTdc/qBGnAfdL94gKOAcbRPuL7qpqcXv7dQalkN+MwiPCWf+vsJfp3j8+PE899xz\nQNlR/v3792fq1KklKBGkpaU90HVbt27F0TH7xlxW5T9r1ix69+79QGkVNUr5F4Aj3/Zm7ZUpTGpg\nQ7hTVe4c
GwE+/6XGWR82GT5CFxpMY6dvmbm7dD78kibXeQw6Hbvq1wCbNDDacsP5D14bcYqFoY34\n1rDUapylocPXcsgvaIp/6Mah+DQsfJ/OBXXpHBMTQ6dOnfDw8OCtt94yf13s3LmTfv36mcNNmjSJ\nlStXAppC8/Hxwd3dnRdffNHsX6dHjx688cYbtG/fnubNm7Nnzx5SUlJydN3s5eVl3qpUqcKuXbu4\nffs2Y8eOpX379rRt25bNmzcDkJyczPDhw2nVqhWDBg0iOTnZap7o9XqCgoLw8PCgffv2nD59Grg3\nA7pDhw4EBQU9UDp6vZ4rV64A8PXXX9OmTRs8PT0ZOXIk+/fvZ8uWLbz++ut4eXlx5syZTG6tf/nl\nF9q2bYuHhwdjx47l7t275jhnzpzJo48+ioeHh9mx3q5du8x507Zt22xdYTwweZkJVhJbaZ3ha8lw\nu1BJx4WSmUj9GCdtFm/HhXIh/5YxuMie/Fwi7pfLC/2HPyeZKWT/jt1kN9/u2oI3JvfXRm1tSG3T\n7zAveJN1lnRhkW+Xzmd3yDrz68gZO2bIOvPryB1nC+7TuShcOj/11FNy1apVUkoply5dak4jPDxc\nPvnkk+ZwL7/8slyxYoWUUprdPEsp5bPPPiu3bNliTn/y5MlSSil//PFH2atXLynl/Yu2WFvEZcuW\nLbJr164yJSVFTps2Ta5evVpKKeW1a9dks2bN5K1bt+SiRYvM93DkyBGp0+msLl7j4uJidu+8atUq\n832MGjVKPvnkkzItLU1KKR8oHRcXF3n58mUZHR0tmzVrJi9fvpwpT7LOmM7YT05Olo0aNZInT56U\nUko5cuRI+cEHH5jj/Oijj6SUUn7yySfy+eefl1Jqbq337t0rpZTy5s2bVhfVUTN8S4jVqcNZdDAe\n4rpicEmAuK4sOhjPq3yMKwZ2oLX41QiUB2N/vcv0DetB94s32OsdTbdd3SHNnv3tjhGif1gLpA+H\ngKGQ4FMi3k6zw9fVlwneE5i9ezYTvCfg61q8Pp0tXTp7eXkxZ84c4uPj7wu3b98+nnnmGUBznZwX\nwsPD6dChAx4eHuzYsSOTw7jBgwcD0K5dOwwGQ57iO3XqFK+//jobNmzAzs6On376yezzp0ePHty5\nc4e4uDh2797Ns88+C0CbNm1o06ZNtnFm3NMzzzxjdm4HEBAQgE6nAyhQOjt27CAgIIA6deoAmF1f\nZ8fJkydxdXWlefPmAIwaNYrdu3ebz1vLty5dujB58mQ++ugjEhMTsbUt3GlZapJXPpm/bz4+DX3w\ndfVFhxE6fgjOwDU9OO+DjnvRHbwXvjQppLLG5Q+30s19IVMC5rEwtBGTDbt4Kub/+GH4l4wbdpW3\nfhN86m2DS+j7xCT6srQUzZIOjwlnWeQyZjw2g2WRy/DV+xZrBSBNLp0tFV92ZHXpDGBra0t6+j3H\ng3fu3DH/Tpw4kcjISBo3bsw777xjPgf33C3rdLo82dVv3brF0KFD+fzzz83rDUgp+eabb6x6F80r\nlvdk+T+rm+qCplNYWMu3qVOn8uSTT7J161a6dOlCWFgYLVu2LLQ0K3TLP7/uBYKD4a3FZ+jz36fY\n0LQ+izvCa35Amj3VznSEsAW85geLOopy7bahOLnZPZ1230xlsCGRdARjDWdxWL8GxwtNmdMdnoxs\nzDnDKL6duL3U5HOGjX/DkA3M8p3FhiEbMvUBFBUP4tK5S5cumVwnZ+Di4sLx48e5e/cuiYmJ/PLL\nL8C9SqBOnTrcunXLbM/Oq1xZGTt2LGPGjMnkstnPz4+PP/7Y3Jdw+PBhQPPZv3btWgCio6P5448/\nsk0zJCTE/JvhiTQrBUmnZ8+ehIaGcvWqtuzoP//8k+O9tmjRAoPBYO5/WL16Nd27d89WfoAzZ87g\n4eHBG2+8gY+Pj7kvoLCosMo/v75hMsLX/b0rRqFjbMAV/uNTE9LswViJHtENNBNQ2EIWN/Uqs87Y\nShtRnwTx71lT6OFiwFak46OLYjYzOO9yiioJLVnjfYtp+j74vtcH94AnqPRa4bWMHpSI8xFsGLLB\n3NL3dfVlw5ANRJyPKFC8GS6dM7bFixdnOp/Roenl5YXRaGTjxo288cYbeHp64uXlxf79+++Lc8mS\nJXzyySd4eHiQkJBgPt64cWOGDh2Ku7s7Q4cONa/g5ejoyAsvvIC7uzt+fn5WXThnxdfXl+PHj5s7\nfDOIjY1l48aNfPXVV+aOzcjISGbMmEFqaipt2rTBzc2NGTNmADBhwgRu3bpFq1atePvtt2nXrl22\naV67do02bdqwZMkSPvjgA6thCpKOm5sb06dPp3v37nh6eprdaw8fPpwFCxbQtm1bzpy5tyqdvb09\nK1asICAgAA8PD2xsbBg/fnyO+fbhhx/i7u5OmzZtsLOzo2/fvjmGzy8io9YrbXh7e8uiHMOr11tf\nltDFRVPa2YX/loGM1D/P7RHDoFIypFSh6toQthgW8xh7eIb1nPIcQlRUkYleoQlvIhg6BFLiunGj\n5R5qnOhGJec9VItzx9AyGrdjfkSH/q/Q0/3zzz9p1apV7gHLONWqVTMv2FJW0ev1REZGmu3x5Rlr\n5VIIcUhK6Z3btRW25Z+be4GsJqHYLv480nE8A9nMLGbcu0B3l1nMwJedPOKSxsA1SvEXJRGtHZkW\n6kmlkG+oEzGYGy33cFXW0RR/RFeiN4blHolCoSicDl8hxBPAEkAHfCGlnJfl/GhgAZDxXblUSvlF\nYaT9oOS0ILk1j5O1G3hy2u99Bj7UkF88okHYamvPplfi7WHReIWAIaZYb6FC4uMZytAfPdnAUHy3\n7sSmuRPSMQGR6ET01r0lLV6Zp6y3+oE8jzKq6BS45S+E0AGfAH2B1sAzQojWVoKGSCm9TFuJKn7I\n2b2ANY+Tnx/8k8phs9nic57blQHbFHRh87AP/obboirz3F2KTfaKTETN3mx48wi+7MTdvyuyZgLc\nqoOsmYC7f1cAFr+1EP+5hT+btrSaSBUVk4KWx8Iw+7QHTkspz0opU4D1wIBCiLdIycm9gDWT0EA2\ns+3gdkh0ARsjIq4zPx/8H1sNC6i6fg1/3Bha/DdRAQkKAt+5vXEf4scxn73UO+GNEJJ6J7w55rOX\n+sO8mZIyj97JhWvRtLe35+rVq6oCUJQKpJRcvXrV6nrLeaUwzD5OwDmL/Xigg5VwTwshHgP+Av5P\nSnkuawAhxIvAi6AtKl3UZLcgeXYmodCO58AxDmK7IZ33EtqxCZ8cPEPXc9Op1bHwW5qK7DnhZMA5\nwg/D1jA+0Hvy2ojj2F7Rc6n57yxa7cHkc1NZDGyvks7W6QV3pdqoUSPi4+O5fPlywYVXKAoBe3t7\nGjVq9MDXF9ckr++BdVLKu0KIl4BVQM+sgaSUnwGfgTbap5hku4+5czPb/AHGdnRjpd9xKofNJuig\nkfkd/VjmN4Nk3Bg5qbca0lnMNNp0Qhumiw2TDUdY+mdnYjz343qkM
5MN+1ms92RKyjwWUjgOxOzs\n7HB1dS2UuBSK0kBhfBsnAI0t9htxr2MXACnlVSnlXdPuF0D2A3RLAdZMQiFNalM5bDbbDm5nFjPZ\ndnA7lcNms6FJbaX4S4AM01wczizWe2J45CSuRzoT0+YATQZ2ZkpAvDYreM39DuDUwu8KBQV37Ib2\n9XAWcAUqAUcAtyxhGlj8HwQczC3e0ubYzdFRyp78LGNwkUaE2XGbo2NJS1YxcXHRnLq11S+Q4vXa\ncpHeU0qQrgM7S97RfiXIdDJ71VuzRkoHB3nPKRza/po1JXMfCkVhQ3E5dpNSpgGTgDDgT2CDlPKY\nEGKWEKK/KdgrQohjQogjwCvA6IKmW5T4z53P4rcWZmoePuu/kJ3dfscVAzrSccXAQYfeLLXuWVhR\nxGSM1jrslI5X6FReMURrXwDNTuIa1ZmY1lEs1nuSoMvcd1TQtYMVivJCodj8pZRbga1Zjr1t8X8a\nMK0w0ioOeifbMCVlHohGTJaxLBaOfOI0j5eZyvcumsnB2VlTQMrkUzJk5PuoUUEcNkJ7PUSZHcDt\nZ3GUJ1MC4lkTOpXfLa5TC78rFBoV1r1Djuj1LBaOTAmIp2ukO3u9ozWlIhOt+35QlBg2NqblG7vM\np22CDd8aluJMHHE4M1g/iT+bppO8/d5on/y69VAoyhp5de+gXDpbIy6OyTKWTZHd2dN9F912dWey\nYZfW+6soVZiH5e4L4jDgyhTzOYe/4Ys5mcNbG8ml3G4rKiIVxrdPbiM8Mtn5pWSx3pM97Q9T66wX\ne701e7JalaX0YW2mNkDt2tbdaedp7WCFogJQIVr+1nz1vPii9j/jpbe086N35LVhMSDgrd0SaMSU\ngHioNJXJJXIHiuzIeH7Tp+e9Lya7yX0KRUWiQij/nEZ4JOi1lbkmr1kKQlPyVa/XAtu7LApuyWTD\nEa15WGkq26ukK+VfClHKXKHIPxWiw9fcKWiFqu7hMGAQ3wdfx9cAbQZ6ctTrCPWjenNh03bNNmCx\nnJ1CoVCUZpQ/fwtyMtV3iDaiC17BoAA7Hh9Uj6Oef2ATNZxLzQ6zWO/JrYeUnV+hUJQ/KoTyz65T\nEOBLxrHJ8BG3Tw/mZ89L2P4xlO2bLrIwVDMB9a0/qXiFVRQpLWf7M3HG+Ey9/xNnjKflbP+SFk2h\nKFYqhPK3HOFhpst82uoX4kIs6HeS3moznO6j/ep38n+GI3iFTmWvozL5lCdq/eXJMt1nTGxgC1Iy\nsYEty3SfUesvT3MY5ftHURGoEDb/DIKDYeRIzf7fVr+QqIB5jN/jyNpucdzZM5O73T6g8p7/w77b\nuywLfYgRhotq8k85I7zmQHo9mYJsvg3HS41IrJeAfdhsxlw8TPjjtlS2fZRTK4PumweghoMqygrK\n5m+F6dNBPuMPHRfzrWEpC0MbsaznBa4nunG397ssCm3EtoPbkaEhzHB6Sk3+KYf43tjM+AMOIHUk\n1o+HS60Zc/Ewy4b/zIla26i+y0b5/lFUCCqU8o9tNB8S9eA3hW86xjLZcIRql53AKQp7gw+TDUfo\nwU68DdU5u/9z1dorh0iA+lEgjJCug3rHWDZyC0hYFOLK6mPWPfUp3z+K8kaFUv710nyo5LYGIl5i\nih80fMWeWw1PQcKj3GlwksV6T2JxYQe9AaX4yyPPd3Jjmd9Z7MPm0HhPIAhAl0qDhMZMNhzBGeta\nXk3uVpQ3Kozy9w/2p8Gjh5kb+ghV3VbB7bpceOgOpFah3+fvsMg0umewXhvdo1728sl619pmG/+5\nDpsgpQoY7bjgeoLFek+uVXO+b2SYMv8pyiPlTvlnN1Ij5pfeRD08hZn1B9Lygj1Uu6zZAOySadzx\n/8yjew47pauXvRxTeesuvC+msmz4z5qpZ21zJqzuD6lVeW1YDANdJinfP4oKQbly75CdD58frs3H\n9+8kTvy5gCS/KRwCTfEbK8HvY1nmt5wmCKJ+nYKLhLnqZS+3LF0Kz/3XgerRfZkSfY7/M+wHjlB/\nfWcWujdmr2M6e5S7CEUFoFwN9czOV3tV93Bs+/rRfU9Htjy+F2wkGG2p8/MUkrst4faxUdRz/JWL\nwb/ff7Gi3BEcDKNGgdF4/zk1tFdR1qmQQz2zG5HRMdqIDA1ha++9IDTFj42RW1RjVmhz/K6f4Pq3\nSvFXFAIDYdWq+2d9W5r71EQvRXmnXJl9zAt7ZGGlbhzv1bdlmU5qozv2BkHyQ9zxm8KMsHfpdrAz\nX6wqdnEVJUyVKvdMhLVrw5IlWsWQFxfgCkVZp1y1/OfOhUqV7j/e0BjHj+63IMUBds3AwftDJlw8\nAGELqdpkM7UCequXugKRodyvXr13LDn53n+1yLuiIlCulD9Yd90con+Y87XuUnldKDPCbbENXcPa\ngC08fzGShscOsW5d8cupKDlyU+5qkXdFRaBcKf/p0yE1FbPTthj0GLFhm1NtdHum4On0BbOYySbD\nR8jQEC73qUZUVElLrShuclLudf/tT73h7fhZryMdQSq2uHccBeM6UNN/fvEKqlAUIeVK+We81G0T\nbIgKmMe3ekdskFxL8COl2wc8mXAJhMDXJYZNI6rT5ZHPS1ZgRYmQ3QQ+Z2fwOFOXi80P03eEjp16\neLpjZ6L9VmPz8BHa1vMpVjkViqKk/A31bDSfdxM2U43bTAmIp2ukO3s6HKZldBt+3naORmmGIpFX\nUXbI2qEL9zx3DnvWlu4d+7PfbxOk24CNEdtUOxzWhrBpRHV85/YuOcEVijxQIYd6OjpCx4S7vBNw\nEkBT/N13gU0qL0TfxMmojLaKzOs7ZJ3Fq8PIvoPf4RDXFnRGEGB34F9sMnyEb/C4khZdoSg0ytVQ\nz9O+rtz+6wn6hwby2rCVoEsFow3Y3gVAuCiHPQqN7BZ9N6Lj6Y6dSXLeC0Yd2Bi52+lTiLkDsaL4\nBVUoiohy1fLvdSoFfJazpUMM6NKgUjLYpON20oUpAfEsflYtyajImcf7BbLFby+2qbZUPjyCRhH9\nSLe7y+Mj7Ahv/zDhMeHM36c6fhVln3Kl/Df/eJ7+EQ2h5fdgm6T570mtzNhfa7Bwawu2V1FLMipy\n5mjTyzjFPIrD2hD6RNfhplsYbhFdSPvbi0VO9gxaNYgzu1XHr6LsU66UPwDHh4AU2p1db0z/tS8x\nJSAeBg5i6/SgkpZOUcq5/OFWXnGLZNOI6kz+5ywyNIR4t18ZeeZvfml6FblqBcNvWXEKpFCUMcqV\n8l9s9wZbev4KQlIjsQbUPMeW1ik8tXGkavUr8kxQEPjO7Y2PXRSbDB+REvkvVnePRUS+yCbDR/j8\nV3X8Kso+5Ur5z3v6EjT+lf4RDbn+4Q3NBOSznAMdEtk6PUg561LkC4ercaDfifBeDrtmaL/6ndpx\nhaKMU66Uv2x1gP7p/dh8zA6EYPMxO/qn90O2OmAe2x0bq7mAyHDWpSoARXaE6B9mUIAdtqFrzG5B\nBgXYEaJ/+L6wEyeCra02
dNTWVttXKPJLsTZQpZQF3oAngJPAaWCqlfOVgRDT+V8BfW5xtmvXThYm\nLi5Samo/8+biUqjJKMoRbj3HyRr6b+UOekgJcgc9ZA39t9Kt57hM4SZMsF62JkwoIcEVZZLhH78v\nK7fckakMVW65Qw7/+P18xQNEyjzo7QK3/IUQOuAToC/QGnhGCNE6S7DngWtSykeAD4D3C5pudmRX\ncypnXYr84vHw53Q6Vx1XYkhH4EoMnc5Vx+PhzG5BPvvM+vXZHVcorBG+xoe7Tw0Ffbh2QB/O3aeG\nEr6miEaX5aWGyGkDOgFhFvvTgGlZwoQBnUz/bYErmFxLZLc9SMt/zRopHRwyt74cHLTjquWveBAy\nyo4Q2u+aNfeHsVauMjaFIq8IISX6HZLX60h8Z2i/+h1SiPzFQ3G1/AEn4JzFfrzpmNUwUso04DpQ\nuxDSzkROrnrnzs155SaFwhqBgdqyjunp2q+1WcE6nfVrszuuUMD9VoqHHgIMvhA5AbrP1n4Nvtk6\nIiwoparDVwjxohAiUggRefny5Xxfn5NpJyd/LgpFQchY5SuvxxUK8wCURvORLuHExsKNG2DbLBw6\nfARne4H3Miq3DC+yBmphKP8EoLHFfiPTMathhBC2QE3gapYwSCk/k1J6Sym969atm29BcnLVC3lr\nxSkU+eXTT2HChHstfZ1O2//005KVS1F6MVspEnwgQLPzpzqFw9NPUV3eYPvuXwj5Roft04PYHRde\nJDIUhvKPAJoJIVyFEJWA4cCWLGG2AKNM/4cAO0y2qUJFmXYUJcWnn0JammbpT0tTil+RM2YrhcEX\nQjdAwFBqPTaZNKnjnZA29DJA3bOt0AWvgKS1RSJDgZW/yYY/Ca1T909gg5TymBBilhCivynYl0Bt\nIcRpYDIwtaDpWkOZdhQKRVmg8lh/6LhY2zHZ+a81iaJhUgr/MfzM27zLUDawyfAR/139c5HIUCgu\nnaWUW4GtWY69bfH/DhBQGGnlRnauehWKomD+vvlcP+5D8Fxf4uI0E2Pg9HBqto4gqIvyJaWwzmCv\n3qxtNEXbudgWOi4CCYnV7vC0PojZhhXMYBa+7IS4onElXqo6fBWKssb14z68d3oosSJcmzkuwnnv\n9FCuH1eePxXZEzxpMiPqLAS/KTCyD6JSEgvDYPY6T9YEfE9b/UKWMYFwemTfmVlAlPJXKApA8Fxf\n2KDZbPF9W/vdsEE7rlDkQPCkyThcrwc6I+5xNXn0YA9mGXZSJXQFPk7L2MBQhrKB8MAviiR9pfwV\nigIQGwtNndYy6FQydKDFTP4AACAASURBVJ/NvyJT6WkwEivUoi+KzMzfN5+Xvn+J8Bht9M7iA4tJ\nrnmRWjftOOp8nRc7tkACPxgWs3zfWXxdYtjw5hEiahbNutHlahlHhaK4GVRjO2HGxpz1vM3IKPja\nO4nU5P3YdVvMGyO/49N0bbSZ6odS+DT04b0977H+2HoCPQJZHrmcymlgtE2lXYQ3h/w+ow8GfA/u\nJB2BMBjwBYrqG1K1/BWKAjBO/ywpPediGzGWb5o5kHx6AMl+M+l5rBY0jFDeYxUA5q/A74Z9h0Cw\n7Lf/glFbbHBmiCexW7fSPOwFtjeBcHqQVLvo1xtXyl+hKADR1S/hvaM/qW5bSDr1NCmeG3n0TC3C\nvP6mYUID4J6LEUXFxaehD0M3DgXglQ6vgE06UidxOe7Dfww/E8AGTh1cxvi1TzGUDUS8VDR2fkuU\n2UehKABB8S7YxjbiIFPB73VEbGcONT1A5bDZLDV8w2BGAsp7bEXH19WXDUM24LdiEGkyCXSgS7Ph\nr+Ynqa8PY5lhIouYzGQ+JODNFkTU7F1k5p4MlPJXKApAeOAXvLv2JlW7BXL7RgOky3448iy23eZS\n82Iy7vVHEd3kMs77tuYemaJcs307pMkkpG0qfaLqMS3qEr2Hp3Nx2Mu4htxhsuFDJNoSosUxVkyZ\nfRSKAhBRszfDnabw3J6GUOW6ZsR1C8Fjz5NM6qQn2m819YyXcXzZv6RFVZQwC/63HltppE9UPbY3\nS+N13id9/WaqHHuKGKerLObfGCk+V7BK+SsUBSAoCFok1md5t0Qc1q7D4YQv6FI52Ocb/mxuwO2E\nnr9bRiJiima4nqJs8NJ/whFN11EleCPTNrXi0dAgDgUsQJDOjz8ksGjfRaawiKGsLzaZlPJXKApI\nSLsuVAldwQ+GxTwbMgyu6UEnIa0Sx1vGUDlsDqlfPVXSYipKkqS12IeuYqZhN4P4jt8Nr8H/t3fv\ncVGW6ePHP/ccUDEV01JBYYgszfKQUqiRYphFWVqCJlrbYS2r/VZqrofdzmaRum2/di3Xaj1gCp4y\nVyPU0QylMBPTMlMZ8ZCphZqmHGbu3x8zIOAgRxlmuN6v17xghmeeuR+Ua+657sOVvAhT0GYAnuNt\nBrGU1Q2H1FqTJPgLUU33d0pg5fBGRLGeHREp0Hw/nGsK5jx0jgW/9Kd4Nff5ouNrtUi3qBPen5fK\ncts7vMKLnMEfjZGRtoOkpK0jjiSiSSXFfwizL/0knyIS/IWopvHjnYN09w57kE0DlmPZ1Qka/A6/\nhUJzG5cPjWQQnwDFinjsd27/LOsAfI/bN/fsbKJYTxh7KcCPSL5gNXcCMIs/szUgutZ3IJbgL0QN\n2dTqGNftsrC/ww7MKVPgnX2w625sHXYwI8J5zMVKjQrv98C7CTz6mrXEm/ujr1l5NboZM3iWb+nG\nSObyAx2ZyOvEkUTApKfJyan9VeAS/IWoIcfeXsVlDc7QIOU1GqWP5u+8QtNF82iY8hpJ11wJXLzU\nqPB+1vnh5A50VuYCwGIld2Acnx+ezAu8xjTGMpeHSCKOqUxiYq8Nl2zvnvLIPH8halDXA1+yK/0K\nljOYKNYThZVB6cvo2si5ujM42NkbLO1SFekWtevo11Hwi2uX1y2jocdMSE7iS1sU6yatISpxGWQr\nooKzSIrPJKPZEMZ7qOyD9PyFqEFhd7Rn+aQtRIVkgVJEhWSxfNIWwu5oD0ipUV8XHExRZS76vOr8\naosiJAQOXxeNBRsGHFiwcfi6aI8FfgB1CUrp1ogePXroLVu2eLoZQtS4J590lhe1253F3keNkpq/\n3iwhLYHwwHCiQqNITHTm+HMHDYbDPaBVJg0+TeKRqCjmzCk53uPvf2nKzCqlvtFa9yjvOOn5C1GL\nEhNhzhxn4Afn1zlzZLaPNyvctM2aZeWLbCtqyECa6FOs+WIti5YYMd0/mLkbrXVuoF+CvxA1qLw5\n/DLbx/cUbtoWtziOLefGcC7fyEuLOnObDa7Y1xFj4ke0abbA7XM9OdAvwV+IGlKROfwy28c3RYVG\nMbrHaLYatjHy6+ZMtaXyAi8TRxLLbe9gTU91+zxPDvRL8BeihlSkV1/WH7vM9vE+CWkJRSUZrVlW\nZm6ZychMWNpzP3daxvMqLzCamUSxniB7dp0b6JfgL0QNqUiv3t1sH4DTpyXv720Kc
/0zNs8gbnEc\nE2+ZyOprDTy4Loz5sZ8y0vIwMxmNlb6okGBmzYKQEFDK+bW2V/SWJsFfiBpSkV79s3s70D52IFlY\nsGMgCwuBMQP5dVgH2ebByxTm+id8/ne6/d6FqSueZ+LCG0hO38wTyX05HrSPJOKIIwlr/Gzi48Fm\nA4fD+dXTdZ0l+AtRQyoyh79ge08yLSt5JiYfA5pnYvI5HL4Sv6yeMvDrhaJCoxhmH0yqcS13ftWO\nAtvtTOR1km0zeT7NQFRIFkmTMj22ivdiZIWvEDWksCc3ebIz1RMc7Az8xXt445e0YtLZJ1gR/h7N\nrmnKqWaHIeMJXlzVjMnIwG9dlZh44b/roUNgOjWN1XmJjNwSwvwep4nOMrLJ9gKfMtC50M9mIwpq\npTJXZUnwF6IGxcdf/OP8BN6kwapnGdutIacCTsGJdkxf1ZDneJOPIk5guzYbkJKPdUnhLK7CwfzC\nWVyPDJrGv4LeYFpyF8bYMjmSNYHU2Nn0T4Yo23r0foXybNMvSoK/ELVsQ0wSmM45Sz42O8CGmCT2\n/BbGngHv0+pwX083T5RS1iyuJtlTmLYphKm2VLaxmjW2EfRPhrQgM1ZbX9obs2jrmSZXiOT8hahF\n98YEsiL8MOwaCPmNAVgRfpiZA/ZCvj/h62/2cAtFaUWpuOExEDGj6PHXvjwJrTPxGx7NPB5kBPP5\n3PYGK9M2EksSf7LXYmWWKpDgL0QtWneNH2Q8wehFA2i84GNwmCjMDTResJDnfvrKsw0UFyiarbUv\nGgaMK3oDeCEigHED4Ni+YYxkLqu5Eyt9iWQjXcjkm4C6N8hbnKR9hKhFt/6YRfTJxTxHLIdaR7LC\nUFD0s9tav0WUbaMHWyfcmTLFlfNPH+N8YMA46LicKcEn8E95mZXpnxPFeqz0JZYkupBJun80s971\nbLvLIz1/IWrRqlUwJm0IT0aEsWLARmfqZ8PfIb8xKwZs5MmIMKnxW8fExzsXZDVuDKSPgexbIGQj\nZN9CRHovQsnCgSKUrPOB38MLuCpCev5CeEByJw35/jRe8DFjbN8yI+tjzgwfRnInzVw3M0ug7gcT\nXxOTGINRGQlsEsiwXsM4cyaKVkPD+SV4C61/hyPBGzkaMY/QdBtKnZ8CutZL/p2qFfyVUpcDiwAL\nYAPitNY5bo6zA9+57mZrre+pzusK4e2u/O1+zq2N5FPbDGfFL1tfBi5YiGq7scz9gST4167oq6IZ\n9/k4GhgbsHDnQpo/3JpfgnejHIo/TJpeGTewacA8rge+2zzH082ttOqmfSYAa7XW7YG1rvvunNVa\nd3XdJPCLeq9LgwR6ZTcqkTJoE7SC49duKDGjBIsVHrib/b1jJB1Uy8b0HMO026eRa8/lTN4ZcoJ3\noxyg85oQsugfbF61jXtSbqHtVXM93dQqqVYlL6XUj0BfrfXPSqk2wHqt9bVujjuttb6sMueWSl7C\nl1ksbmr5WqwYhsfgMJ9jWgoEHmnFiOEncZjP4b9xOqSPqZVKUPVR6RW8PcclMOouZ3Wu0FduxabP\nD8S32/AgB6xzsLCPLMLQgKpDFRErWsmrusH/hNY6wPW9AnIK75c6rgDYBhQAb2itl5d3bgn+wpcZ\nDM49/4vrxxo2WByo4QMpMOdhdCjsBk3HlD+xO+OjoupfxYWEODcJE1VXegUvvRMwG000jJ5KJ2JJ\nt88ErUBpDA6FI68JLFqCsvVjGmMZY/x/UFBw0deoTTVWxlEptUYptcPN7d7ix2nnu0hZ7yQhrsYM\nB95WSoWV8VqjlFJblFJbjh07Vl7ThPBa7nYA/YDHGGX7iYLN40GB3ahpkx3GrvQPuMe+2O15ZC+g\n6rtgBe+hcPJvnkp+Vi9n4HcYQGlU9k04DIApj4ZD7+IJSxzjmM6Mmxd6qunVUitpn1LP+S+wUmvt\n/n+zi/T8hS+7oLcJ2DGwwaLpP7whdvM55wIwQwH3pESyJH0TZi7sXbZoAceP12LDfZC7T2FYrDBi\nAI3yjJz1P4dpTx+azF9CTsQcTNfP45Ej2wjLAZNOZk2zIayqQ9sx1VYB9xXAQ67vHwI+cdOQ5kqp\nBq7vWwK9ge+r+bpCeLXCuePFi3t81vlKYoYbsJvP0SxlMsz7HJXfiBUDNvLPCDt+fhee59Spig/8\nyoCxe27rMNiiaLuzF2f9zxG6Pwhjm63kWLZD+nP4zd7IsJV9GX8whDFpdSvwV0Z1g/8bQH+l1E9A\ntOs+SqkeSqnCjS06AluUUpmAFWfOX4K/qPdKF/f45J6B5P/ShV4pgzmV/go32C5HL1hJyO5OrA2D\nJk0uPEd+fsVqAFSkvnB95a4OQ+eIhzjY+QuuyQwnq2UuuRtfhNg4zJYUXuXvRQVavJrWuk7eunfv\nroWoT958U+vRgcu0wq6n86zWoEfzrga7nt4rWSultTN0l7wpVf65Q0LcPzck5FJflXeYP9/5u1BK\n61s6vaWZ5K/viYjUTcnRWNZonm+pm0dM1KN6X6VbclRP75Ws33zT0612D9iiKxBjq5Xzv5Qk5y/q\no5gYiD65mDFfDQO7HYxGZty8kKlX7qPDTwbm7XyXYLLJJpj7LE/zbZCDkIPjy53x4zavjTPl5HBc\nkkvxWq/e3pzGPzm3arajyKElAZYlnAjaw/S0I3RT28l4Yy3jx3u6pe7VVs5fCFGDCvf+oaDAGa0L\nChiTNoSJnQx8ee+LJESYMKBZaglgW+wbXOPYR89xCW7PVTzHbyjjL72susP1SUJaAtYsa9H9yakn\n6UYmDXq/Qg4tCWUPJtutjE67jHFMJyVsdJ0N/JUhwV8ILzBm/rtErIth5oB9dB7UhXGxB3liYwC/\nRH5E09/DLzi+dI7f3RqB0vWFfUVlB7bDA8OJWxxX9AawyHIlA2MbcejQYLqTwe80c9blJY67WME/\nDw655NdQKyqSG/LETXL+QhSjlF5HX20YNEzzErrdw8G62fNm3dSyVK+blHrB4WXl+I1GZ147JMSZ\n5/Y18+dr7e9f8pr9/d1f65tfvqnX7VuntdZ63b51umVCSz1y6UhtnNRQN7D8r2jcZR19dUuO6sEk\nV3iMxZOoYM5fev5CeIPgYL615KDbp6L29+JASDZn9tzHcts7RCU+dsHhZS3+cjjOzy7yxW0hyiq5\n6G5GVPEef1RoFHdefSfzts+j3dlYIvf7cR/LivZd6kwmy3D2+H0lVSbBXwgvMGPE00WpnkYtt0Hm\nSAo6J5EcccBtpC8rQBXm/311nn9Zb3ruHo8KjWJUQBK3vRdHm8H9mZ85jxGZcLpBIr+330YoNow4\nCMXGOs5X5YqJuUSNr2US/IXwAmsaOXgiozULIrMxJc/n78uupkHKq7wfbWPGHSW307JmWek5LuGC\nuevgzP378jz/st70Sj+ekJbA5P9YefvZKFrt7sqRrmu4cW9zbjgKExfewNf3vkE3yzS35/LWRV2l\nSfAXwgusmjwe+y090cmLWG57h1d4kdXp
azDu7cfEm84UDVZas6wMXjSYpiF7mTXLuf1DWcpKh3gz\ndwu2ig9sJ6Ql8Pinj2MymJi6N44/Os8gp9MaWp7045uwHNbYBzDVlsq05LbcHuR+NNxX9lOS4C+E\nlwhr8h+WD29CVEgWKEVUSBZhtidx5PkxeFY/XuinGDyrH7mn89iWOAyAs2cvfk5fCWSF3G2bUXzb\n6/DAcBbtXMTfUl9GfxcLA8biMMDxpnl0z+hBauQ3tLPMYYwtk9fTTrp9DV/J+csiLyG82Izeixl7\nuDnm4XeQ71eAOc9E/oLPmB6YwzuHhlxYM6CU+rIldPH9+q+8ycrRfoPBeBqtNBgd9M9sxVfLdpFr\n2QRBGaxOW0/ny7K44oytxOI4b6ihIIu8hKgHxnw1jNHMJN+oAMg3GBnNTMZ8NYz9Bivc/Tj0dr8I\nzFfn+ZdWes3DL19FodP/D22yg9GBcX9PUq8uIM+yiYdsB1idtp44ktj+f7OZN69k6qxRI89dR02T\n4C+EF7O2s7Ng2BIoMEFBAzDmMnf4EqaH2zENHQidFsKhCxeBFU+H+Ppun5Mnwx+DYs6Xx7RYMfZM\nAA3KAVy5HTZO4lzsQ1xrmUkU60malElGM+cMn+Kps19/9Z2Bckn7COHFHr/HwNzrzJxbtAqFRg8f\nBOYzGBzgyGtK5KLn2Gh7qej40mkLd3UFvCG1URkGA+ibZ8CAcZDxBKbOcygw54LBjjnjEfI7LwGt\nMW+YwMPG2bx/0F6UC3NbbpO6nS6TtI8Q9cChJvGcW7SK6baVTLP9DzY/BwocRuj/dUd+sD3F4KZr\n3A5+QuUWRXmrZjEJcKQbpEyD8JnY8xuDwY5x9+04Vr0PC5cRtLM3DxtnszQtvcRWzZVZN+BtJPgL\n4cUcOfP4qymH53ibbpa3Md88HfIaQV4jUm/6gYmW/ixt/liZq3p9MbglJkLLls7ZPkrBub3hEBsH\nR7oRnt0A3eQYOEw03fwYdkzcYLuc3JVzGJYWTFLTPxele6Di6wa8kQR/IbxYfDzM2D+EhZZWDBxm\nJF+buWHB67Dgf5hVPi8PzcRqKHvKjy8Ft8KgP2KEMzdf6NyuKEhOwvBADBnBuZjsgKGAnNYHaM1h\nfiaQibxOHEnw9NMlduwsb92AN5PgL4QXe+YZZzWvvwcNpGDHcEYv6s/PtngCg5aRX3AZjlNt+ToQ\nUIoZod0IeLIzjf/aoej5vhLcCscuigf94lq0/hyH3zlQEPXlzc4U0IBxHIuYx0ReZyqTiGRDiV4/\nlL9uwJvJgK8QXkyp898PZjEb6UMScXxryWFs/I9gOkenjFt45PvfGRu/C0y5NM+IpenOJKZMOT/b\np3AOfHAwRY97k7IGZgu1fSyQg1ecxJT+Fwp6fIAxeT603kb7q97j+IKviWQD1oAh5OTUWpMvGRnw\nFaKeWcYQOpNJKFk8Z8tkdOJdUNCQneFfMvahTDDlYsx4lKRVx9m/Hx5KepLA1ztcUEvY04G/KlNP\nS4xR9E4Ay/niLN0s0zjY4gwR312Fsr4CyUnYY0cw6kgG3y+w0ZlMVpqH8O67NX0ldZsEfyG8WOm9\ne9YRTSg2NIp/25Zww6YBoHDegLtYzW1YIeZJ7N1mcvxg81pv88VUtdB8iTGKQ64BXtcbwLXXv0Rj\ndYbtO15HA9iiMCfPxx70LQB7Q6L56CPPv+nVNkn7COHFEhPhkUcgL6/k41lYWGoJKEr1OKO/82+9\n3Uk40AxUgRmdmILOiqr1dpelqvPqi9YrdEs4v6gtNg62jKbpTa8SubMNa1buJZdGRPIF2+mMBuaq\nP3GvY/kluBLPkbSPEPVAfDx8+OH5AckWLZy3+yxPFwV+Y8ajtJyzAOxmAA4EABp04mf0tdWt6u3V\nmnraOwGCv0ANvYeX+Rt/23Ic+rzKaT9YT19yachI5vIDHXmBl1HABx3eqsnmexUJ/kJ4ueI5++PH\nnbfWjzkwnW2BMeNRUlft5ZjtAQy/Ws4/SYHlugTutowlZor7vX88oSpTTx94N4GRSY/T6PccjMFW\ntNHBi/Hf8NotgAaHAc4c785o/s1cHiKJOKYyiX6sYePP7X1iq4aqkOAvhA9aNXk8U1odIrXrMKJC\nsngyBhxX/uT8YeZIKGiALTyFcfG7iD5bd8JARaeeJqQlFNUw+PT9cIwd5/Frv3fQ2bcCdmeqywg4\nzJAyjYaRLxF71f+hUYSp82UZT5zwnb16Kqvu/KsLIWrU+PEQNSUabDZWXuPM8ZMyDa5eDTtjAQjI\ntTPqvYpPc7nUm8BVdF793i/CGTxnMNaI1nyy4xX8NkwA8x84wqxgyisa4MZuov+R46xKPk1GGweh\nIQ5CdMmyjL62nUVFSfAXoh64LPPP6MTPuCc9jMgtnaDrfMgcQZvNw/H/NbtCAb2qM3Eqy93U08RE\naN4cotUaDposDJ30CnrORwy89RRTo3JRkW9A5ghnj9+gnZsb5TUCDKQO/Tff0oXxB0N8cjuLqpLg\nL4QPK+yp/2Cdxd22U/SxvMSXPXYQuaEPtP+MPc0ViyxXlgjoj75m5YF3E0o832CABx+smU3gKvvp\nIfD5GB7cfA1n4y2kRXzFT/ZQFkcc4OxdYzmDP2v7bCZ/z10YrlvsnNCkgYKGqHUvgd0IxgImXN8V\na/xsn9rOorpkqqcQPqr0ds3dLNPYFvsG05LbMsaWyQxLF8bG70KpfCatacYr6SdYZLmSkUNPY8/u\nS0TgrWyfOf6CgF+aUs5eelXaBBffQjpmSgJfb97Crz2Szz+YfTMEf3X+/uHuEPgN2P1gbzRk9cPU\n5wUKlAnz+gk4Wu6mvQ7g4Vv/QVCQ729hXdGpnhL8hfBRF8yZ751At0MGltreJZhssgkmImIIv9z+\nNig73fc2Z2toDloZUHYj5DZF/3EF/HoNtPkWMp4GuwmuXwQ/3A9pzh3QjEaYM8d98Cy9dcSvv8Lp\n0xceVziPPyEtgfDAcN5akOEciF6+jLEDd6COXoNud2E8MGc8wg0nT7M1+AyG4PXELBpFlE3zsuVW\nOl7/AvrUdXQ1vkbYHe2LNmzzhe0sLkaCvxD1nMEAZf15K+UMfG/vH8SQiJ7YB0xwrQM7vxgMuxmM\n+a7vTXTdeiPbwr+GPH/4eCXYzi8Oc9d7dtfLL0vhp4fHp1pZlDuY4Rva8l74EZ7YGMBHfQ5wzmgG\n0x/OfH6hIzcw/b3bKMCMiXxettzK0KBxzDpoxxo/m4xm0SV26KwvJPgLUc9VZLWsVop/WLowNv4H\nMLuWCTs4vyC4cNaMVqCcA6lX7OnGbZu78CMd+Pb6n4rOG6DDyFl5PtqWt9ka4FyYdSicEB2FzQbW\nyWu4e90mHP1eJvL7K0jtchTD9qE4Oi0GU0HR+1Khhimv8b/0NVxFFo8ym60B0T6xOVt1VDT4m2qj\nMUKI2jdlyoU9b6WcAdlicf480AIvDN0BugHYHWAscE4DyW8A5lznk060g4ADrjMYOGb5nsWhmRRg\
nxkweDm1A6wacWPhJidev0AyaQ+GYYwfz5pKGaHUUrlKo+xuQ/30cqV0Xovb3wtFl4fmBXCiR8z83\n4G/EMYFf051z/tXJqv2u6iPp+Qvhwwrz2/v3OwN/8T93f3/o09/C6k7HXL18BarAtRcQpXr+RlB2\n5/cOMxhc6SCHEewNMGwfRjhf00XtxHjsKmyN2/P7kdv4MtT1ySAnzDlGMDyGq/cF09a4n7sP/UwX\nWwtiYizYb5zDbTtbYu2Yg9+6ydgjp5H70yDoMg+0AdAYT7Xi5t1XsLnTYQw7B2EKXUM+fjhyroYF\nq4C6XVu3ttRKz18pFQu8BHQEbtJau43WSqk7gH/iXHM3W2v9RnVeVwhReaX7eX/8AVtz4lBZO9Ch\nX4DDSKesK9nZIcv1BAMo1zQehwG2joLwmefHAQCMdrBDxPEcNvXJ5muTCW3aR8DJXzjRKxWjA7Td\nH8fCT+CpjqjGB9nT/jRXZtzA870OE3D8LPnB6+BwN1K7fIthT19U5BuojZMh8h+o3XdiDN5AzIbu\nfBr5Pfd9b2DA99fyz6DL+e1fthLX441FaDypuvP8dwD3AV+UdYBSygj8C7gTuA54QCl1XTVfVwhR\njuKLssryyxcJND7Qg6DtkUxbGMrBlmfgwM2Q8haG31vBsWth10A43RoMeVDQEBzqfBom3w8jBWzu\nu5Y2P4ajTfngMHGi2Wkw2LGb7Pit/ytPtP4ztNyFbnQasiPYFL4Dox1ygnfDyUAI3AaZI1HBm9Ab\nJ9DZ+A3Tk9uy9uOz+C+aTzPjr3RNnsC4oOG8aEvjtzTnOoTCOr2+VGGrttRI2kcptR4Y567nr5Tq\nCbyktR7guj8RQGs99WLnlLSPENVTkQHXkBBnbr54GOjATv7F0yxkGAuIJw8zfpbPODPsQTDYnQcX\nLyGmNQZjHg5TAf77b+SPkK3FfgYGh8Jh0JDnj8ocgQ7/D+Q2gYanILcxNDiDYdswJi/vSIKlB6bY\nIXy6JJe++zSHjMH8yT6bzf7R5Oc7S1YW8rX5+TWlLm3pHAQcKHb/oOuxCyilRimltiilthw7dqwW\nmiaE7ypvwLUwTVJ6desuOnEbVv7D47TmZxYxlFuDZnDzjmCM22NR3w3HvGAx9yx4FMP2oShbHxza\nRJPDYfwRvNW5FkADBQ1AG3EYNSgwpf+Ftat2Y84JdAb+PH/wOwOHuqHbpxJgWcpq21sYkxNZOPwR\nlHbQtsDGGh3NrFnQtOn5NrZoIYG/usrN+Sul1gCt3fxostb6EzePV5nWehYwC5w9/5o8txD1TXBw\n2T3/kJCSi5vKmo+/l/bcx3JIc94P4yd6sYk/8Sb9WM+T3M/M2AyC1z5Gdp+5GApMOEx2VMYodNc5\nzjEB11+yipjBA5Yg8psfglOtockRyI6A4K/omXE942IPMm1VY5bf24SMJv8paoO79QJnz9bAL6ie\nKzf4a62jyzumHIeAdsXut3U9JoS4hNxN9XSXKin8vnDVa+PGcOaM+wVie2nPz/7tGTDrIYiHTU8l\n0GNJBJe1WUubncFspTvG49fSOnQBB8y5ztlAe+4AIP+a//FLsA2yb+am3YGcaXaMneFpqF0x2AIO\n0DV5ApPDHMy+Lprxxdo3eXLZewpJz78atNbVvgHrgR5l/MwE7ANCAT8gE+hU3jm7d++uhRDVM3++\n1iEhWivl/Dp/fvnH+/tr7Qz9zpvZrHWLFu7PERJS8tgwduul3Ks7DLdoHrhLmy2r9d2s0HexQvNU\nB82z7fRdvW/RDtAO0HdGRGnT8NtKnMPfv+RrKFXyNQpvStX878sXAFt0ReJ2RQ4q88kwGGcOPxf4\nBUhxPR4IrCp2P2zJtQAABR1JREFUXAywG9iLM11U7rkl+AtR+0oH88JbSIj748sKzKB1JzJ1Kv2K\nAv0U/qov51iZx5f1epVtU31X0eAvi7yEEEXK2g+orJ07K7SFQxUUf73K7gRa39Wl2T5CCC9R2f3u\n3ZVdLIvZDKYKList/noVre4lKkeCvxCiSEVr6BYqHpjdKVwOEBICH30E//2vc5pmocaNwc+v/Ndz\nV91LVI8EfyFEkar0sgsDs9Ywf37J586b53y8MGDHx8Px4+cz96dPw4cfSq/eEyTnL4QQPkRy/kKI\nKqtsnV3hfWQ/fyFECaVn1+zf77wPko7xJdLzF0KUcLEVtSCfCnyF9PyFECWUtSFcdrZ8KvAl0vMX\nQpRwsbn+5X0qEN5Dgr8QooSLzfW/2KcC4V0k+AshSrjYXP/KrgAWdZcEfyHEBcpaUVvZFcCi7pLg\nL4SoMNlnx3fIbB8hRKUUbtMgvJv0/IUQoh6S4C+EEPWQBH8hhKiHJPgLIUQ9JMFfCCHqoTq7n79S\n6hhQleqgLYHjNdyc2uTt7Qe5hrpCrqFuqO1rCNFaX1HeQXU2+FeVUmpLRQoZ1FXe3n6Qa6gr5Brq\nhrp6DZL2EUKIekiCvxBC1EO+GPxneboB1eTt7Qe5hrpCrqFuqJPX4HM5fyGEEOXzxZ6/EEKIckjw\nF0KIeshngr9S6g6l1I9KqT1KqQmebk9lKaU+VEodVUrt8HRbqkop1U4pZVVKfa+U2qmUesbTbaos\npVRDpdTXSqlM1zW87Ok2VYVSyqiU+lYptdLTbakKpZRNKfWdUmqbUmqLp9tTFUqpAKXUYqXULqXU\nD0qpnp5uU3E+kfNXShmB3UB/4CCQATygtf7eow2rBKXUrcBpYK7W+npPt6cqlFJtgDZa661KqSbA\nN8AgL/t3UEBjrfVppZQZ+BJ4Rmud7uGmVYpSagzQA2iqtb7b0+2pLKWUDeihtfbaBV5KqTnARq31\nbKWUH+CvtT7h6XYV8pWe/03AHq31Pq11HrAQuNfDbaoUrfUXwG+ebkd1aK1/1lpvdX3/O/ADEOTZ\nVlWOdjrtumt23byqh6SUagvcBcz2dFvqK6VUM+BW4AMArXVeXQr84DvBPwg4UOz+Qbws6PgapZQF\n6AZ85dmWVJ4rZbINOAqkaq297RreBsYDDk83pBo08LlS6hul1ChPN6YKQoFjwEeu9NtspVRjTzeq\nOF8J/qIOUUpdBiwBntVan/J0eypLa23XWncF2gI3KaW8Jg2nlLobOKq1/sbTbammW7TWNwJ3Ak+5\n0qLexATcCMzUWncDzgB1aizSV4L/IaBdsfttXY+JWubKky8BErXWSz3dnupwfUy3And4ui2V0Bu4\nx5UzXwj0U0rN92yTKk9rfcj19SiwDGdq15scBA4W+9S4GOebQZ3hK8E/A2ivlAp1DawMA1Z4uE31\njmuw9APgB631DE+3pyqUUlcopQJc3zfCOYlgl2dbVXFa64la67ZaawvOv4N1WusRHm5WpSilGrsm\nDOBKldwOeNUsOK31EeCAUupa10O3AXVq4oNPFHDXWhcopZ4GUgAj8KHWeqeHm1UpSqmPgb5AS6XU\nQeBFrfUHnm1VpfUGRgLfuXLmAJO01qs82KbKagPMcc0g
MwBJWmuvnC7pxVoBy5x9CUzAAq31Z55t\nUpX8BUh0dUj3AQ97uD0l+MRUTyGEEJXjK2kfIYQQlSDBXwgh6iEJ/kIIUQ9J8BdCiHpIgr8QQtRD\nEvyFEKIekuAvhBD10P8HhtC0capb0XIAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
@@ -1011,7 +1099,7 @@
"colab_type": "text"
},
"source": [
- "We can see from the graph that the predictions for the original model, the converted model, and the quantized model are all close enough to be indistinguishable. This means that our quantized model is ready to use!\n",
+ "We can see from the graph that the predictions for the original model, the converted model, and the quantized model are all close enough to be almost indistinguishable. This means that our quantized model is ready to use!\n",
"\n",
"We can print the difference in file size:"
]
@@ -1021,7 +1109,7 @@
"metadata": {
"id": "6r42iBnULP4X",
"colab_type": "code",
- "outputId": "afe526c9-498d-498e-d768-1edfbf21e870",
+ "outputId": "9afd8a71-362a-4d59-bd0e-0f9ee70c6e78",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 68
@@ -1041,9 +1129,9 @@
{
"output_type": "stream",
"text": [
- "Basic model is 2656 bytes\n",
- "Quantized model is 2640 bytes\n",
- "Difference is 16 bytes\n"
+ "Basic model is 2736 bytes\n",
+ "Quantized model is 2512 bytes\n",
+ "Difference is 224 bytes\n"
],
"name": "stdout"
}
@@ -1056,7 +1144,7 @@
"colab_type": "text"
},
"source": [
- "Our quantized model is only 16 bytes smaller than the original version, which only a tiny reduction in size! At around 2.6 kilobytes, this model is already so small that the weights make up only a small fraction of the overall size, meaning quantization has little effect.\n",
+ "Our quantized model is 224 bytes smaller than the original version, which is great - but it's only a minor reduction in size. At around 2.4 kilobytes, this model is already so small that the weights make up a small proportion of the overall size, meaning quantization only has a small effect.\n",
"\n",
"More complex models have many more weights, meaning the space saving from quantization will be much higher, approaching 4x for most sophisticated models.\n",
"\n",
@@ -1073,10 +1161,10 @@
"metadata": {
"id": "l4-WhtGpvb-E",
"colab_type": "code",
- "outputId": "f975721f-bdd1-440a-93af-55f13c4c8690",
+ "outputId": "87846170-e82c-45d1-8dca-a1518d1f6a1e",
"colab": {
"base_uri": "https://localhost:8080/",
- "height": 3808
+ "height": 1000
}
},
"source": [
@@ -1092,229 +1180,225 @@
{
"output_type": "stream",
"text": [
+ "Selecting previously unselected package xxd.\n",
+ "(Reading database ... 131183 files and directories currently installed.)\n",
+ "Preparing to unpack .../xxd_2%3a8.0.1453-1ubuntu1.1_amd64.deb ...\n",
+ "Unpacking xxd (2:8.0.1453-1ubuntu1.1) ...\n",
+ "Setting up xxd (2:8.0.1453-1ubuntu1.1) ...\n",
+ "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n",
"unsigned char sine_model_quantized_tflite[] = {\n",
- " 0x18, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x0e, 0x00,\n",
- " 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00,\n",
- " 0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x0a, 0x00, 0x00,\n",
- " 0xb8, 0x05, 0x00, 0x00, 0xa0, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x0b, 0x00, 0x00, 0x00, 0x90, 0x05, 0x00, 0x00, 0x7c, 0x05, 0x00, 0x00,\n",
- " 0x24, 0x05, 0x00, 0x00, 0xd4, 0x04, 0x00, 0x00, 0xc4, 0x00, 0x00, 0x00,\n",
- " 0x74, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,\n",
- " 0x14, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x54, 0xf6, 0xff, 0xff, 0x58, 0xf6, 0xff, 0xff, 0x5c, 0xf6, 0xff, 0xff,\n",
- " 0x60, 0xf6, 0xff, 0xff, 0xc2, 0xfa, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x40, 0x00, 0x00, 0x00, 0x7c, 0x19, 0xa7, 0x3e, 0x99, 0x81, 0xb9, 0x3e,\n",
- " 0x56, 0x8b, 0x9f, 0x3e, 0x88, 0xd8, 0x12, 0xbf, 0x74, 0x10, 0x56, 0x3e,\n",
- " 0xfe, 0xc6, 0xdf, 0xbe, 0xf2, 0x10, 0x5a, 0xbe, 0xf0, 0xe2, 0x0a, 0xbe,\n",
- " 0x10, 0x5a, 0x98, 0xbe, 0xb9, 0x36, 0xce, 0x3d, 0x8f, 0x7f, 0x87, 0x3e,\n",
- " 0x2c, 0xb1, 0xfd, 0xbd, 0xe6, 0xa6, 0x8a, 0xbe, 0xa5, 0x3e, 0xda, 0x3e,\n",
- " 0x50, 0x34, 0xed, 0xbd, 0x90, 0x91, 0x69, 0xbe, 0x0e, 0xfb, 0xff, 0xff,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x67, 0x41, 0x48, 0xbf,\n",
- " 0x24, 0xcd, 0xa0, 0xbe, 0xb7, 0x92, 0x0c, 0xbf, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x98, 0xfe, 0x3c, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0x17, 0x9a, 0xbe,\n",
- " 0x41, 0xcb, 0xb6, 0xbe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x13, 0xd6, 0x1e, 0x3e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x5a, 0xfb, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,\n",
- " 0x4b, 0x98, 0xdd, 0xbd, 0x40, 0x6b, 0xcb, 0xbe, 0x36, 0x0c, 0xd4, 0x3c,\n",
- " 0xbd, 0x44, 0xb5, 0x3e, 0x95, 0x70, 0xe3, 0x3e, 0xe7, 0xac, 0x86, 0x3e,\n",
- " 0x00, 0xc4, 0x4e, 0x3d, 0x7e, 0xa6, 0x1d, 0x3e, 0xbd, 0x87, 0xbb, 0x3e,\n",
- " 0xb4, 0xb8, 0x09, 0xbf, 0xa1, 0x1f, 0xf8, 0xbe, 0x8d, 0x90, 0xdd, 0x3e,\n",
- " 0xde, 0xfa, 0x6f, 0xbe, 0xb2, 0x75, 0xe4, 0x3d, 0x6e, 0xfe, 0x36, 0x3e,\n",
- " 0x20, 0x18, 0xc2, 0xbe, 0x39, 0xc7, 0xfb, 0xbe, 0xfe, 0xa4, 0x30, 0xbe,\n",
- " 0xf7, 0x91, 0xde, 0xbe, 0xde, 0xab, 0x24, 0x3e, 0xfb, 0xbb, 0xce, 0x3e,\n",
- " 0xeb, 0x23, 0x80, 0xbe, 0x7b, 0x58, 0x73, 0xbe, 0x9a, 0x2e, 0x03, 0x3e,\n",
- " 0x10, 0x42, 0xa9, 0xbc, 0x10, 0x12, 0x64, 0xbd, 0xe3, 0x8d, 0x0c, 0x3d,\n",
- " 0x9e, 0x48, 0x97, 0xbe, 0x34, 0x51, 0xd4, 0xbe, 0x02, 0x3b, 0x0d, 0x3e,\n",
- " 0x62, 0x67, 0x89, 0xbe, 0x74, 0xdf, 0xa2, 0x3d, 0xf3, 0x25, 0xb3, 0xbe,\n",
- " 0xef, 0x34, 0x7b, 0x3d, 0x61, 0x70, 0xe3, 0x3d, 0xba, 0x76, 0xc0, 0xbe,\n",
- " 0x7d, 0xe9, 0xa7, 0x3e, 0xc3, 0xab, 0xd0, 0xbe, 0xcf, 0x7c, 0xdb, 0xbe,\n",
- " 0x70, 0x27, 0x9a, 0xbe, 0x98, 0xf5, 0x3c, 0xbd, 0xff, 0x4b, 0x4b, 0x3e,\n",
- " 0x7e, 0xa0, 0xf8, 0xbd, 0xd4, 0x6e, 0x86, 0x3d, 0x00, 0x4a, 0x07, 0x3a,\n",
- " 0x4c, 0x24, 0x61, 0xbe, 0x54, 0x68, 0xf7, 0xbd, 0x02, 0x3f, 0x77, 0xbe,\n",
- " 0x23, 0x79, 0xb3, 0x3e, 0x1c, 0x83, 0xad, 0xbd, 0xc8, 0x92, 0x8d, 0x3e,\n",
- " 0xa8, 0xf3, 0x15, 0xbd, 0xe6, 0x4d, 0x6c, 0x3d, 0xac, 0xe7, 0x98, 0xbe,\n",
- " 0x81, 0xec, 0xbd, 0x3e, 0xe2, 0x55, 0x73, 0x3e, 0xc1, 0x77, 0xc7, 0x3e,\n",
- " 0x6e, 0x1b, 0x5e, 0x3d, 0x27, 0x78, 0x02, 0x3f, 0xd4, 0x21, 0x90, 0x3d,\n",
- " 0x52, 0xdc, 0x1f, 0x3e, 0xbf, 0xda, 0x88, 0x3e, 0x80, 0x79, 0xe3, 0xbd,\n",
- " 0x40, 0x6f, 0x10, 0xbe, 0x20, 0x43, 0x2e, 0xbd, 0xf0, 0x76, 0xc5, 0xbd,\n",
- " 0xcc, 0xa0, 0x04, 0xbe, 0xf0, 0x69, 0xd7, 0xbe, 0xb1, 0xfe, 0x64, 0xbe,\n",
- " 0x20, 0x41, 0x84, 0xbe, 0xb2, 0xc3, 0x26, 0xbe, 0xd8, 0xf4, 0x09, 0xbe,\n",
- " 0x64, 0x44, 0xd1, 0x3d, 0xd5, 0xe1, 0xc8, 0xbe, 0x35, 0xbc, 0x3f, 0xbe,\n",
- " 0xc0, 0x94, 0x82, 0x3d, 0xdc, 0x2b, 0xb1, 0xbd, 0x02, 0xdb, 0xbf, 0xbe,\n",
- " 0xa5, 0x7f, 0x8a, 0x3e, 0x21, 0xb4, 0xa2, 0x3e, 0xcd, 0x86, 0x56, 0xbf,\n",
- " 0x9c, 0x3b, 0x76, 0xbc, 0x85, 0x6d, 0x60, 0xbf, 0x86, 0x00, 0x3c, 0xbe,\n",
- " 0xc1, 0x23, 0x7e, 0x3e, 0x96, 0xcd, 0x3f, 0x3e, 0x86, 0x91, 0x2d, 0x3e,\n",
- " 0x55, 0xef, 0x87, 0x3e, 0x7e, 0x97, 0x03, 0xbe, 0x2a, 0xcd, 0x01, 0x3e,\n",
- " 0x32, 0xc9, 0x8e, 0xbe, 0x72, 0x77, 0x3b, 0xbe, 0xe0, 0xa1, 0xbc, 0xbe,\n",
- " 0x8d, 0xb7, 0xa7, 0x3e, 0x1c, 0x05, 0x95, 0xbe, 0xf7, 0x1f, 0xbb, 0x3e,\n",
- " 0xc9, 0x3e, 0xd6, 0x3e, 0x80, 0x42, 0xe9, 0xbd, 0x27, 0x0c, 0xd2, 0xbe,\n",
- " 0x5c, 0x32, 0x34, 0xbe, 0x14, 0xcb, 0xca, 0xbd, 0xdd, 0x3a, 0x67, 0xbe,\n",
- " 0x1c, 0xbb, 0x8d, 0xbe, 0x91, 0xac, 0x5c, 0xbe, 0x52, 0x40, 0x6f, 0xbe,\n",
- " 0xd7, 0x71, 0x94, 0x3e, 0x18, 0x71, 0x09, 0xbe, 0x9b, 0x29, 0xd9, 0xbe,\n",
- " 0x7d, 0x66, 0xd2, 0xbe, 0x98, 0xd6, 0xb2, 0xbe, 0x00, 0xc9, 0x84, 0x3a,\n",
- " 0xbc, 0xda, 0xc2, 0xbd, 0x1d, 0xc2, 0x1b, 0xbf, 0xd4, 0xdd, 0x92, 0x3e,\n",
- " 0x07, 0x87, 0x6c, 0xbe, 0x40, 0xc2, 0x3b, 0xbe, 0xbd, 0xe2, 0x9c, 0x3e,\n",
- " 0x0a, 0xb5, 0xa0, 0xbe, 0xe2, 0xd5, 0x9c, 0xbe, 0x3e, 0xbb, 0x7c, 0x3e,\n",
- " 0x17, 0xb4, 0xcf, 0x3e, 0xd5, 0x8e, 0xc8, 0xbe, 0x7c, 0xf9, 0x5c, 0x3e,\n",
- " 0x80, 0xfc, 0x0d, 0x3d, 0xc5, 0xd5, 0x8b, 0x3e, 0xf5, 0x17, 0xa2, 0x3e,\n",
- " 0xc7, 0x60, 0x89, 0xbe, 0xec, 0x95, 0x87, 0x3d, 0x7a, 0xc2, 0x5d, 0xbf,\n",
- " 0x77, 0x94, 0x98, 0x3e, 0x77, 0x39, 0x07, 0xbc, 0x42, 0x29, 0x00, 0x3e,\n",
- " 0xaf, 0xd0, 0xa9, 0x3e, 0x31, 0x23, 0xc4, 0xbe, 0x95, 0x36, 0x5b, 0xbe,\n",
- " 0xc7, 0xdc, 0x83, 0xbe, 0x1e, 0x6b, 0x47, 0x3e, 0x5b, 0x24, 0x99, 0x3e,\n",
- " 0x99, 0x27, 0x54, 0x3e, 0xc8, 0x20, 0xdd, 0xbd, 0x5a, 0x86, 0x2f, 0x3e,\n",
- " 0x80, 0xf0, 0x69, 0xbe, 0x44, 0xfc, 0x84, 0xbd, 0x82, 0xa0, 0x2a, 0xbe,\n",
- " 0x87, 0xe6, 0x2a, 0x3e, 0xd8, 0x34, 0xae, 0x3d, 0x50, 0xbd, 0xb5, 0x3e,\n",
- " 0xc4, 0x8c, 0x88, 0xbe, 0xe3, 0xbc, 0xa5, 0x3e, 0xa9, 0xda, 0x9e, 0x3e,\n",
- " 0x3e, 0xb8, 0x23, 0xbe, 0x80, 0x90, 0x15, 0x3d, 0x97, 0x3f, 0xc3, 0x3e,\n",
- " 0xca, 0x5c, 0x9d, 0x3e, 0x21, 0xe8, 0xe1, 0x3e, 0xc0, 0x49, 0x01, 0xbc,\n",
- " 0x00, 0x0b, 0x88, 0xbd, 0x3f, 0xf7, 0xca, 0x3c, 0xfb, 0x5a, 0xb1, 0x3e,\n",
- " 0x60, 0xd2, 0x0d, 0x3c, 0xce, 0x23, 0x78, 0xbf, 0x8f, 0x4f, 0xb9, 0xbe,\n",
- " 0x69, 0x6a, 0x34, 0xbf, 0x4b, 0x5e, 0xa9, 0x3e, 0x64, 0x8c, 0xd9, 0x3e,\n",
- " 0x52, 0x77, 0x36, 0x3e, 0xeb, 0xaf, 0xbe, 0x3e, 0x40, 0xbe, 0x36, 0x3c,\n",
- " 0x08, 0x65, 0x3b, 0xbd, 0x55, 0xe0, 0x66, 0xbd, 0xd2, 0xe8, 0x9b, 0xbe,\n",
- " 0x86, 0xe3, 0x09, 0xbe, 0x93, 0x3d, 0xdd, 0x3e, 0x0f, 0x66, 0x18, 0x3f,\n",
- " 0x18, 0x05, 0x33, 0xbd, 0xde, 0x15, 0xd7, 0xbe, 0xaa, 0xcf, 0x49, 0xbe,\n",
- " 0xa2, 0xa5, 0x64, 0x3e, 0xe6, 0x9c, 0x42, 0xbe, 0x54, 0x42, 0xcc, 0x3d,\n",
- " 0xa0, 0xbd, 0x9d, 0xbe, 0xc2, 0x69, 0x48, 0x3e, 0x5b, 0x8b, 0xa2, 0xbe,\n",
- " 0xc0, 0x13, 0x87, 0x3d, 0x36, 0xfd, 0x69, 0x3e, 0x05, 0x86, 0x40, 0xbe,\n",
- " 0x1e, 0x7a, 0xce, 0xbe, 0x46, 0x13, 0xa7, 0xbe, 0x68, 0x52, 0x86, 0xbe,\n",
- " 0x04, 0x9e, 0x86, 0xbd, 0x8c, 0x54, 0xc1, 0x3d, 0xe0, 0x3b, 0xad, 0x3c,\n",
- " 0x42, 0x67, 0x85, 0xbd, 0xea, 0x97, 0x42, 0x3e, 0x6e, 0x13, 0x3b, 0xbf,\n",
- " 0x56, 0x5b, 0x16, 0x3e, 0xaa, 0xab, 0xdf, 0x3e, 0xc8, 0x41, 0x36, 0x3d,\n",
- " 0x24, 0x2d, 0x47, 0xbe, 0x77, 0xa5, 0xae, 0x3e, 0xc0, 0xc2, 0x5b, 0x3c,\n",
- " 0xac, 0xac, 0x4e, 0x3e, 0x99, 0xec, 0x13, 0xbe, 0xf2, 0xab, 0x73, 0x3e,\n",
- " 0xaa, 0xa1, 0x48, 0xbe, 0xe8, 0xd3, 0x01, 0xbe, 0x60, 0xb7, 0xc7, 0xbd,\n",
- " 0x64, 0x72, 0xd3, 0x3d, 0x83, 0xd3, 0x99, 0x3e, 0x0c, 0x76, 0x34, 0xbe,\n",
- " 0x42, 0xda, 0x0d, 0x3e, 0xfb, 0x47, 0x9a, 0x3e, 0x8b, 0xdc, 0x92, 0xbe,\n",
- " 0x56, 0x7f, 0x6b, 0x3e, 0x04, 0xd4, 0x88, 0xbd, 0x11, 0x9e, 0x80, 0x3e,\n",
- " 0x3c, 0x89, 0xff, 0x3d, 0xb3, 0x3e, 0x88, 0x3e, 0xf7, 0xf0, 0x88, 0x3e,\n",
- " 0x28, 0xfb, 0xc9, 0xbe, 0x53, 0x3e, 0xcf, 0x3e, 0xac, 0x75, 0xdc, 0xbe,\n",
- " 0xdd, 0xca, 0xd7, 0x3e, 0x01, 0x58, 0xa7, 0x3e, 0x29, 0xb8, 0x13, 0xbf,\n",
- " 0x76, 0x81, 0x12, 0xbc, 0x28, 0x8b, 0x16, 0xbf, 0x0e, 0xec, 0x0e, 0x3e,\n",
- " 0x40, 0x0a, 0xdb, 0xbd, 0x98, 0xec, 0xbf, 0xbd, 0x32, 0x55, 0x0c, 0xbe,\n",
- " 0xfb, 0xf9, 0xc9, 0x3e, 0x83, 0x4a, 0x6d, 0xbe, 0x76, 0x59, 0xe2, 0xbe,\n",
- " 0x54, 0x7d, 0x9f, 0xbb, 0x9d, 0xe8, 0x95, 0x3e, 0x5c, 0xd3, 0xd0, 0x3d,\n",
- " 0x19, 0x8a, 0xb0, 0x3e, 0xde, 0x6f, 0x2e, 0xbe, 0xd0, 0x16, 0x83, 0x3d,\n",
- " 0x9c, 0x7d, 0x11, 0xbf, 0x2b, 0xcc, 0x25, 0x3c, 0x2a, 0xa5, 0x27, 0xbe,\n",
- " 0x22, 0x14, 0xc7, 0xbe, 0x5e, 0x7a, 0xac, 0x3e, 0x4e, 0x41, 0x94, 0xbe,\n",
- " 0x5a, 0x68, 0x7b, 0x3e, 0x86, 0xfd, 0x4e, 0x3e, 0xa2, 0x56, 0x6a, 0xbe,\n",
- " 0xca, 0xfe, 0x81, 0xbe, 0x43, 0xc3, 0xb1, 0xbd, 0xc5, 0xb8, 0xa7, 0x3e,\n",
- " 0x55, 0x23, 0xcd, 0x3e, 0xaf, 0x2e, 0x76, 0x3e, 0x69, 0xa8, 0x90, 0xbe,\n",
- " 0x0d, 0xba, 0xb9, 0x3e, 0x66, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x40, 0x00, 0x00, 0x00, 0x53, 0xd6, 0xe2, 0x3d, 0x66, 0xb6, 0xcc, 0x3e,\n",
- " 0x03, 0xe7, 0xf6, 0x3e, 0xe0, 0x28, 0x10, 0xbf, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x3e, 0x3d, 0xb0, 0x3e, 0x00, 0x00, 0x00, 0x00, 0x62, 0xf0, 0x77, 0x3e,\n",
- " 0xa6, 0x9d, 0xa4, 0x3e, 0x3a, 0x4b, 0xf3, 0xbe, 0x71, 0x9e, 0xa7, 0x3e,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x34, 0x39, 0xa2, 0x3e, 0x00, 0x00, 0x00, 0x00,\n",
- " 0xcc, 0x9c, 0x4a, 0x3e, 0xab, 0x40, 0xa3, 0x3e, 0xb2, 0xff, 0xff, 0xff,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0xb3, 0x71, 0x67, 0x3f,\n",
- " 0x9a, 0x7a, 0x95, 0xbf, 0xe1, 0x48, 0xe8, 0xbe, 0x8a, 0x72, 0x96, 0x3e,\n",
- " 0x00, 0xd2, 0xd3, 0xbb, 0x1a, 0xc5, 0xd7, 0x3f, 0xac, 0x7e, 0xc8, 0xbe,\n",
- " 0x90, 0xa7, 0x95, 0xbe, 0x3b, 0xd7, 0xdc, 0xbe, 0x41, 0xa8, 0x16, 0x3f,\n",
- " 0x50, 0x5b, 0xcb, 0x3f, 0x52, 0xb9, 0xed, 0xbe, 0x2e, 0xa7, 0xc6, 0xbe,\n",
- " 0xaf, 0x0f, 0x14, 0xbf, 0xb3, 0xda, 0x59, 0x3f, 0x02, 0xec, 0xd7, 0xbe,\n",
+ " 0x1c, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x12, 0x00,\n",
+ " 0x1c, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00,\n",
+ " 0x00, 0x00, 0x18, 0x00, 0x12, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,\n",
+ " 0x60, 0x09, 0x00, 0x00, 0xa8, 0x02, 0x00, 0x00, 0x90, 0x02, 0x00, 0x00,\n",
+ " 0x3c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x0c, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x08, 0x00,\n",
+ " 0x08, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00,\n",
+ " 0x13, 0x00, 0x00, 0x00, 0x6d, 0x69, 0x6e, 0x5f, 0x72, 0x75, 0x6e, 0x74,\n",
+ " 0x69, 0x6d, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x00,\n",
+ " 0x0c, 0x00, 0x00, 0x00, 0x48, 0x02, 0x00, 0x00, 0x34, 0x02, 0x00, 0x00,\n",
+ " 0x0c, 0x02, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0xac, 0x00, 0x00, 0x00,\n",
+ " 0x8c, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x34, 0x00, 0x00, 0x00,\n",
+ " 0x2c, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0xfe, 0xfd, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x05, 0x00, 0x00, 0x00, 0x31, 0x2e, 0x35, 0x2e, 0x30, 0x00, 0x00, 0x00,\n",
+ " 0x7c, 0xfd, 0xff, 0xff, 0x80, 0xfd, 0xff, 0xff, 0x84, 0xfd, 0xff, 0xff,\n",
+ " 0x88, 0xfd, 0xff, 0xff, 0x22, 0xfe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x40, 0x00, 0x00, 0x00, 0xfd, 0x13, 0x00, 0x00, 0xd2, 0x0e, 0x00, 0x00,\n",
+ " 0x5e, 0x0e, 0x00, 0x00, 0x28, 0xfe, 0xff, 0xff, 0x30, 0x0e, 0x00, 0x00,\n",
+ " 0x61, 0xe9, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xcd, 0x0a, 0x00, 0x00,\n",
+ " 0x0f, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xaa, 0x0b, 0x00, 0x00,\n",
+ " 0x44, 0x0d, 0x00, 0x00, 0x2c, 0x0c, 0x00, 0x00, 0x91, 0xf0, 0xff, 0xff,\n",
+ " 0xb6, 0xef, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x6e, 0xfe, 0xff, 0xff,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x19, 0x40, 0x4a, 0x52,\n",
+ " 0xb5, 0x95, 0xa8, 0xd3, 0x6a, 0x7f, 0x7a, 0x2a, 0xdd, 0x46, 0xe4, 0xd5,\n",
+ " 0x8a, 0xfe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,\n",
+ " 0x15, 0xfa, 0xff, 0xff, 0x2f, 0xe2, 0xff, 0xff, 0x04, 0xe6, 0xff, 0xff,\n",
+ " 0x2c, 0xee, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xb7, 0xda, 0xff, 0xff,\n",
+ " 0xe4, 0x06, 0x00, 0x00, 0x86, 0xf2, 0xff, 0xff, 0x2e, 0xee, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x70, 0x20, 0x00, 0x00, 0xbd, 0x04, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0xd6, 0xfe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x01, 0x00, 0x00, 0x14, 0xc2, 0x10, 0xf6, 0xf7, 0xe0, 0xde, 0xce,\n",
+ " 0xee, 0x96, 0xb2, 0x2d, 0x34, 0x3b, 0x4b, 0x1b, 0xfd, 0x81, 0xd6, 0x0a,\n",
+ " 0x15, 0xca, 0x10, 0xc8, 0xee, 0xff, 0xc7, 0xf9, 0x1e, 0x40, 0xe3, 0xec,\n",
+ " 0x14, 0xac, 0xc7, 0xc7, 0x21, 0x3c, 0xf4, 0xf8, 0xe3, 0x2c, 0xc2, 0xff,\n",
+ " 0xdb, 0x3d, 0x2f, 0x39, 0x1d, 0xf2, 0x2e, 0x01, 0xdb, 0x13, 0x35, 0xe9,\n",
+ " 0xd8, 0xcf, 0x24, 0xda, 0xf4, 0xf7, 0x0b, 0xdc, 0x29, 0xcb, 0xc5, 0x12,\n",
+ " 0x02, 0xcc, 0x22, 0x2d, 0xbf, 0x0e, 0x36, 0x10, 0xf8, 0x35, 0x46, 0x0d,\n",
+ " 0x1c, 0x47, 0x35, 0xda, 0xd8, 0xfc, 0xcc, 0x15, 0x41, 0xe5, 0x36, 0x35,\n",
+ " 0x3b, 0xc8, 0xfd, 0xda, 0xcf, 0x15, 0xe4, 0xc5, 0x00, 0xd6, 0xce, 0xe3,\n",
+ " 0x03, 0x1b, 0xe2, 0x03, 0xc8, 0xde, 0xc6, 0xf2, 0xe6, 0xee, 0xe9, 0xbb,\n",
+ " 0x1b, 0xee, 0x21, 0x07, 0x0b, 0x07, 0x29, 0x3d, 0x13, 0xff, 0xf1, 0x2c,\n",
+ " 0x1b, 0xcc, 0x1b, 0x10, 0x21, 0xd6, 0x10, 0xf9, 0x0b, 0x89, 0xce, 0xc7,\n",
+ " 0xf4, 0x09, 0x3c, 0xe4, 0x21, 0xd1, 0x0d, 0x07, 0xd4, 0xec, 0x09, 0xea,\n",
+ " 0xdf, 0xe6, 0xe7, 0x33, 0xd3, 0xdd, 0xd8, 0xee, 0xea, 0xc2, 0xde, 0xf5,\n",
+ " 0x2c, 0x0d, 0xfc, 0xd2, 0xdd, 0x24, 0x27, 0x0c, 0xea, 0x0e, 0xf2, 0x2d,\n",
+ " 0x18, 0xc2, 0xe5, 0xb4, 0xdd, 0x15, 0xc4, 0x2e, 0xae, 0xe3, 0x20, 0x21,\n",
+ " 0xf3, 0x2d, 0x02, 0xfb, 0x19, 0xb1, 0xf3, 0xcd, 0x1a, 0xf1, 0x2f, 0x22,\n",
+ " 0x10, 0x05, 0x1e, 0xdf, 0xed, 0x3c, 0x24, 0xd6, 0xfb, 0x54, 0x43, 0x0d,\n",
+ " 0xd2, 0x10, 0x00, 0xdd, 0x00, 0x26, 0x02, 0x01, 0xf6, 0xc4, 0xc8, 0xcd,\n",
+ " 0x19, 0x21, 0x1e, 0x35, 0x3b, 0x1a, 0x27, 0xd1, 0xfc, 0x05, 0x0e, 0x11,\n",
+ " 0x06, 0xf8, 0xdf, 0x38, 0x27, 0xfe, 0x26, 0xd5, 0x13, 0xec, 0x39, 0x1d,\n",
+ " 0xcb, 0xc5, 0xd2, 0xd9, 0x0e, 0xe0, 0xdd, 0x09, 0xe2, 0xff, 0xff, 0xff,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xaf, 0x4f, 0x56, 0x1e,\n",
+ " 0xe8, 0x7f, 0xe0, 0xef, 0xc9, 0xdd, 0xe8, 0x42, 0xf7, 0x24, 0x1f, 0xdc,\n",
" 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x66, 0x11, 0x1f, 0xbf,\n",
- " 0xb8, 0xfb, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x54, 0x4f, 0x43, 0x4f,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xb1, 0xe9, 0xff, 0xff,\n",
+ " 0x80, 0xff, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x54, 0x4f, 0x43, 0x4f,\n",
" 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x00,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, 0x00,\n",
- " 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
- " 0xf0, 0x00, 0x00, 0x00, 0xe4, 0x00, 0x00, 0x00, 0xd8, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x90, 0x00, 0x00, 0x00,\n",
- " 0x48, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xce, 0xff, 0xff, 0xff,\n",
- " 0x00, 0x00, 0x00, 0x08, 0x18, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x1c, 0xfc, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,\n",
- " 0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00,\n",
- " 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x10, 0x00,\n",
- " 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x1c, 0x00, 0x00, 0x00,\n",
- " 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xba, 0xff, 0xff, 0xff,\n",
- " 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,\n",
- " 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,\n",
- " 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x16, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xbc, 0xf9, 0xff, 0xff,\n",
+ " 0x48, 0x01, 0x00, 0x00, 0x3c, 0x01, 0x00, 0x00, 0x30, 0x01, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x04, 0x01, 0x00, 0x00,\n",
+ " 0xb8, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x1a, 0xff, 0xff, 0xff, 0x02, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0xca, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x08, 0x1c, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,\n",
+ " 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x14, 0x00, 0x00, 0x00,\n",
" 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x10, 0x00, 0x0e, 0x00, 0x00, 0x00,\n",
- " 0x00, 0x00, 0x00, 0x08, 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00,\n",
- " 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x07, 0x00,\n",
- " 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x0a, 0x00, 0x00, 0x00, 0x10, 0x03, 0x00, 0x00, 0xa4, 0x02, 0x00, 0x00,\n",
- " 0x40, 0x02, 0x00, 0x00, 0xf4, 0x01, 0x00, 0x00, 0xac, 0x01, 0x00, 0x00,\n",
- " 0x48, 0x01, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0xb4, 0x00, 0x00, 0x00,\n",
- " 0x50, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x26, 0xfd, 0xff, 0xff,\n",
- " 0x3c, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x18, 0xfd, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00,\n",
- " 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,\n",
- " 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74,\n",
- " 0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x6e, 0xfd, 0xff, 0xff,\n",
- " 0x50, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x60, 0xfd, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00,\n",
- " 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,\n",
- " 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74,\n",
- " 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69,\n",
- " 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73,\n",
- " 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xce, 0xfd, 0xff, 0xff,\n",
- " 0x34, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0xc0, 0xfd, 0xff, 0xff, 0x19, 0x00, 0x00, 0x00,\n",
- " 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,\n",
- " 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x52, 0x65, 0x6c,\n",
- " 0x75, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x10, 0x00, 0x00, 0x00, 0x12, 0xfe, 0xff, 0xff, 0x3c, 0x00, 0x00, 0x00,\n",
- " 0x03, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x04, 0xfe, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
+ " 0x00, 0x00, 0x00, 0x08, 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0xba, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x0e, 0x00, 0x16, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00,\n",
+ " 0x07, 0x00, 0x10, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,\n",
+ " 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x07, 0x00, 0x06, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,\n",
+ " 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x10, 0x00, 0x04, 0x00,\n",
+ " 0x08, 0x00, 0x0c, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x0a, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0xdc, 0x04, 0x00, 0x00,\n",
+ " 0x54, 0x04, 0x00, 0x00, 0xc4, 0x03, 0x00, 0x00, 0x54, 0x03, 0x00, 0x00,\n",
+ " 0xd0, 0x02, 0x00, 0x00, 0x4c, 0x02, 0x00, 0x00, 0xe0, 0x01, 0x00, 0x00,\n",
+ " 0x5c, 0x01, 0x00, 0x00, 0xd8, 0x00, 0x00, 0x00, 0x6c, 0x00, 0x00, 0x00,\n",
+ " 0x3c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xd8, 0xff, 0xff, 0xff,\n",
+ " 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,\n",
+ " 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x0c, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,\n",
+ " 0x0c, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x0d, 0x00, 0x00, 0x00, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x5f,\n",
+ " 0x69, 0x6e, 0x70, 0x75, 0x74, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xc2, 0xfb, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x02, 0x58, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xc4, 0xfc, 0xff, 0xff,\n",
+ " 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x68, 0xf6, 0x91, 0x38, 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
" 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,\n",
- " 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f,\n",
+ " 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f,\n",
" 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x10, 0x00, 0x00, 0x00, 0x5a, 0xfe, 0xff, 0xff, 0x50, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x4c, 0xfe, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
- " 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,\n",
- " 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f,\n",
- " 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,\n",
- " 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0x10, 0x00, 0x00, 0x00, 0xba, 0xfe, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00,\n",
- " 0x0a, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0xac, 0xfe, 0xff, 0xff, 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
- " 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,\n",
- " 0x73, 0x65, 0x5f, 0x32, 0x2f, 0x52, 0x65, 0x6c, 0x75, 0x00, 0x00, 0x00,\n",
- " 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0xfe, 0xfe, 0xff, 0xff, 0x3c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,\n",
- " 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xf0, 0xfe, 0xff, 0xff,\n",
- " 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,\n",
- " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32,\n",
- " 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0x46, 0xff, 0xff, 0xff, 0x50, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,\n",
- " 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x38, 0xff, 0xff, 0xff,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x2a, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09,\n",
+ " 0x6c, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x2c, 0xfd, 0xff, 0xff, 0x14, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x35, 0xfc, 0x4c, 0x3c,\n",
" 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,\n",
- " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32,\n",
+ " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x34,\n",
" 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64,\n",
" 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74,\n",
" 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00,\n",
- " 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0xa6, 0xff, 0xff, 0xff, 0x48, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00,\n",
- " 0x2c, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00,\n",
- " 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x43,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,\n",
- " 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x5f, 0x69, 0x6e, 0x70, 0x75,\n",
- " 0x74, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x14, 0x00, 0x04, 0x00,\n",
- " 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x0e, 0x00, 0x00, 0x00,\n",
- " 0x28, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
- " 0x08, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79,\n",
- " 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
- " 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
- " 0x00, 0x00, 0x0a, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00,\n",
- " 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x03, 0x00, 0x00, 0x00\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
+ " 0xaa, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x6c, 0x00, 0x00, 0x00,\n",
+ " 0x09, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x9c, 0xfc, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00,\n",
+ " 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0xd0, 0x49, 0xb6, 0x3b, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x86, 0x93, 0xb5, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,\n",
+ " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x33,\n",
+ " 0x2f, 0x52, 0x65, 0x6c, 0x75, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x2a, 0xfd, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x02, 0x58, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,\n",
+ " 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x2c, 0xfe, 0xff, 0xff,\n",
+ " 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x07, 0xcc, 0xb7, 0x38, 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
+ " 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,\n",
+ " 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f,\n",
+ " 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x92, 0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09,\n",
+ " 0x6c, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x94, 0xfe, 0xff, 0xff, 0x14, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xa9, 0x9f, 0xea, 0x3b,\n",
+ " 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,\n",
+ " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x33,\n",
+ " 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64,\n",
+ " 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74,\n",
+ " 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
+ " 0x12, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x6c, 0x00, 0x00, 0x00,\n",
+ " 0x07, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0xfe, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00,\n",
+ " 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0xe4, 0x8a, 0x48, 0x3c, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x59, 0xc2, 0x47, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,\n",
+ " 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32,\n",
+ " 0x2f, 0x52, 0x65, 0x6c, 0x75, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x92, 0xfe, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x02, 0x5c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x2c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x94, 0xff, 0xff, 0xff,\n",
+ " 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x11, 0xae, 0xbf, 0x38, 0x20, 0x00, 0x00, 0x00,\n",
+ " 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,\n",
+ " 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x2f, 0x4d, 0x61, 0x74,\n",
+ " 0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xfe, 0xfe, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x09, 0x78, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,\n",
+ " 0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x00, 0x00,\n",
+ " 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x2b, 0x85, 0x73, 0x3b, 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,\n",
+ " 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,\n",
+ " 0x73, 0x65, 0x5f, 0x32, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f,\n",
+ " 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,\n",
+ " 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x8a, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09,\n",
+ " 0x60, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,\n",
+ " 0x04, 0x00, 0x00, 0x00, 0x7c, 0xff, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00,\n",
+ " 0x20, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0xc9, 0x80, 0xc9, 0x3c, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x48, 0xb7, 0xc8, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n",
+ " 0x12, 0x00, 0x00, 0x00, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x5f,\n",
+ " 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x69, 0x6e, 0x74, 0x38, 0x00, 0x00,\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0c, 0x00,\n",
+ " 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,\n",
+ " 0x6c, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, 0x00, 0x04, 0x00, 0x08, 0x00,\n",
+ " 0x0c, 0x00, 0x10, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00,\n",
+ " 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n",
+ " 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xb3, 0x24, 0x01, 0x3c,\n",
+ " 0x01, 0x00, 0x00, 0x00, 0x8e, 0xee, 0x80, 0x3f, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x8e, 0x58, 0x80, 0xbf, 0x0d, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e,\n",
+ " 0x74, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x38, 0x00, 0x00, 0x00,\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,\n",
+ " 0x03, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00,\n",
+ " 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x0e, 0x00, 0x07, 0x00,\n",
+ " 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,\n",
+ " 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x06, 0x00, 0x05, 0x00,\n",
+ " 0x06, 0x00, 0x00, 0x00, 0x00, 0x72, 0x0a, 0x00, 0x0c, 0x00, 0x07, 0x00,\n",
+ " 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09,\n",
+ " 0x04, 0x00, 0x00, 0x00\n",
"};\n",
- "unsigned int sine_model_quantized_tflite_len = 2640;\n"
+ "unsigned int sine_model_quantized_tflite_len = 2512;\n"
],
"name": "stdout"
}
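For reference, the notebook's `xxd -i` step simply dumps the .tflite FlatBuffer as a C byte array; on device the example consumes that array through an extern declaration and maps it in place, with no parsing pass. A minimal sketch of that consumption side (the array names follow the example's `sine_model_data` convention visible in the diff below; the exact header paths are assumptions tied to this era's experimental tree):

```cpp
// Minimal sketch: consuming the xxd-generated model array on device.
// g_sine_model_data is defined in sine_model_data.cc (see the diff below);
// tflite::GetModel() interprets the FlatBuffer in place without copying.
#include "tensorflow/lite/schema/schema_generated.h"

extern const unsigned char g_sine_model_data[];
extern const int g_sine_model_data_len;

const tflite::Model* LoadSineModel() {
  return tflite::GetModel(g_sine_model_data);
}
```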
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/disco_f746ng/output_handler.cc b/tensorflow/lite/experimental/micro/examples/hello_world/disco_f746ng/output_handler.cc
index cbfe75a..3f642f8 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/disco_f746ng/output_handler.cc
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/disco_f746ng/output_handler.cc
@@ -26,8 +26,6 @@
const uint32_t foreground_color = 0xFFDB4437; // Red
// The size of the dot we'll draw
const int dot_radius = 10;
-// Track whether the function has run at least once
-bool initialized = false;
// Size of the drawable area
int width;
int height;
@@ -39,8 +37,11 @@
// Animates a dot across the screen to represent the current x and y values
void HandleOutput(tflite::ErrorReporter* error_reporter, float x_value,
float y_value) {
+ // Track whether the function has run at least once
+ static bool is_initialized = false;
+
// Do this only once
- if (!initialized) {
+ if (!is_initialized) {
// Set the background and foreground colors
lcd.Clear(background_color);
lcd.SetTextColor(foreground_color);
@@ -51,12 +52,9 @@
midpoint = height / 2;
// Calculate fractional pixels per unit of x_value
x_increment = static_cast<float>(width) / kXrange;
- initialized = true;
+ is_initialized = true;
}
- // Log the current X and Y values
- error_reporter->Report("x_value: %f, y_value: %f\n", x_value, y_value);
-
// Clear the previous drawing
lcd.Clear(background_color);
@@ -77,4 +75,7 @@
// Draw the dot
lcd.FillCircle(x_pos, y_pos, dot_radius);
+
+ // Log the current X and Y values
+ error_reporter->Report("x_value: %f, y_value: %f\n", x_value, y_value);
}
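The change above replaces a file-scope `initialized` flag with a function-local `static bool is_initialized`: the behavior is identical, but the flag's scope shrinks to the one function that needs it while its lifetime still spans all calls. (The hunk also moves the x/y logging to after the drawing, presumably so the display updates before the comparatively slow serial report.) A standalone sketch of the pattern:

```cpp
// One-time-setup pattern: a function-local static persists across calls
// but is invisible outside the function that owns it.
#include <cstdio>

void HandleOnce() {
  static bool is_initialized = false;  // set exactly once, survives returns
  if (!is_initialized) {
    std::printf("one-time setup\n");
    is_initialized = true;
  }
  std::printf("per-call work\n");
}

int main() {
  HandleOnce();  // prints both lines
  HandleOnce();  // prints only "per-call work"
  return 0;
}
```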
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/hello_world_test.cc b/tensorflow/lite/experimental/micro/examples/hello_world/hello_world_test.cc
index 508047a..273c301 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/hello_world_test.cc
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/hello_world_test.cc
@@ -44,7 +44,7 @@
// Create an area of memory to use for input, output, and intermediate arrays.
// Finding the minimum value for your model may require some trial and error.
- const int tensor_arena_size = 2 * 1024;
+ const int tensor_arena_size = 3 * 1024;
uint8_t tensor_arena[tensor_arena_size];
// Build an interpreter to run the model with
@@ -88,8 +88,8 @@
// Obtain the output value from the tensor
float value = output->data.f[0];
- // Check that the output value is within 0.05 of the expected value
- TF_LITE_MICRO_EXPECT_NEAR(0., value, 0.05);
+ // Check that the output value is within 0.07 of the expected value
+ TF_LITE_MICRO_EXPECT_NEAR(0., value, 0.07);
// Run inference on several more values and confirm the expected outputs
input->data.f[0] = 1.;
@@ -97,21 +97,21 @@
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
value = output->data.f[0];
- TF_LITE_MICRO_EXPECT_NEAR(0.841, value, 0.05);
+ TF_LITE_MICRO_EXPECT_NEAR(0.841, value, 0.07);
input->data.f[0] = 3.;
invoke_status = interpreter.Invoke();
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
value = output->data.f[0];
- TF_LITE_MICRO_EXPECT_NEAR(0.141, value, 0.05);
+ TF_LITE_MICRO_EXPECT_NEAR(0.141, value, 0.07);
input->data.f[0] = 5.;
invoke_status = interpreter.Invoke();
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
value = output->data.f[0];
- TF_LITE_MICRO_EXPECT_NEAR(-0.959, value, 0.05);
+ TF_LITE_MICRO_EXPECT_NEAR(-0.959, value, 0.07);
}
TF_LITE_MICRO_TESTS_END
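Both changes in this test track the switch to the quantized model: the interpreter's scratch arena grows from 2 KB to 3 KB, and the tolerance loosens from 0.05 to 0.07, presumably to absorb the extra error that int8 quantization introduces. For context, a condensed sketch of the setup the test exercises (API and header paths as of the experimental micro tree at this revision; constructor signatures have shifted in later versions):

```cpp
// Condensed sketch of the hello_world test's interpreter setup.
#include <cstdint>

#include "tensorflow/lite/experimental/micro/kernels/all_ops_resolver.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
#include "tensorflow/lite/schema/schema_generated.h"

extern const unsigned char g_sine_model_data[];

float PredictSine(float x) {
  tflite::MicroErrorReporter micro_error_reporter;
  const tflite::Model* model = tflite::GetModel(g_sine_model_data);
  tflite::ops::micro::AllOpsResolver resolver;

  // Scratch space for input, output, and intermediate tensors. The
  // quantized model needs more than the old 2 KB; sizing the arena is
  // trial and error, as the test's own comment notes.
  const int tensor_arena_size = 3 * 1024;
  uint8_t tensor_arena[tensor_arena_size];

  tflite::MicroInterpreter interpreter(model, resolver, tensor_arena,
                                       tensor_arena_size,
                                       &micro_error_reporter);
  interpreter.AllocateTensors();
  interpreter.input(0)->data.f[0] = x;
  interpreter.Invoke();
  return interpreter.output(0)->data.f[0];
}
```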
diff --git a/tensorflow/lite/experimental/micro/examples/hello_world/sine_model_data.cc b/tensorflow/lite/experimental/micro/examples/hello_world/sine_model_data.cc
index c69c949..d3cd3a2 100644
--- a/tensorflow/lite/experimental/micro/examples/hello_world/sine_model_data.cc
+++ b/tensorflow/lite/experimental/micro/examples/hello_world/sine_model_data.cc
@@ -32,138 +32,69 @@
#endif
const unsigned char g_sine_model_data[] DATA_ALIGN_ATTRIBUTE = {
- 0x18, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x0e, 0x00,
- 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00,
- 0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x0a, 0x00, 0x00,
- 0xb8, 0x05, 0x00, 0x00, 0xa0, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x0b, 0x00, 0x00, 0x00, 0x90, 0x05, 0x00, 0x00, 0x7c, 0x05, 0x00, 0x00,
- 0x24, 0x05, 0x00, 0x00, 0xd4, 0x04, 0x00, 0x00, 0xc4, 0x00, 0x00, 0x00,
- 0x74, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,
- 0x14, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x54, 0xf6, 0xff, 0xff, 0x58, 0xf6, 0xff, 0xff, 0x5c, 0xf6, 0xff, 0xff,
- 0x60, 0xf6, 0xff, 0xff, 0xc2, 0xfa, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,
- 0x40, 0x00, 0x00, 0x00, 0x7c, 0x19, 0xa7, 0x3e, 0x99, 0x81, 0xb9, 0x3e,
- 0x56, 0x8b, 0x9f, 0x3e, 0x88, 0xd8, 0x12, 0xbf, 0x74, 0x10, 0x56, 0x3e,
- 0xfe, 0xc6, 0xdf, 0xbe, 0xf2, 0x10, 0x5a, 0xbe, 0xf0, 0xe2, 0x0a, 0xbe,
- 0x10, 0x5a, 0x98, 0xbe, 0xb9, 0x36, 0xce, 0x3d, 0x8f, 0x7f, 0x87, 0x3e,
- 0x2c, 0xb1, 0xfd, 0xbd, 0xe6, 0xa6, 0x8a, 0xbe, 0xa5, 0x3e, 0xda, 0x3e,
- 0x50, 0x34, 0xed, 0xbd, 0x90, 0x91, 0x69, 0xbe, 0x0e, 0xfb, 0xff, 0xff,
- 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x67, 0x41, 0x48, 0xbf,
- 0x24, 0xcd, 0xa0, 0xbe, 0xb7, 0x92, 0x0c, 0xbf, 0x00, 0x00, 0x00, 0x00,
- 0x98, 0xfe, 0x3c, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0x17, 0x9a, 0xbe,
- 0x41, 0xcb, 0xb6, 0xbe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x13, 0xd6, 0x1e, 0x3e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
- 0x5a, 0xfb, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
- 0x4b, 0x98, 0xdd, 0xbd, 0x40, 0x6b, 0xcb, 0xbe, 0x36, 0x0c, 0xd4, 0x3c,
- 0xbd, 0x44, 0xb5, 0x3e, 0x95, 0x70, 0xe3, 0x3e, 0xe7, 0xac, 0x86, 0x3e,
- 0x00, 0xc4, 0x4e, 0x3d, 0x7e, 0xa6, 0x1d, 0x3e, 0xbd, 0x87, 0xbb, 0x3e,
- 0xb4, 0xb8, 0x09, 0xbf, 0xa1, 0x1f, 0xf8, 0xbe, 0x8d, 0x90, 0xdd, 0x3e,
- 0xde, 0xfa, 0x6f, 0xbe, 0xb2, 0x75, 0xe4, 0x3d, 0x6e, 0xfe, 0x36, 0x3e,
- 0x20, 0x18, 0xc2, 0xbe, 0x39, 0xc7, 0xfb, 0xbe, 0xfe, 0xa4, 0x30, 0xbe,
- 0xf7, 0x91, 0xde, 0xbe, 0xde, 0xab, 0x24, 0x3e, 0xfb, 0xbb, 0xce, 0x3e,
- 0xeb, 0x23, 0x80, 0xbe, 0x7b, 0x58, 0x73, 0xbe, 0x9a, 0x2e, 0x03, 0x3e,
- 0x10, 0x42, 0xa9, 0xbc, 0x10, 0x12, 0x64, 0xbd, 0xe3, 0x8d, 0x0c, 0x3d,
- 0x9e, 0x48, 0x97, 0xbe, 0x34, 0x51, 0xd4, 0xbe, 0x02, 0x3b, 0x0d, 0x3e,
- 0x62, 0x67, 0x89, 0xbe, 0x74, 0xdf, 0xa2, 0x3d, 0xf3, 0x25, 0xb3, 0xbe,
- 0xef, 0x34, 0x7b, 0x3d, 0x61, 0x70, 0xe3, 0x3d, 0xba, 0x76, 0xc0, 0xbe,
- 0x7d, 0xe9, 0xa7, 0x3e, 0xc3, 0xab, 0xd0, 0xbe, 0xcf, 0x7c, 0xdb, 0xbe,
- 0x70, 0x27, 0x9a, 0xbe, 0x98, 0xf5, 0x3c, 0xbd, 0xff, 0x4b, 0x4b, 0x3e,
- 0x7e, 0xa0, 0xf8, 0xbd, 0xd4, 0x6e, 0x86, 0x3d, 0x00, 0x4a, 0x07, 0x3a,
- 0x4c, 0x24, 0x61, 0xbe, 0x54, 0x68, 0xf7, 0xbd, 0x02, 0x3f, 0x77, 0xbe,
- 0x23, 0x79, 0xb3, 0x3e, 0x1c, 0x83, 0xad, 0xbd, 0xc8, 0x92, 0x8d, 0x3e,
- 0xa8, 0xf3, 0x15, 0xbd, 0xe6, 0x4d, 0x6c, 0x3d, 0xac, 0xe7, 0x98, 0xbe,
- 0x81, 0xec, 0xbd, 0x3e, 0xe2, 0x55, 0x73, 0x3e, 0xc1, 0x77, 0xc7, 0x3e,
- 0x6e, 0x1b, 0x5e, 0x3d, 0x27, 0x78, 0x02, 0x3f, 0xd4, 0x21, 0x90, 0x3d,
- 0x52, 0xdc, 0x1f, 0x3e, 0xbf, 0xda, 0x88, 0x3e, 0x80, 0x79, 0xe3, 0xbd,
- 0x40, 0x6f, 0x10, 0xbe, 0x20, 0x43, 0x2e, 0xbd, 0xf0, 0x76, 0xc5, 0xbd,
- 0xcc, 0xa0, 0x04, 0xbe, 0xf0, 0x69, 0xd7, 0xbe, 0xb1, 0xfe, 0x64, 0xbe,
- 0x20, 0x41, 0x84, 0xbe, 0xb2, 0xc3, 0x26, 0xbe, 0xd8, 0xf4, 0x09, 0xbe,
- 0x64, 0x44, 0xd1, 0x3d, 0xd5, 0xe1, 0xc8, 0xbe, 0x35, 0xbc, 0x3f, 0xbe,
- 0xc0, 0x94, 0x82, 0x3d, 0xdc, 0x2b, 0xb1, 0xbd, 0x02, 0xdb, 0xbf, 0xbe,
- 0xa5, 0x7f, 0x8a, 0x3e, 0x21, 0xb4, 0xa2, 0x3e, 0xcd, 0x86, 0x56, 0xbf,
- 0x9c, 0x3b, 0x76, 0xbc, 0x85, 0x6d, 0x60, 0xbf, 0x86, 0x00, 0x3c, 0xbe,
- 0xc1, 0x23, 0x7e, 0x3e, 0x96, 0xcd, 0x3f, 0x3e, 0x86, 0x91, 0x2d, 0x3e,
- 0x55, 0xef, 0x87, 0x3e, 0x7e, 0x97, 0x03, 0xbe, 0x2a, 0xcd, 0x01, 0x3e,
- 0x32, 0xc9, 0x8e, 0xbe, 0x72, 0x77, 0x3b, 0xbe, 0xe0, 0xa1, 0xbc, 0xbe,
- 0x8d, 0xb7, 0xa7, 0x3e, 0x1c, 0x05, 0x95, 0xbe, 0xf7, 0x1f, 0xbb, 0x3e,
- 0xc9, 0x3e, 0xd6, 0x3e, 0x80, 0x42, 0xe9, 0xbd, 0x27, 0x0c, 0xd2, 0xbe,
- 0x5c, 0x32, 0x34, 0xbe, 0x14, 0xcb, 0xca, 0xbd, 0xdd, 0x3a, 0x67, 0xbe,
- 0x1c, 0xbb, 0x8d, 0xbe, 0x91, 0xac, 0x5c, 0xbe, 0x52, 0x40, 0x6f, 0xbe,
- 0xd7, 0x71, 0x94, 0x3e, 0x18, 0x71, 0x09, 0xbe, 0x9b, 0x29, 0xd9, 0xbe,
- 0x7d, 0x66, 0xd2, 0xbe, 0x98, 0xd6, 0xb2, 0xbe, 0x00, 0xc9, 0x84, 0x3a,
- 0xbc, 0xda, 0xc2, 0xbd, 0x1d, 0xc2, 0x1b, 0xbf, 0xd4, 0xdd, 0x92, 0x3e,
- 0x07, 0x87, 0x6c, 0xbe, 0x40, 0xc2, 0x3b, 0xbe, 0xbd, 0xe2, 0x9c, 0x3e,
- 0x0a, 0xb5, 0xa0, 0xbe, 0xe2, 0xd5, 0x9c, 0xbe, 0x3e, 0xbb, 0x7c, 0x3e,
- 0x17, 0xb4, 0xcf, 0x3e, 0xd5, 0x8e, 0xc8, 0xbe, 0x7c, 0xf9, 0x5c, 0x3e,
- 0x80, 0xfc, 0x0d, 0x3d, 0xc5, 0xd5, 0x8b, 0x3e, 0xf5, 0x17, 0xa2, 0x3e,
- 0xc7, 0x60, 0x89, 0xbe, 0xec, 0x95, 0x87, 0x3d, 0x7a, 0xc2, 0x5d, 0xbf,
- 0x77, 0x94, 0x98, 0x3e, 0x77, 0x39, 0x07, 0xbc, 0x42, 0x29, 0x00, 0x3e,
- 0xaf, 0xd0, 0xa9, 0x3e, 0x31, 0x23, 0xc4, 0xbe, 0x95, 0x36, 0x5b, 0xbe,
- 0xc7, 0xdc, 0x83, 0xbe, 0x1e, 0x6b, 0x47, 0x3e, 0x5b, 0x24, 0x99, 0x3e,
- 0x99, 0x27, 0x54, 0x3e, 0xc8, 0x20, 0xdd, 0xbd, 0x5a, 0x86, 0x2f, 0x3e,
- 0x80, 0xf0, 0x69, 0xbe, 0x44, 0xfc, 0x84, 0xbd, 0x82, 0xa0, 0x2a, 0xbe,
- 0x87, 0xe6, 0x2a, 0x3e, 0xd8, 0x34, 0xae, 0x3d, 0x50, 0xbd, 0xb5, 0x3e,
- 0xc4, 0x8c, 0x88, 0xbe, 0xe3, 0xbc, 0xa5, 0x3e, 0xa9, 0xda, 0x9e, 0x3e,
- 0x3e, 0xb8, 0x23, 0xbe, 0x80, 0x90, 0x15, 0x3d, 0x97, 0x3f, 0xc3, 0x3e,
- 0xca, 0x5c, 0x9d, 0x3e, 0x21, 0xe8, 0xe1, 0x3e, 0xc0, 0x49, 0x01, 0xbc,
- 0x00, 0x0b, 0x88, 0xbd, 0x3f, 0xf7, 0xca, 0x3c, 0xfb, 0x5a, 0xb1, 0x3e,
- 0x60, 0xd2, 0x0d, 0x3c, 0xce, 0x23, 0x78, 0xbf, 0x8f, 0x4f, 0xb9, 0xbe,
- 0x69, 0x6a, 0x34, 0xbf, 0x4b, 0x5e, 0xa9, 0x3e, 0x64, 0x8c, 0xd9, 0x3e,
- 0x52, 0x77, 0x36, 0x3e, 0xeb, 0xaf, 0xbe, 0x3e, 0x40, 0xbe, 0x36, 0x3c,
- 0x08, 0x65, 0x3b, 0xbd, 0x55, 0xe0, 0x66, 0xbd, 0xd2, 0xe8, 0x9b, 0xbe,
- 0x86, 0xe3, 0x09, 0xbe, 0x93, 0x3d, 0xdd, 0x3e, 0x0f, 0x66, 0x18, 0x3f,
- 0x18, 0x05, 0x33, 0xbd, 0xde, 0x15, 0xd7, 0xbe, 0xaa, 0xcf, 0x49, 0xbe,
- 0xa2, 0xa5, 0x64, 0x3e, 0xe6, 0x9c, 0x42, 0xbe, 0x54, 0x42, 0xcc, 0x3d,
- 0xa0, 0xbd, 0x9d, 0xbe, 0xc2, 0x69, 0x48, 0x3e, 0x5b, 0x8b, 0xa2, 0xbe,
- 0xc0, 0x13, 0x87, 0x3d, 0x36, 0xfd, 0x69, 0x3e, 0x05, 0x86, 0x40, 0xbe,
- 0x1e, 0x7a, 0xce, 0xbe, 0x46, 0x13, 0xa7, 0xbe, 0x68, 0x52, 0x86, 0xbe,
- 0x04, 0x9e, 0x86, 0xbd, 0x8c, 0x54, 0xc1, 0x3d, 0xe0, 0x3b, 0xad, 0x3c,
- 0x42, 0x67, 0x85, 0xbd, 0xea, 0x97, 0x42, 0x3e, 0x6e, 0x13, 0x3b, 0xbf,
- 0x56, 0x5b, 0x16, 0x3e, 0xaa, 0xab, 0xdf, 0x3e, 0xc8, 0x41, 0x36, 0x3d,
- 0x24, 0x2d, 0x47, 0xbe, 0x77, 0xa5, 0xae, 0x3e, 0xc0, 0xc2, 0x5b, 0x3c,
- 0xac, 0xac, 0x4e, 0x3e, 0x99, 0xec, 0x13, 0xbe, 0xf2, 0xab, 0x73, 0x3e,
- 0xaa, 0xa1, 0x48, 0xbe, 0xe8, 0xd3, 0x01, 0xbe, 0x60, 0xb7, 0xc7, 0xbd,
- 0x64, 0x72, 0xd3, 0x3d, 0x83, 0xd3, 0x99, 0x3e, 0x0c, 0x76, 0x34, 0xbe,
- 0x42, 0xda, 0x0d, 0x3e, 0xfb, 0x47, 0x9a, 0x3e, 0x8b, 0xdc, 0x92, 0xbe,
- 0x56, 0x7f, 0x6b, 0x3e, 0x04, 0xd4, 0x88, 0xbd, 0x11, 0x9e, 0x80, 0x3e,
- 0x3c, 0x89, 0xff, 0x3d, 0xb3, 0x3e, 0x88, 0x3e, 0xf7, 0xf0, 0x88, 0x3e,
- 0x28, 0xfb, 0xc9, 0xbe, 0x53, 0x3e, 0xcf, 0x3e, 0xac, 0x75, 0xdc, 0xbe,
- 0xdd, 0xca, 0xd7, 0x3e, 0x01, 0x58, 0xa7, 0x3e, 0x29, 0xb8, 0x13, 0xbf,
- 0x76, 0x81, 0x12, 0xbc, 0x28, 0x8b, 0x16, 0xbf, 0x0e, 0xec, 0x0e, 0x3e,
- 0x40, 0x0a, 0xdb, 0xbd, 0x98, 0xec, 0xbf, 0xbd, 0x32, 0x55, 0x0c, 0xbe,
- 0xfb, 0xf9, 0xc9, 0x3e, 0x83, 0x4a, 0x6d, 0xbe, 0x76, 0x59, 0xe2, 0xbe,
- 0x54, 0x7d, 0x9f, 0xbb, 0x9d, 0xe8, 0x95, 0x3e, 0x5c, 0xd3, 0xd0, 0x3d,
- 0x19, 0x8a, 0xb0, 0x3e, 0xde, 0x6f, 0x2e, 0xbe, 0xd0, 0x16, 0x83, 0x3d,
- 0x9c, 0x7d, 0x11, 0xbf, 0x2b, 0xcc, 0x25, 0x3c, 0x2a, 0xa5, 0x27, 0xbe,
- 0x22, 0x14, 0xc7, 0xbe, 0x5e, 0x7a, 0xac, 0x3e, 0x4e, 0x41, 0x94, 0xbe,
- 0x5a, 0x68, 0x7b, 0x3e, 0x86, 0xfd, 0x4e, 0x3e, 0xa2, 0x56, 0x6a, 0xbe,
- 0xca, 0xfe, 0x81, 0xbe, 0x43, 0xc3, 0xb1, 0xbd, 0xc5, 0xb8, 0xa7, 0x3e,
- 0x55, 0x23, 0xcd, 0x3e, 0xaf, 0x2e, 0x76, 0x3e, 0x69, 0xa8, 0x90, 0xbe,
- 0x0d, 0xba, 0xb9, 0x3e, 0x66, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,
- 0x40, 0x00, 0x00, 0x00, 0x53, 0xd6, 0xe2, 0x3d, 0x66, 0xb6, 0xcc, 0x3e,
- 0x03, 0xe7, 0xf6, 0x3e, 0xe0, 0x28, 0x10, 0xbf, 0x00, 0x00, 0x00, 0x00,
- 0x3e, 0x3d, 0xb0, 0x3e, 0x00, 0x00, 0x00, 0x00, 0x62, 0xf0, 0x77, 0x3e,
- 0xa6, 0x9d, 0xa4, 0x3e, 0x3a, 0x4b, 0xf3, 0xbe, 0x71, 0x9e, 0xa7, 0x3e,
- 0x00, 0x00, 0x00, 0x00, 0x34, 0x39, 0xa2, 0x3e, 0x00, 0x00, 0x00, 0x00,
- 0xcc, 0x9c, 0x4a, 0x3e, 0xab, 0x40, 0xa3, 0x3e, 0xb2, 0xff, 0xff, 0xff,
- 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0xb3, 0x71, 0x67, 0x3f,
- 0x9a, 0x7a, 0x95, 0xbf, 0xe1, 0x48, 0xe8, 0xbe, 0x8a, 0x72, 0x96, 0x3e,
- 0x00, 0xd2, 0xd3, 0xbb, 0x1a, 0xc5, 0xd7, 0x3f, 0xac, 0x7e, 0xc8, 0xbe,
- 0x90, 0xa7, 0x95, 0xbe, 0x3b, 0xd7, 0xdc, 0xbe, 0x41, 0xa8, 0x16, 0x3f,
- 0x50, 0x5b, 0xcb, 0x3f, 0x52, 0xb9, 0xed, 0xbe, 0x2e, 0xa7, 0xc6, 0xbe,
- 0xaf, 0x0f, 0x14, 0xbf, 0xb3, 0xda, 0x59, 0x3f, 0x02, 0xec, 0xd7, 0xbe,
- 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x66, 0x11, 0x1f, 0xbf,
- 0xb8, 0xfb, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x54, 0x4f, 0x43, 0x4f,
- 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x0c, 0x00, 0x00, 0x00,
- 0xf0, 0x00, 0x00, 0x00, 0xe4, 0x00, 0x00, 0x00, 0xd8, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x90, 0x00, 0x00, 0x00,
- 0x48, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xce, 0xff, 0xff, 0xff,
- 0x00, 0x00, 0x00, 0x08, 0x18, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x1c, 0xfc, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00,
+ 0x1c, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00,
+ 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
+ 0x10, 0x09, 0x00, 0x00, 0x58, 0x02, 0x00, 0x00, 0x40, 0x02, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x30, 0x02, 0x00, 0x00,
+ 0x1c, 0x02, 0x00, 0x00, 0xf4, 0x01, 0x00, 0x00, 0xa4, 0x01, 0x00, 0x00,
+ 0x94, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00,
+ 0x1c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x7c, 0xfd, 0xff, 0xff, 0x80, 0xfd, 0xff, 0xff,
+ 0x84, 0xfd, 0xff, 0xff, 0x88, 0xfd, 0xff, 0xff, 0x22, 0xfe, 0xff, 0xff,
+ 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xad, 0x67, 0x48, 0xc4,
+ 0x7f, 0x82, 0x9c, 0x47, 0x5f, 0x28, 0x36, 0x35, 0x89, 0x38, 0x8b, 0xed,
+ 0x3e, 0xfe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x6f, 0x01, 0x00, 0x00, 0x13, 0xf6, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0x25, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xdd, 0xe9, 0xff, 0xff, 0x25, 0xef, 0xff, 0xff,
+ 0x36, 0xe5, 0xff, 0xff, 0xf8, 0xf2, 0xff, 0xff, 0x65, 0x15, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x38, 0xe9, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x8a, 0xfe, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00,
+ 0x00, 0x01, 0x00, 0x00, 0xe7, 0xf4, 0x03, 0xe5, 0x0e, 0x19, 0x0d, 0xe3,
+ 0x1a, 0xca, 0x16, 0x1e, 0xe3, 0x02, 0xf6, 0xff, 0xfb, 0x10, 0x1f, 0xf4,
+ 0xfa, 0xf1, 0xff, 0xff, 0x0f, 0xb6, 0xf5, 0x19, 0x0e, 0xf3, 0xe1, 0xf9,
+ 0xdc, 0x13, 0xf2, 0xea, 0xf4, 0xd9, 0xef, 0xd9, 0x1b, 0xfd, 0xe4, 0x14,
+ 0x20, 0xc9, 0x1c, 0x0e, 0xe2, 0xda, 0xfc, 0xfe, 0xe1, 0x0b, 0x06, 0xde,
+ 0xdf, 0xe3, 0xde, 0x1d, 0x11, 0xf5, 0xec, 0x1d, 0x18, 0xf9, 0xe4, 0xe9,
+ 0xe0, 0x16, 0xea, 0xfd, 0x1d, 0xf1, 0x08, 0x0e, 0x0f, 0x1d, 0x15, 0xfe,
+ 0x13, 0xd6, 0xe8, 0xec, 0xdd, 0xf4, 0xdd, 0xf9, 0xee, 0xdd, 0x09, 0x15,
+ 0x01, 0xec, 0x13, 0xdf, 0x13, 0xea, 0x17, 0x1d, 0xe3, 0x05, 0x1d, 0x09,
+ 0xe3, 0x0d, 0xfc, 0xda, 0xe9, 0xf6, 0x0b, 0xeb, 0x06, 0xf6, 0x10, 0xdc,
+ 0x09, 0xf8, 0x0f, 0x18, 0xda, 0x2b, 0xf2, 0x19, 0x09, 0xeb, 0x00, 0xee,
+ 0x01, 0xe8, 0x1c, 0xf1, 0x0c, 0xf2, 0x1b, 0xc4, 0x0c, 0xd2, 0xf0, 0x0b,
+ 0xe4, 0x87, 0xdc, 0x1b, 0x0d, 0xf1, 0x14, 0xe1, 0x28, 0x12, 0x16, 0xd0,
+ 0xf1, 0xca, 0x09, 0xf5, 0xdd, 0xbf, 0x19, 0x0d, 0xdc, 0x15, 0xea, 0x18,
+ 0x05, 0xf3, 0x12, 0xfb, 0x17, 0x3b, 0x1a, 0xf1, 0xf6, 0x32, 0x15, 0x10,
+ 0x04, 0x0d, 0x0e, 0x16, 0x20, 0x12, 0xff, 0x07, 0x2b, 0x04, 0xe7, 0x02,
+ 0xed, 0x17, 0xdb, 0x1b, 0xe9, 0xde, 0x07, 0x15, 0x17, 0xdc, 0x05, 0x21,
+ 0xdb, 0xdf, 0x0a, 0xf1, 0x0a, 0xff, 0xdd, 0xf4, 0xf7, 0x1c, 0xf1, 0x1f,
+ 0x34, 0xf4, 0x04, 0x81, 0xcc, 0x6f, 0xb2, 0x20, 0x08, 0x86, 0x20, 0x0c,
+ 0xea, 0x0f, 0xfe, 0xfb, 0xe8, 0xe1, 0xfb, 0xe3, 0xf6, 0xf3, 0xe4, 0xe7,
+ 0xe4, 0x07, 0xda, 0xf1, 0xe9, 0xd7, 0x04, 0xf8, 0x07, 0x18, 0x18, 0xde,
+ 0xed, 0xd7, 0xdf, 0x12, 0xfa, 0xef, 0xfc, 0xfc, 0x96, 0xff, 0xff, 0xff,
+ 0x04, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x22, 0x03, 0x00, 0x00,
+ 0x63, 0x03, 0x00, 0x00, 0xf8, 0x0c, 0x00, 0x00, 0x0a, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0x47, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x6c, 0xf8, 0xff, 0xff, 0x3b, 0x07, 0x00, 0x00, 0x5e, 0x0e, 0x00, 0x00,
+ 0x82, 0xed, 0xff, 0xff, 0x25, 0xfa, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
+ 0x09, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xfd, 0xff, 0xff,
+ 0xe2, 0xff, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0xea, 0xe6, 0xb9, 0x08, 0xe0, 0x92, 0x01, 0x00, 0x23, 0xb3, 0x24, 0x19,
+ 0xd8, 0x7f, 0xf8, 0x17, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00,
+ 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x31, 0xf5, 0xff, 0xff, 0x80, 0xff, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00,
+ 0x54, 0x4f, 0x43, 0x4f, 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74,
+ 0x65, 0x64, 0x2e, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xbc, 0xf9, 0xff, 0xff, 0x48, 0x01, 0x00, 0x00, 0x3c, 0x01, 0x00, 0x00,
+ 0x30, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
+ 0x04, 0x01, 0x00, 0x00, 0xb8, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00,
+ 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x1a, 0xff, 0xff, 0xff,
+ 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0xca, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x08,
+ 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00,
0x14, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x10, 0x00,
@@ -177,79 +108,131 @@
0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x07, 0x00,
0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x0a, 0x00, 0x00, 0x00, 0x10, 0x03, 0x00, 0x00, 0xa4, 0x02, 0x00, 0x00,
- 0x40, 0x02, 0x00, 0x00, 0xf4, 0x01, 0x00, 0x00, 0xac, 0x01, 0x00, 0x00,
- 0x48, 0x01, 0x00, 0x00, 0xfc, 0x00, 0x00, 0x00, 0xb4, 0x00, 0x00, 0x00,
- 0x50, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x26, 0xfd, 0xff, 0xff,
- 0x3c, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x18, 0xfd, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00,
+ 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00,
+ 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x0a, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
+ 0xdc, 0x04, 0x00, 0x00, 0x54, 0x04, 0x00, 0x00, 0xc4, 0x03, 0x00, 0x00,
+ 0x54, 0x03, 0x00, 0x00, 0xd0, 0x02, 0x00, 0x00, 0x4c, 0x02, 0x00, 0x00,
+ 0xe0, 0x01, 0x00, 0x00, 0x5c, 0x01, 0x00, 0x00, 0xd8, 0x00, 0x00, 0x00,
+ 0x6c, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xd8, 0xff, 0xff, 0xff, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x08, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x64, 0x65, 0x6e, 0x73,
+ 0x65, 0x5f, 0x32, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x00, 0x00, 0x00,
+ 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0xc2, 0xfb, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0x58, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0xc4, 0xfc, 0xff, 0xff, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x1d, 0xc4, 0x1c, 0x39, 0x20, 0x00, 0x00, 0x00,
0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,
0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74,
0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x6e, 0xfd, 0xff, 0xff,
- 0x50, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x60, 0xfd, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00,
- 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,
- 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74,
- 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69,
- 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73,
- 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xce, 0xfd, 0xff, 0xff,
- 0x34, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0xc0, 0xfd, 0xff, 0xff, 0x19, 0x00, 0x00, 0x00,
- 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,
- 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x52, 0x65, 0x6c,
- 0x75, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x12, 0xfe, 0xff, 0xff, 0x3c, 0x00, 0x00, 0x00,
- 0x03, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x04, 0xfe, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
+ 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x2a, 0xfc, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x09, 0x6c, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+ 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x2c, 0xfd, 0xff, 0xff,
+ 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0xae, 0x03, 0x63, 0x3c, 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,
- 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f,
- 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0x5a, 0xfe, 0xff, 0xff, 0x50, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x4c, 0xfe, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
+ 0x73, 0x65, 0x5f, 0x34, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f,
+ 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,
+ 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x00, 0xaa, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09,
+ 0x6c, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x9c, 0xfc, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00,
+ 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x42, 0xc8, 0x30, 0x3c,
+ 0x01, 0x00, 0x00, 0x00, 0x7a, 0x17, 0x30, 0x40, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
+ 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,
+ 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x52, 0x65, 0x6c, 0x75, 0x00, 0x00, 0x00,
+ 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0x2a, 0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0x58, 0x00, 0x00, 0x00,
+ 0x03, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x2c, 0xfe, 0xff, 0xff, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x61, 0x33, 0x28, 0x39, 0x20, 0x00, 0x00, 0x00,
+ 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,
+ 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74,
+ 0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x92, 0xfd, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x09, 0x6c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x94, 0xfe, 0xff, 0xff,
+ 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0xa0, 0xef, 0x36, 0x3c, 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,
0x73, 0x65, 0x5f, 0x33, 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f,
0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65,
0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65,
0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x10, 0x00, 0x00, 0x00, 0xba, 0xfe, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00,
- 0x0a, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0xac, 0xfe, 0xff, 0xff, 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
+ 0x10, 0x00, 0x00, 0x00, 0x12, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09,
+ 0x6c, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x04, 0xfe, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00,
+ 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x22, 0x61, 0x6b, 0x3c,
+ 0x01, 0x00, 0x00, 0x00, 0xc1, 0x75, 0x6a, 0x40, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75,
0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e,
0x73, 0x65, 0x5f, 0x32, 0x2f, 0x52, 0x65, 0x6c, 0x75, 0x00, 0x00, 0x00,
0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0xfe, 0xfe, 0xff, 0xff, 0x3c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
- 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xf0, 0xfe, 0xff, 0xff,
+ 0x92, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x02, 0x5c, 0x00, 0x00, 0x00,
+ 0x05, 0x00, 0x00, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x94, 0xff, 0xff, 0xff, 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x2d, 0x60, 0xd5, 0x38,
0x20, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,
0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32,
0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x46, 0xff, 0xff, 0xff, 0x50, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
- 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x38, 0xff, 0xff, 0xff,
- 0x34, 0x00, 0x00, 0x00, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69,
- 0x61, 0x6c, 0x5f, 0x31, 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32,
- 0x2f, 0x4d, 0x61, 0x74, 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64,
- 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74,
- 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00,
- 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0xa6, 0xff, 0xff, 0xff, 0x48, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00,
- 0x2c, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00,
- 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x43,
- 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
- 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x5f, 0x69, 0x6e, 0x70, 0x75,
- 0x74, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x14, 0x00, 0x04, 0x00,
- 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x0e, 0x00, 0x00, 0x00,
- 0x28, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00,
- 0x08, 0x00, 0x00, 0x00, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79,
- 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
- 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x0a, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00,
- 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x03, 0x00, 0x00, 0x00};
-const int g_sine_model_data_len = 2640;
+ 0xfe, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, 0x78, 0x00, 0x00, 0x00,
+ 0x06, 0x00, 0x00, 0x00, 0x34, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+ 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
+ 0x0c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x37, 0xb9, 0x87, 0x3b, 0x34, 0x00, 0x00, 0x00,
+ 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x31,
+ 0x2f, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x32, 0x2f, 0x4d, 0x61, 0x74,
+ 0x4d, 0x75, 0x6c, 0x2f, 0x52, 0x65, 0x61, 0x64, 0x56, 0x61, 0x72, 0x69,
+ 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73,
+ 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x8a, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x09, 0x60, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00,
+ 0x40, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7c, 0xff, 0xff, 0xff,
+ 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x80, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0xba, 0x3b, 0xc9, 0x3c,
+ 0x01, 0x00, 0x00, 0x00, 0x7e, 0x72, 0xc8, 0x40, 0x01, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x64, 0x65, 0x6e, 0x73,
+ 0x65, 0x5f, 0x32, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x69, 0x6e,
+ 0x74, 0x38, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x08, 0x00,
+ 0x07, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x09, 0x6c, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
+ 0x50, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, 0x00,
+ 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x0c, 0x00, 0x00, 0x00,
+ 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0xfb, 0x3a, 0xfd, 0x3b, 0x01, 0x00, 0x00, 0x00, 0x2e, 0xed, 0x76, 0x3f,
+ 0x01, 0x00, 0x00, 0x00, 0x29, 0xc7, 0x80, 0xbf, 0x0d, 0x00, 0x00, 0x00,
+ 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x6e, 0x74,
+ 0x38, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00,
+ 0x28, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x00,
+ 0x0e, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x06, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
+ 0x06, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x72, 0x0a, 0x00,
+ 0x0c, 0x00, 0x07, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0a, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00};
+const int g_sine_model_data_len = 2432;
diff --git a/tensorflow/lite/experimental/micro/examples/magic_wand/README.md b/tensorflow/lite/experimental/micro/examples/magic_wand/README.md
index f7bdfb8..bf095fd 100644
--- a/tensorflow/lite/experimental/micro/examples/magic_wand/README.md
+++ b/tensorflow/lite/experimental/micro/examples/magic_wand/README.md
@@ -13,6 +13,7 @@
- [Getting started](#getting-started)
- [Deploy to Arduino](#deploy-to-arduino)
- [Deploy to SparkFun Edge](#deploy-to-sparkfun-edge)
+- [Deploy to Adafruit devices](#deploy-to-adafruit)
- [Run the tests on a development machine](#run-the-tests-on-a-development-machine)
## Deploy to Arduino
@@ -317,6 +318,16 @@
To stop viewing the debug output with `screen`, hit `Ctrl+A`, immediately
followed by the `K` key, then hit the `Y` key.
+## Deploy to Adafruit devices <a name="deploy-to-adafruit"></a>
+
+This sample has been tested with the following Adafruit devices. To deploy to
+each device, read the accompanying guide on Adafruit's website.
+
+| Device | Guide |
+|--------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------|
+| [Adafruit EdgeBadge](https://www.adafruit.com/product/4400) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+| [Adafruit TensorFlow Lite for Microcontrollers Kit](https://www.adafruit.com/product/4317) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+
## Run the tests on a development machine
To compile and test this example on a desktop Linux or macOS machine, first
diff --git a/tensorflow/lite/experimental/micro/examples/micro_speech/README.md b/tensorflow/lite/experimental/micro/examples/micro_speech/README.md
index f017a9e..94a05eb 100644
--- a/tensorflow/lite/experimental/micro/examples/micro_speech/README.md
+++ b/tensorflow/lite/experimental/micro/examples/micro_speech/README.md
@@ -20,6 +20,7 @@
- [Deploy to SparkFun Edge](#deploy-to-sparkfun-edge)
- [Deploy to STM32F746](#deploy-to-STM32F746)
- [Deploy to NXP FRDM K66F](#deploy-to-nxp-frdm-k66f)
+- [Deploy to Adafruit devices](#deploy-to-adafruit)
- [Run on macOS](#run-on-macos)
- [Run the tests on a development machine](#run-the-tests-on-a-development-machine)
- [Calculating the input to the neural network](#calculating-the-input-to-the-neural-network)
@@ -398,6 +399,16 @@
in black color. If there is no output on the serial port, you can connect
headphones to the headphone port to check whether the audio loopback path works.
+## Deploy to Adafruit devices <a name="deploy-to-adafruit"></a>
+
+This sample has been tested with the following Adafruit devices. To deploy to
+each device, read the accompanying guide on Adafruit's website.
+
+| Device | Guide |
+|--------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------|
+| [Adafruit EdgeBadge](https://www.adafruit.com/product/4400) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+| [Adafruit TensorFlow Lite for Microcontrollers Kit](https://www.adafruit.com/product/4317) | [TensorFlow Lite for EdgeBadge Kit Quickstart](https://learn.adafruit.com/tensorflow-lite-for-edgebadge-kit-quickstart?view=all) |
+
## Run on macOS
The example contains an audio provider compatible with macOS. If you have access
diff --git a/tensorflow/lite/experimental/micro/examples/person_detection/utils/BUILD b/tensorflow/lite/experimental/micro/examples/person_detection/utils/BUILD
index a9f0b3d..55ce96a 100644
--- a/tensorflow/lite/experimental/micro/examples/person_detection/utils/BUILD
+++ b/tensorflow/lite/experimental/micro/examples/person_detection/utils/BUILD
@@ -1,8 +1,5 @@
# Description:
# TensorFlow Lite for Microcontrollers Vision Example Utils.
-
-package(default_visibility = ["//visibility:public"])
-
licenses(["notice"]) # Apache 2.0
py_binary(
@@ -10,10 +7,7 @@
srcs = ["raw_to_bitmap.py"],
python_version = "PY3",
srcs_version = "PY3ONLY",
- deps = [
- ":raw_to_bitmap_lib",
- "//tensorflow:tensorflow_py",
- ],
+ deps = ["//third_party/py/numpy"],
)
py_library(
@@ -21,7 +15,7 @@
srcs = ["raw_to_bitmap.py"],
srcs_version = "PY2AND3",
deps = [
- "//tensorflow:tensorflow_py",
+ "//third_party/py/numpy",
],
)
@@ -30,5 +24,10 @@
srcs = ["raw_to_bitmap_test.py"],
data = glob(["testdata/**"]),
python_version = "PY3",
- deps = [":raw_to_bitmap_lib"],
+ tags = ["noubsan"], # TODO(b/144512025): Fix raw_to_bitmap_test to fix ubsan failure.
+ deps = [
+ ":raw_to_bitmap_lib",
+ "//tensorflow/python:client_testlib",
+ "//third_party/py/numpy",
+ ],
)
diff --git a/tensorflow/lite/experimental/micro/kernels/BUILD b/tensorflow/lite/experimental/micro/kernels/BUILD
index 797acbb..6dd245e 100644
--- a/tensorflow/lite/experimental/micro/kernels/BUILD
+++ b/tensorflow/lite/experimental/micro/kernels/BUILD
@@ -328,9 +328,6 @@
srcs = [
"mul_test.cc",
],
- tags = [
- "noasan", # b/144450349
- ],
deps = [
":all_ops_resolver",
"//tensorflow/lite/c:c_api_internal",
@@ -517,6 +514,7 @@
":all_ops_resolver",
"//tensorflow/lite/c:c_api_internal",
"//tensorflow/lite/experimental/micro:micro_framework",
+ "//tensorflow/lite/experimental/micro:micro_utils",
"//tensorflow/lite/experimental/micro/testing:micro_test",
"//tensorflow/lite/kernels/internal:tensor",
],
diff --git a/tensorflow/lite/experimental/micro/kernels/all_ops_resolver.cc b/tensorflow/lite/experimental/micro/kernels/all_ops_resolver.cc
index 45e6610..7a0f779 100644
--- a/tensorflow/lite/experimental/micro/kernels/all_ops_resolver.cc
+++ b/tensorflow/lite/experimental/micro/kernels/all_ops_resolver.cc
@@ -18,36 +18,16 @@
namespace ops {
namespace micro {
-namespace {
-
-// TODO(b/143180352): remove version 3 once we change hello_world sample. The
-// old versioning scheme made version 3 "work" because it fell between versions
-// 1 and 4. Adding version 3 back in is a temporary hack, and intermediate
-// versions were never guaranteed to work on micro.
-const int kFullyConnectedVersions[] = {1, 3, 4};
-const int kConv2dVersions[] = {1, 3};
-const int kDepthwiseConv2dVersions[] = {1, 3};
-const int kSplitVersions[] = {1, 2, 3};
-const int kDequantizeVersions[] = {1, 2};
-
-} // namespace
-// Each op resolver entry registration is as follows:
-// AddBuiltin(<operator name>, <registration>, <min version>, <max version>)
AllOpsResolver::AllOpsResolver() {
- AddBuiltin(
- BuiltinOperator_FULLY_CONNECTED, Register_FULLY_CONNECTED(),
- kFullyConnectedVersions,
- sizeof(kFullyConnectedVersions) / sizeof(kFullyConnectedVersions[0]));
+ AddBuiltin(BuiltinOperator_DEPTHWISE_CONV_2D, Register_DEPTHWISE_CONV_2D());
+ AddBuiltin(BuiltinOperator_FULLY_CONNECTED, Register_FULLY_CONNECTED(),
+ /* min_version */ 1,
+ /* max_version */ 4);
AddBuiltin(BuiltinOperator_MAX_POOL_2D, Register_MAX_POOL_2D());
AddBuiltin(BuiltinOperator_SOFTMAX, Register_SOFTMAX());
AddBuiltin(BuiltinOperator_LOGISTIC, Register_LOGISTIC());
AddBuiltin(BuiltinOperator_SVDF, Register_SVDF());
- AddBuiltin(BuiltinOperator_CONV_2D, Register_CONV_2D(), kConv2dVersions,
- sizeof(kConv2dVersions) / sizeof(kConv2dVersions[0]));
- AddBuiltin(
- BuiltinOperator_DEPTHWISE_CONV_2D, Register_DEPTHWISE_CONV_2D(),
- kDepthwiseConv2dVersions,
- sizeof(kDepthwiseConv2dVersions) / sizeof(kDepthwiseConv2dVersions[0]));
+ AddBuiltin(BuiltinOperator_CONV_2D, Register_CONV_2D());
AddBuiltin(BuiltinOperator_AVERAGE_POOL_2D, Register_AVERAGE_POOL_2D());
AddBuiltin(BuiltinOperator_ABS, Register_ABS());
AddBuiltin(BuiltinOperator_SIN, Register_SIN());
@@ -76,16 +56,15 @@
AddBuiltin(BuiltinOperator_ROUND, Register_ROUND());
AddBuiltin(BuiltinOperator_STRIDED_SLICE, Register_STRIDED_SLICE());
AddBuiltin(BuiltinOperator_PACK, Register_PACK());
- AddBuiltin(BuiltinOperator_SPLIT, Register_SPLIT(), kSplitVersions,
- sizeof(kSplitVersions) / sizeof(kSplitVersions[0]));
+ AddBuiltin(BuiltinOperator_SPLIT, Register_SPLIT(),
+ /* min_version */ 1,
+ /* max_version */ 3);
AddBuiltin(BuiltinOperator_UNPACK, Register_UNPACK());
AddBuiltin(BuiltinOperator_NEG, Register_NEG());
AddBuiltin(BuiltinOperator_ADD, Register_ADD());
AddBuiltin(BuiltinOperator_MUL, Register_MUL());
- AddBuiltin(BuiltinOperator_QUANTIZE, Register_QUANTIZE());
- AddBuiltin(BuiltinOperator_DEQUANTIZE, Register_DEQUANTIZE(),
- kDequantizeVersions,
- sizeof(kDequantizeVersions) / sizeof(kDequantizeVersions[0]));
+ AddBuiltin(BuiltinOperator_QUANTIZE, Register_QUANTIZE(), 1, 4);
+ AddBuiltin(BuiltinOperator_DEQUANTIZE, Register_DEQUANTIZE(), 1, 4);
AddBuiltin(BuiltinOperator_RELU, Register_RELU());
AddBuiltin(BuiltinOperator_RELU6, Register_RELU6());
}
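
The new registration scheme replaces the per-op version arrays (e.g. `{1, 3, 4}`) with an inclusive `[min_version, max_version]` range, which also picks up intermediate versions such as 2. A minimal, self-contained sketch of how such a range expands into one registration entry per version and how an exact-version lookup then resolves — illustrative names only, not the real TFLite Micro types:

```cc
#include <cstdio>
#include <vector>

struct Registration { int op; int version; };

class TinyResolver {
 public:
  // Mirrors AddBuiltin(op, registration, min_version, max_version): every
  // version in the inclusive range gets its own registration entry.
  void AddBuiltin(int op, int min_version = 1, int max_version = 1) {
    for (int v = min_version; v <= max_version; ++v) {
      registrations_.push_back({op, v});
    }
  }

  // Mirrors FindOp: only an exactly matching (op, version) pair resolves.
  const Registration* FindOp(int op, int version) const {
    for (const auto& r : registrations_) {
      if (r.op == op && r.version == version) return &r;
    }
    return nullptr;
  }

 private:
  std::vector<Registration> registrations_;
};

int main() {
  TinyResolver resolver;
  constexpr int kFullyConnected = 9;  // arbitrary stand-in op code
  resolver.AddBuiltin(kFullyConnected, /*min_version=*/1, /*max_version=*/4);
  // Versions 1 through 4 all resolve; version 5 does not.
  std::printf("v3 found: %d, v5 found: %d\n",
              resolver.FindOp(kFullyConnected, 3) != nullptr,
              resolver.FindOp(kFullyConnected, 5) != nullptr);
}
```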
diff --git a/tensorflow/lite/experimental/micro/kernels/cmsis-nn/conv.cc b/tensorflow/lite/experimental/micro/kernels/cmsis-nn/conv.cc
index 145f8fb..2db1a0f 100644
--- a/tensorflow/lite/experimental/micro/kernels/cmsis-nn/conv.cc
+++ b/tensorflow/lite/experimental/micro/kernels/cmsis-nn/conv.cc
@@ -35,7 +35,7 @@
constexpr int kFilterTensor = 1;
constexpr int kBiasTensor = 2;
constexpr int kOutputTensor = 0;
-constexpr int kMaxChannels = 64;
+constexpr int kMaxChannels = 256;
const int kTensorNotAllocated = -1;
@@ -153,7 +153,6 @@
TfLiteContext* context, TfLiteNode* node, TfLiteConvParams* params,
OpData* data, const TfLiteTensor* input, const TfLiteTensor* filter,
const TfLiteTensor* bias, TfLiteTensor* output, TfLiteTensor* im2col) {
-#if defined(ARM_MATH_DSP) && defined(ARM_MATH_LOOPUNROLL)
ConvParams op_params;
op_params.input_offset = -input->params.zero_point;
op_params.output_offset = output->params.zero_point;
@@ -164,12 +163,14 @@
op_params.padding_values.height = data->padding.height;
op_params.padding_values.width = data->padding.width;
+#if defined(ARM_MATH_DSP) && defined(ARM_MATH_LOOPUNROLL)
+
RuntimeShape filter_shape = GetTensorShape(filter);
RuntimeShape input_shape = GetTensorShape(input);
RuntimeShape output_shape = GetTensorShape(output);
RuntimeShape bias_shape = GetTensorShape(bias);
- // TODO(b/130439627): Use calculated value for clamping.
+  // Set the min and max values of the output.
const int32 output_activation_min = std::numeric_limits<int8_t>::min();
const int32 output_activation_max = std::numeric_limits<int8_t>::max();
@@ -235,7 +236,17 @@
}
}
#else
-#error ARM_MATH_DSP and ARM_MATH_LOOPUNROLL must be set
+#pragma message( \
+ "CMSIS-NN optimization for conv not available for this target. Using reference kernel.")
+
+ reference_integer_ops::ConvPerChannel(
+ op_params, data->per_channel_output_multiplier,
+ data->per_channel_output_shift, GetTensorShape(input),
+ GetTensorData<int8>(input), GetTensorShape(filter),
+ GetTensorData<int8>(filter), GetTensorShape(bias),
+ GetTensorData<int32>(bias), GetTensorShape(output),
+ GetTensorData<int8>(output));
+
#endif
return kTfLiteOk;
}
@@ -285,7 +296,9 @@
int output_height = output->dims->data[1];
OpData data;
- if (input->type != kTfLiteFloat32) {
+
+ // All per-channel quantized tensors need valid zero point and scale arrays.
+ if (input->type == kTfLiteInt8) {
TF_LITE_ENSURE_EQ(context, filter->quantization.type,
kTfLiteAffineQuantization);
@@ -294,6 +307,13 @@
filter->quantization.params);
TF_LITE_ENSURE(context, affine_quantization);
TF_LITE_ENSURE(context, affine_quantization->scale);
+ TF_LITE_ENSURE(context, affine_quantization->zero_point);
+ // Conv is quantized along dimension 0:
+ // https://www.tensorflow.org/lite/performance/quantization_spec
+ TF_LITE_ENSURE_EQ(context, filter->dims->data[0],
+ affine_quantization->scale->size);
+ TF_LITE_ENSURE_EQ(context, filter->dims->data[0],
+ affine_quantization->zero_point->size);
}
TF_LITE_ENSURE_STATUS(CalculateOpData(
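
The new `kTfLiteInt8` checks enforce the per-channel contract from the quantization spec linked above: conv filters are quantized along dimension 0, so the `scale` and `zero_point` arrays must each carry one entry per output channel. A standalone sketch of that invariant, using simplified stand-in types rather than the real `TfLiteAffineQuantization`:

```cc
#include <cstdio>
#include <vector>

// Stand-in for TfLiteAffineQuantization: one scale and one zero point per
// quantized dimension.
struct AffineQuantization {
  std::vector<float> scale;
  std::vector<int> zero_point;
};

bool CheckConvPerChannelQuantization(const std::vector<int>& filter_dims,
                                     const AffineQuantization& q) {
  // Conv filters are laid out {output_channels, height, width, input_channels}
  // and quantized along dimension 0.
  const size_t output_channels = static_cast<size_t>(filter_dims[0]);
  return q.scale.size() == output_channels &&
         q.zero_point.size() == output_channels;
}

int main() {
  AffineQuantization q{{0.5f, 0.25f, 0.125f}, {0, 0, 0}};
  std::printf("3 output channels: %d\n",
              CheckConvPerChannelQuantization({3, 3, 3, 8}, q));  // 1
  std::printf("4 output channels: %d\n",
              CheckConvPerChannelQuantization({4, 3, 3, 8}, q));  // 0
}
```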
diff --git a/tensorflow/lite/experimental/micro/kernels/depthwise_conv.cc b/tensorflow/lite/experimental/micro/kernels/depthwise_conv.cc
index dad4cb0..15f6dac 100644
--- a/tensorflow/lite/experimental/micro/kernels/depthwise_conv.cc
+++ b/tensorflow/lite/experimental/micro/kernels/depthwise_conv.cc
@@ -35,7 +35,7 @@
constexpr int kFilterTensor = 1;
constexpr int kBiasTensor = 2;
constexpr int kOutputTensor = 0;
-constexpr int kMaxChannels = 64;
+constexpr int kMaxChannels = 256;
struct OpData {
TfLitePaddingValues padding;
@@ -79,6 +79,16 @@
GetOptionalInputTensor(context, node, kBiasTensor);
TfLiteTensor* output = GetOutput(context, node, kOutputTensor);
+ // Ensure filter and bias channel count does not exceed space reserved for
+ // quantization metadata.
+ const auto filter_quantization =
+ reinterpret_cast<TfLiteAffineQuantization*>(
+ filter->quantization.params);
+ const auto bias_quantization =
+ reinterpret_cast<TfLiteAffineQuantization*>(bias->quantization.params);
+ TF_LITE_ENSURE(context, filter_quantization->scale->size <= kMaxChannels);
+ TF_LITE_ENSURE(context, bias_quantization->scale->size <= kMaxChannels);
+
TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams(
context, input, filter, bias, output, params->activation,
&data->output_multiplier, &data->output_shift,
@@ -233,7 +243,6 @@
TF_LITE_ENSURE_STATUS(CalculateOpData(context, node, params, width, height,
filter_width, filter_height, data_type,
&data));
-
// TODO(aselle): Consider whether float conv and quantized conv should be
// separate ops to avoid dispatch overhead here.
switch (input->type) { // Already know in/out types are same.
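
The `kMaxChannels` bump from 64 to 256, together with the new size checks, matters because per-channel quantization metadata lives in fixed-size arrays; without the guard, a model with more channels than the build reserved space for would silently overflow them. An illustrative sketch of the pattern, with an assumed (not actual) `OpData` layout:

```cc
#include <cstdint>
#include <cstdio>

constexpr int kMaxChannels = 256;

// Assumed layout: per-channel multipliers and shifts in static storage.
struct OpData {
  int32_t per_channel_output_multiplier[kMaxChannels];
  int32_t per_channel_output_shift[kMaxChannels];
};

bool PopulatePerChannelData(int num_channels, OpData* data) {
  // The equivalent of the TF_LITE_ENSURE guard added above.
  if (num_channels > kMaxChannels) return false;
  for (int c = 0; c < num_channels; ++c) {
    data->per_channel_output_multiplier[c] = 1;  // placeholder values
    data->per_channel_output_shift[c] = 0;
  }
  return true;
}

int main() {
  OpData data;
  std::printf("%d %d\n", PopulatePerChannelData(128, &data),  // 1
              PopulatePerChannelData(512, &data));            // 0
}
```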
diff --git a/tensorflow/lite/experimental/micro/kernels/depthwise_conv_test.cc b/tensorflow/lite/experimental/micro/kernels/depthwise_conv_test.cc
index e6aefc7..e9049b7 100644
--- a/tensorflow/lite/experimental/micro/kernels/depthwise_conv_test.cc
+++ b/tensorflow/lite/experimental/micro/kernels/depthwise_conv_test.cc
@@ -163,6 +163,12 @@
IntArrayFromInts(filter_zero_points)};
tensors[1].quantization = {kTfLiteAffineQuantization, &filter_quant};
+ float bias_scales[] = {1, filter_scale * input_scale};
+ int bias_zero_points[] = {1, 128};
+ TfLiteAffineQuantization bias_quant = {FloatArrayFromFloats(bias_scales),
+ IntArrayFromInts(bias_zero_points)};
+ tensors[2].quantization = {kTfLiteAffineQuantization, &bias_quant};
+
AsymmetricQuantize(golden, golden_quantized, output_dims_count, output_scale,
output_zero_point);
ValidateDepthwiseConvGoldens(tensors, tensors_size, golden_quantized,
diff --git a/tensorflow/lite/experimental/micro/kernels/dequantize_test.cc b/tensorflow/lite/experimental/micro/kernels/dequantize_test.cc
index 0e566fa..127a924 100644
--- a/tensorflow/lite/experimental/micro/kernels/dequantize_test.cc
+++ b/tensorflow/lite/experimental/micro/kernels/dequantize_test.cc
@@ -56,9 +56,9 @@
TfLiteContext context;
PopulateContext(tensors, tensors_size, &context);
- // Version 2 of dequantize supports int8 quantization.
+ // Version 4 ops support int8 quantization.
const TfLiteRegistration* registration =
- resolver.FindOp(tflite::BuiltinOperator_DEQUANTIZE, 2);
+ resolver.FindOp(tflite::BuiltinOperator_DEQUANTIZE, 4);
TF_LITE_MICRO_EXPECT_NE(nullptr, registration);
diff --git a/tensorflow/lite/experimental/micro/kernels/mul.cc b/tensorflow/lite/experimental/micro/kernels/mul.cc
index cbd6251..3652f1c 100644
--- a/tensorflow/lite/experimental/micro/kernels/mul.cc
+++ b/tensorflow/lite/experimental/micro/kernels/mul.cc
@@ -60,20 +60,16 @@
&data->output_activation_max);
}
- double real_multiplier =
- input1->params.scale * input2->params.scale / output->params.scale;
- QuantizeMultiplier(real_multiplier, &data->output_multiplier,
- &data->output_shift);
+ if (output->type == kTfLiteUInt8 || output->type == kTfLiteInt8) {
+ double real_multiplier =
+ input1->params.scale * input2->params.scale / output->params.scale;
+ QuantizeMultiplier(real_multiplier, &data->output_multiplier,
+ &data->output_shift);
+ }
return kTfLiteOk;
}
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
- return nullptr;
-}
-
-void Free(TfLiteContext* context, void* buffer) {}
-
TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
return kTfLiteOk;
}
@@ -172,7 +168,7 @@
} // namespace mul
TfLiteRegistration* Register_MUL() {
- static TfLiteRegistration r = {mul::Init, mul::Free, mul::Prepare, mul::Eval};
+ static TfLiteRegistration r = {nullptr, nullptr, mul::Prepare, mul::Eval};
return &r;
}
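
The multiplier computation now guarded by the type check implements the standard quantized-multiply identity: the real multiplier `input1_scale * input2_scale / output_scale` is decomposed into a Q31 fixed-point multiplier plus a power-of-two shift so the kernel can run integer-only. A rough sketch of the decomposition (not TFLite's actual `QuantizeMultiplier`):

```cc
#include <cinttypes>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Decomposes real_multiplier as (quantized_multiplier / 2^31) * 2^shift.
// A production version must also handle the significand rounding up to 2^31.
void DecomposeMultiplier(double real_multiplier, int32_t* quantized_multiplier,
                         int* shift) {
  const double significand = std::frexp(real_multiplier, shift);
  *quantized_multiplier =
      static_cast<int32_t>(std::round(significand * (1ll << 31)));
}

int main() {
  const double input1_scale = 0.02, input2_scale = 0.05, output_scale = 0.001;
  const double real_multiplier =
      input1_scale * input2_scale / output_scale;  // == 1.0 here
  int32_t multiplier;
  int shift;
  DecomposeMultiplier(real_multiplier, &multiplier, &shift);
  std::printf("M=%f -> multiplier=%" PRId32 ", shift=%d\n", real_multiplier,
              multiplier, shift);  // multiplier=1073741824 (0.5 in Q31), shift=1
}
```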
diff --git a/tensorflow/lite/experimental/micro/kernels/quantize_test.cc b/tensorflow/lite/experimental/micro/kernels/quantize_test.cc
index 07b0e21..26b1948 100644
--- a/tensorflow/lite/experimental/micro/kernels/quantize_test.cc
+++ b/tensorflow/lite/experimental/micro/kernels/quantize_test.cc
@@ -55,9 +55,9 @@
TfLiteContext context;
PopulateContext(tensors, tensors_size, &context);
- // Version 1 of quantize supports int8 and uint8 quantization.
+ // Version 4 ops support int8 quantization.
const TfLiteRegistration* registration =
- resolver.FindOp(tflite::BuiltinOperator_QUANTIZE, 1);
+ resolver.FindOp(tflite::BuiltinOperator_QUANTIZE, 4);
TF_LITE_MICRO_EXPECT_NE(nullptr, registration);
diff --git a/tensorflow/lite/experimental/micro/kernels/reshape_test.cc b/tensorflow/lite/experimental/micro/kernels/reshape_test.cc
index 2e3e620..17f8f21 100644
--- a/tensorflow/lite/experimental/micro/kernels/reshape_test.cc
+++ b/tensorflow/lite/experimental/micro/kernels/reshape_test.cc
@@ -13,9 +13,14 @@
limitations under the License.
==============================================================================*/
-#include "tensorflow/lite/c/builtin_op_data.h"
+#include <stdint.h>
+
+#include <initializer_list>
+
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/kernels/all_ops_resolver.h"
+#include "tensorflow/lite/experimental/micro/micro_utils.h"
+#include "tensorflow/lite/experimental/micro/test_helpers.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"
#include "tensorflow/lite/experimental/micro/testing/test_utils.h"
#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
@@ -24,14 +29,6 @@
namespace testing {
namespace {
-TfLiteReshapeParams create_params(int* shape_data) {
- TfLiteReshapeParams op_params = {};
- op_params.num_dimensions = shape_data[0];
- for (int i = 0; i < shape_data[0]; ++i)
- op_params.shape[i] = shape_data[i + 1];
- return op_params;
-}
-
// If expected output is empty, the test is expected to fail.
template <typename T>
void TestReshapeImpl(TfLiteTensor* input_tensor, TfLiteTensor* shape_tensor,
@@ -67,21 +64,18 @@
const TfLiteRegistration* registration =
resolver.FindOp(tflite::BuiltinOperator_RESHAPE, 1);
TF_LITE_MICRO_EXPECT_NE(nullptr, registration);
- TfLiteReshapeParams builtin_data =
- create_params(reinterpret_cast<int*>(output_tensor->dims));
- const char* init_data = reinterpret_cast<const char*>(&builtin_data);
- size_t init_data_size = 0;
+
void* user_data = nullptr;
node.temporaries = nullptr;
node.user_data = user_data;
- node.builtin_data = reinterpret_cast<void*>(&builtin_data);
+ node.builtin_data = nullptr;
node.custom_initial_data = nullptr;
node.custom_initial_data_size = 0;
node.delegate = nullptr;
- if (registration->init) {
- user_data = registration->init(&context, init_data, init_data_size);
- }
+ TF_LITE_MICRO_EXPECT_EQ(registration->init, nullptr);
+ TF_LITE_MICRO_EXPECT_EQ(registration->free, nullptr);
+
if (registration->prepare) {
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, registration->prepare(&context, &node));
}
@@ -91,9 +85,6 @@
return;
}
TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, registration->invoke(&context, &node));
- if (registration->free) {
- registration->free(&context, user_data);
- }
const int output_dims_count = ElementCount(*output_tensor->dims);
const T* output_data = GetTensorData<T>(output_tensor);
diff --git a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.cc b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.cc
index 62e4c9f..c54e9e4 100644
--- a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.cc
+++ b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.cc
@@ -17,12 +17,6 @@
namespace tflite {
-namespace {
-
-const int kDefaultOpVersions[] = {1};
-
-} // namespace
-
const TfLiteRegistration* MicroMutableOpResolver::FindOp(
tflite::BuiltinOperator op, int version) const {
for (int i = 0; i < registrations_len_; ++i) {
@@ -48,17 +42,10 @@
return nullptr;
}
-void MicroMutableOpResolver::AddBuiltin(
- tflite::BuiltinOperator op, const TfLiteRegistration* registration) {
- return AddBuiltin(op, registration, kDefaultOpVersions, 1);
-}
-
void MicroMutableOpResolver::AddBuiltin(tflite::BuiltinOperator op,
- const TfLiteRegistration* registration,
- const int* supported_versions,
- int supported_versions_len) {
- for (int i = 0; i < supported_versions_len; ++i) {
- int version = supported_versions[i];
+ TfLiteRegistration* registration,
+ int min_version, int max_version) {
+ for (int version = min_version; version <= max_version; ++version) {
if (registrations_len_ >= TFLITE_REGISTRATIONS_MAX) {
// TODO(petewarden) - Add error reporting hooks so we can report this!
return;
@@ -73,16 +60,9 @@
}
void MicroMutableOpResolver::AddCustom(const char* name,
- const TfLiteRegistration* registration) {
- return AddCustom(name, registration, kDefaultOpVersions, 1);
-}
-
-void MicroMutableOpResolver::AddCustom(const char* name,
- const TfLiteRegistration* registration,
- const int* supported_versions,
- int supported_versions_len) {
- for (int i = 0; i < supported_versions_len; ++i) {
- int version = supported_versions[i];
+ TfLiteRegistration* registration,
+ int min_version, int max_version) {
+ for (int version = min_version; version <= max_version; ++version) {
if (registrations_len_ >= TFLITE_REGISTRATIONS_MAX) {
// TODO(petewarden) - Add error reporting hooks so we can report this!
return;
diff --git a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h
index 4721ad5..f613203 100644
--- a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h
+++ b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h
@@ -15,10 +15,8 @@
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_MICRO_MUTABLE_OP_RESOLVER_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_MICRO_MUTABLE_OP_RESOLVER_H_
-#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/core/api/op_resolver.h"
#include "tensorflow/lite/experimental/micro/compatibility.h"
-#include "tensorflow/lite/schema/schema_generated.h"
#ifndef TFLITE_REGISTRATIONS_MAX
#define TFLITE_REGISTRATIONS_MAX (128)
@@ -26,30 +24,15 @@
namespace tflite {
-// Op versions discussed in this file are enumerated here:
-// tensorflow/lite/tools/versioning/op_version.cc
-
class MicroMutableOpResolver : public OpResolver {
public:
const TfLiteRegistration* FindOp(tflite::BuiltinOperator op,
int version) const override;
const TfLiteRegistration* FindOp(const char* op, int version) const override;
-
- // Add a builtin op which supports only version 1.
- void AddBuiltin(tflite::BuiltinOperator op,
- const TfLiteRegistration* registration);
-
- // Add a builtin op which supports the specified version(s).
- void AddBuiltin(tflite::BuiltinOperator op,
- const TfLiteRegistration* registration,
- const int* supported_versions, int supported_versions_len);
-
- // Add a custom op which supports only version 1.
- void AddCustom(const char* name, const TfLiteRegistration* registration);
-
- // Add a custom op which supports the specified version(s).
- void AddCustom(const char* name, const TfLiteRegistration* registration,
- const int* supported_versions, int supported_versions_len);
+ void AddBuiltin(tflite::BuiltinOperator op, TfLiteRegistration* registration,
+ int min_version = 1, int max_version = 1);
+ void AddCustom(const char* name, TfLiteRegistration* registration,
+ int min_version = 1, int max_version = 1);
private:
TfLiteRegistration registrations_[TFLITE_REGISTRATIONS_MAX];
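
With the simplified signatures above, the defaults register version 1 only, while an explicit range registers every version in between. A short usage sketch, assuming the `TfLiteRegistration` pointers are supplied by the caller and the builtin-operator enums are in scope via the resolver header:

```cc
#include "tensorflow/lite/experimental/micro/micro_mutable_op_resolver.h"

void RegisterOps(tflite::MicroMutableOpResolver* resolver,
                 TfLiteRegistration* softmax_registration,
                 TfLiteRegistration* fully_connected_registration) {
  // Version 1 only (the defaults are min_version = 1, max_version = 1).
  resolver->AddBuiltin(tflite::BuiltinOperator_SOFTMAX, softmax_registration);
  // Versions 1 through 4 inclusive, matching the all_ops_resolver.cc change.
  resolver->AddBuiltin(tflite::BuiltinOperator_FULLY_CONNECTED,
                       fully_connected_registration,
                       /*min_version=*/1, /*max_version=*/4);
}
```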
diff --git a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver_test.cc b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver_test.cc
index d96754b..f551830 100644
--- a/tensorflow/lite/experimental/micro/micro_mutable_op_resolver_test.cc
+++ b/tensorflow/lite/experimental/micro/micro_mutable_op_resolver_test.cc
@@ -50,11 +50,8 @@
tflite::MockPrepare, tflite::MockInvoke};
MicroMutableOpResolver micro_mutable_op_resolver;
- const int conv2d_versions[] = {0, 1, 2};
- const int custom_op_versions[] = {0, 1, 2, 3};
- micro_mutable_op_resolver.AddBuiltin(BuiltinOperator_CONV_2D, &r,
- conv2d_versions, 3);
- micro_mutable_op_resolver.AddCustom("mock_custom", &r, custom_op_versions, 4);
+ micro_mutable_op_resolver.AddBuiltin(BuiltinOperator_CONV_2D, &r, 0, 2);
+ micro_mutable_op_resolver.AddCustom("mock_custom", &r, 0, 3);
OpResolver* resolver = µ_mutable_op_resolver;
const TfLiteRegistration* registration =
diff --git a/tensorflow/lite/experimental/micro/micro_optional_debug_tools.cc b/tensorflow/lite/experimental/micro/micro_optional_debug_tools.cc
index e27317a..f36a219 100644
--- a/tensorflow/lite/experimental/micro/micro_optional_debug_tools.cc
+++ b/tensorflow/lite/experimental/micro/micro_optional_debug_tools.cc
@@ -14,6 +14,8 @@
==============================================================================*/
#include "tensorflow/lite/experimental/micro/micro_optional_debug_tools.h"
+#include <inttypes.h>
+
#include "tensorflow/lite/schema/schema_generated.h"
namespace tflite {
namespace {
@@ -122,7 +124,7 @@
printf("Node %3zu Operator Custom Name %s\n", node_index,
reg->custom_name);
} else {
- printf("Node %3zu Operator Builtin Code %3d %s\n", node_index,
+ printf("Node %3zu Operator Builtin Code %" PRIu32 " %s\n", node_index,
reg->builtin_code, EnumNamesBuiltinOperator()[reg->builtin_code]);
}
printf(" Inputs:");
diff --git a/tensorflow/lite/experimental/micro/simple_memory_allocator.cc b/tensorflow/lite/experimental/micro/simple_memory_allocator.cc
index 5a0fca5..4f8a324 100644
--- a/tensorflow/lite/experimental/micro/simple_memory_allocator.cc
+++ b/tensorflow/lite/experimental/micro/simple_memory_allocator.cc
@@ -43,7 +43,6 @@
// is not what we expected.
SimpleMemoryAllocator child = *this;
child.parent_allocator_ = this;
- // With C++ copy elision, &child should be available after return.
has_child_allocator_ = true;
return child;
}
diff --git a/tensorflow/lite/experimental/micro/tools/make/Makefile b/tensorflow/lite/experimental/micro/tools/make/Makefile
index 510bb81..fadaa78 100644
--- a/tensorflow/lite/experimental/micro/tools/make/Makefile
+++ b/tensorflow/lite/experimental/micro/tools/make/Makefile
@@ -70,6 +70,7 @@
# these settings are for the target compiler.
CXXFLAGS := -O3 -DNDEBUG
CXXFLAGS += -std=c++11 -g -DTF_LITE_STATIC_MEMORY
+CXXFLAGS += -fno-rtti
CCFLAGS := -DNDEBUG -g -DTF_LITE_STATIC_MEMORY
LDOPTS := -L/usr/local/lib
ARFLAGS := -r
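
`-fno-rtti` trims binary size but removes `dynamic_cast` and `typeid`, so any downcast has to be a `static_cast` whose correctness is guaranteed by construction — the same pattern `GetResourceVariable()` uses later in this change. An illustrative sketch with made-up types:

```cc
struct Base {
  virtual ~Base() {}
};

struct Derived : Base {
  int payload = 42;
};

int ReadPayload(Base* base) {
  // Under -fno-rtti, dynamic_cast<Derived*>(base) would not compile; the
  // caller must guarantee that `base` really points at a Derived.
  return static_cast<Derived*>(base)->payload;
}

int main() {
  Derived d;
  return ReadPayload(&d) == 42 ? 0 : 1;
}
```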
diff --git a/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c b/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c
index 6b8c171..861778c 100644
--- a/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c
+++ b/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c
@@ -44,6 +44,7 @@
if (audio_file_size !=
fread(audio_data, sizeof(int16_t), audio_file_size, fp)) {
fprintf(stderr, "Failed to read in all audio data\n");
+ fclose(fp);
return 1;
}
@@ -65,5 +66,6 @@
FrontendFreeStateContents(&frontend_state);
free(original_audio_data);
+ fclose(fp);
return 0;
}
diff --git a/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_main.c b/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_main.c
index 21d660c..e9c89b5 100644
--- a/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_main.c
+++ b/tensorflow/lite/experimental/microfrontend/lib/frontend_memmap_main.c
@@ -34,6 +34,7 @@
if (audio_file_size !=
fread(audio_data, sizeof(int16_t), audio_file_size, fp)) {
fprintf(stderr, "Failed to read in all audio data\n");
+ fclose(fp);
return 1;
}
@@ -54,5 +55,6 @@
}
free(original_audio_data);
+ fclose(fp);
return 0;
}
diff --git a/tensorflow/lite/experimental/resource/BUILD b/tensorflow/lite/experimental/resource/BUILD
new file mode 100644
index 0000000..b7dd0be
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/BUILD
@@ -0,0 +1,25 @@
+package(
+ default_visibility = ["//visibility:public"],
+ licenses = ["notice"], # Apache 2.0
+)
+
+cc_library(
+ name = "resource",
+ srcs = [
+ "resource_variable.cc",
+ "static_hashtable.cc",
+ ],
+ hdrs = [
+ "lookup_interfaces.h",
+ "lookup_util.h",
+ "resource_base.h",
+ "resource_variable.h",
+ "static_hashtable.h",
+ ],
+ deps = [
+ "//tensorflow/lite:string_util",
+ "//tensorflow/lite/c:c_api_internal",
+ "//tensorflow/lite/kernels/internal:compatibility",
+ "//tensorflow/lite/kernels/internal:tensor",
+ ],
+)
diff --git a/tensorflow/lite/experimental/resource/lookup_interfaces.h b/tensorflow/lite/experimental/resource/lookup_interfaces.h
new file mode 100644
index 0000000..42bb8e4
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/lookup_interfaces.h
@@ -0,0 +1,64 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_INTERFACES_H_
+#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_INTERFACES_H_
+
+#include <unordered_map>
+
+#include "tensorflow/lite/c/c_api_internal.h"
+#include "tensorflow/lite/experimental/resource/lookup_util.h"
+#include "tensorflow/lite/experimental/resource/resource_base.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace tflite {
+namespace resource {
+
+/// WARNING: Experimental interface, subject to change.
+// A resource hash table interface. It's similar to TensorFlow core's
+// LookupInterface class, but it is identified by an int32 ID in TFLite
+// (instead of by a Resource handle as in TensorFlow).
+class LookupInterface : public ResourceBase {
+ public:
+ virtual TfLiteStatus Lookup(TfLiteContext* context, const TfLiteTensor* keys,
+ TfLiteTensor* values,
+ const TfLiteTensor* default_value) = 0;
+ virtual TfLiteStatus Import(TfLiteContext* context, const TfLiteTensor* keys,
+ const TfLiteTensor* values) = 0;
+ virtual size_t Size() = 0;
+
+ virtual TfLiteType GetKeyType() const = 0;
+ virtual TfLiteType GetValueType() const = 0;
+ virtual TfLiteStatus CheckKeyAndValueTypes(TfLiteContext* context,
+ const TfLiteTensor* keys,
+ const TfLiteTensor* values) = 0;
+};
+
+// Creates a resource hash table for the given resource id, shared among all
+// the subgraphs. If one already exists for that id, this is a no-op.
+// WARNING: Experimental interface, subject to change.
+void CreateHashtableResourceIfNotAvailable(ResourceMap* resources,
+ int resource_id,
+ TfLiteType key_dtype,
+ TfLiteType value_dtype);
+
+// Returns the corresponding resource hash table, or nullptr if none.
+// WARNING: Experimental interface, subject to change.
+LookupInterface* GetHashtableResource(ResourceMap* resources, int resource_id);
+
+} // namespace resource
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_INTERFACES_H_
diff --git a/tensorflow/lite/experimental/resource/lookup_util.h b/tensorflow/lite/experimental/resource/lookup_util.h
new file mode 100644
index 0000000..bb2c1c5
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/lookup_util.h
@@ -0,0 +1,114 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_UTIL_H_
+#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_UTIL_H_
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace tflite {
+namespace resource {
+namespace internal {
+
+/// Helper class for accessing TFLite tensor data.
+template <typename T>
+class TensorReader {
+ public:
+ explicit TensorReader(const TfLiteTensor* input) {
+ input_data_ = GetTensorData<T>(input);
+ }
+
+ // Returns the corresponding scalar data at the given index position.
+  // For performance reasons, this method does not validate the index; the
+  // caller is responsible for ensuring it is within bounds.
+ T GetData(int index) { return input_data_[index]; }
+
+ private:
+ const T* input_data_;
+};
+
+/// Helper class for accessing TFLite tensor data. This specialized class is for
+/// std::string type.
+template <>
+class TensorReader<std::string> {
+ public:
+ explicit TensorReader(const TfLiteTensor* input) : input_(input) {}
+
+ // Returns the corresponding string data at the given index position.
+  // For performance reasons, this method does not validate the index; the
+  // caller is responsible for ensuring it is within bounds.
+ std::string GetData(int index) {
+ auto string_ref = GetString(input_, index);
+ return std::string(string_ref.str, string_ref.len);
+ }
+
+ private:
+ const TfLiteTensor* input_;
+};
+
+/// WARNING: Experimental interface, subject to change.
+/// Helper class for writing TFLite tensor data.
+template <typename ValueType>
+class TensorWriter {
+ public:
+ explicit TensorWriter(TfLiteTensor* values) {
+ output_data_ = GetTensorData<ValueType>(values);
+ }
+
+  // Sets the given value at the given index position of the tensor storage.
+  // For performance reasons, this method does not validate the index; the
+  // caller is responsible for ensuring it is within bounds.
+ void SetData(int index, ValueType& value) { output_data_[index] = value; }
+
+  // Commits updates. This is a no-op here, since SetData writes directly
+  // into the tensor storage.
+ void Commit() {
+ // Noop.
+ }
+
+ private:
+ ValueType* output_data_;
+};
+
+/// WARNING: Experimental interface, subject to change.
+/// Helper class for writing TFLite tensor data. This specialized class is for
+/// std::string type.
+template <>
+class TensorWriter<std::string> {
+ public:
+ explicit TensorWriter(TfLiteTensor* values) : values_(values) {}
+
+  // Queues the given string value into the buffer; the provided index is
+  // ignored, since strings are appended in call order.
+  // The caller is responsible for issuing writes in index order so that the
+  // final buffer layout matches the tensor.
+ void SetData(int index, const std::string& value) {
+ buf_.AddString(value.data(), value.length());
+ }
+
+  // Commits updates: the data accumulated in the DynamicBuffer is written
+  // into the tensor storage.
+ void Commit() { buf_.WriteToTensor(values_, nullptr); }
+
+ private:
+ TfLiteTensor* values_;
+ DynamicBuffer buf_;
+};
+
+} // namespace internal
+} // namespace resource
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_LOOKUP_UTIL_H_
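
The reader/writer pair above lets lookup kernels treat scalar and string tensors uniformly: scalar writes land directly in the tensor and `Commit()` is a no-op, while string writes are buffered and flushed once at the end. A self-contained sketch of that shape, with `std::vector` standing in for `TfLiteTensor` storage:

```cc
#include <cstdio>
#include <string>
#include <vector>

template <typename T>
class Writer {
 public:
  explicit Writer(std::vector<T>* out) : out_(out) {}
  void SetData(int index, const T& value) { (*out_)[index] = value; }
  void Commit() {}  // direct writes: nothing to flush

 private:
  std::vector<T>* out_;
};

// Specialization for strings, mirroring the DynamicBuffer-backed writer:
// values are buffered in call order and written out in one Commit().
template <>
class Writer<std::string> {
 public:
  explicit Writer(std::vector<std::string>* out) : out_(out) {}
  void SetData(int /*index*/, const std::string& value) {
    buffer_.push_back(value);
  }
  void Commit() { *out_ = buffer_; }

 private:
  std::vector<std::string>* out_;
  std::vector<std::string> buffer_;
};

int main() {
  std::vector<std::string> values(2);
  Writer<std::string> writer(&values);
  writer.SetData(0, "hello");
  writer.SetData(1, "world");
  writer.Commit();
  std::printf("%s %s\n", values[0].c_str(), values[1].c_str());
}
```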
diff --git a/tensorflow/lite/experimental/resource/resource_base.h b/tensorflow/lite/experimental/resource/resource_base.h
new file mode 100644
index 0000000..48a00b9
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/resource_base.h
@@ -0,0 +1,43 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_BASE_H_
+#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_BASE_H_
+
+#include <memory>
+#include <unordered_map>
+
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+
+namespace tflite {
+namespace resource {
+
+/// WARNING: Experimental interface, subject to change.
+// ResourceBase is an abstract base class for resources.
+class ResourceBase {
+ public:
+  ResourceBase() {}
+ virtual ~ResourceBase() {}
+
+ // Returns true if it is initialized.
+ virtual bool IsInitialized() = 0;
+};
+
+/// WARNING: Experimental interface, subject to change.
+using ResourceMap = std::unordered_map<int32, std::unique_ptr<ResourceBase>>;
+
+} // namespace resource
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_BASE_H_
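
A brief sketch of how a `ResourceMap` is meant to be used: resources are created once per integer id and owned by the map through `unique_ptr`. The concrete resource type below is illustrative; the real entries are `ResourceVariable`s or hash tables:

```cc
#include <cstdio>
#include <memory>
#include <unordered_map>

struct ResourceBase {
  virtual ~ResourceBase() {}
  virtual bool IsInitialized() = 0;
};

struct CounterResource : ResourceBase {
  int count = 0;
  bool IsInitialized() override { return true; }
};

using ResourceMap = std::unordered_map<int, std::unique_ptr<ResourceBase>>;

int main() {
  ResourceMap resources;
  // Create-if-absent, mirroring CreateResourceVariableIfNotAvailable below.
  if (resources.count(/*resource_id=*/7) == 0) {
    resources.emplace(7, new CounterResource());
  }
  auto it = resources.find(7);
  std::printf("initialized: %d\n",
              it != resources.end() && it->second->IsInitialized());
}
```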
diff --git a/tensorflow/lite/experimental/resource_variable/resource_variable.cc b/tensorflow/lite/experimental/resource/resource_variable.cc
similarity index 78%
rename from tensorflow/lite/experimental/resource_variable/resource_variable.cc
rename to tensorflow/lite/experimental/resource/resource_variable.cc
index 502ca27..9fa0f17 100644
--- a/tensorflow/lite/experimental/resource_variable/resource_variable.cc
+++ b/tensorflow/lite/experimental/resource/resource_variable.cc
@@ -13,13 +13,14 @@
limitations under the License.
==============================================================================*/
-#include "tensorflow/lite/experimental/resource_variable/resource_variable.h"
+#include "tensorflow/lite/experimental/resource/resource_variable.h"
#include <cstdlib>
#include <cstring>
#include <map>
namespace tflite {
+namespace resource {
ResourceVariable::ResourceVariable() {
memset(&tensor_, 0, sizeof(TfLiteTensor));
@@ -75,4 +76,21 @@
return kTfLiteOk;
}
+void CreateResourceVariableIfNotAvailable(ResourceMap* resources,
+ int resource_id) {
+ if (resources->count(resource_id) != 0) {
+ return;
+ }
+ resources->emplace(resource_id, new ResourceVariable());
+}
+
+ResourceVariable* GetResourceVariable(ResourceMap* resources, int resource_id) {
+ auto it = resources->find(resource_id);
+ if (it != resources->end()) {
+ return static_cast<ResourceVariable*>(it->second.get());
+ }
+ return nullptr;
+}
+
+} // namespace resource
} // namespace tflite
diff --git a/tensorflow/lite/experimental/resource_variable/resource_variable.h b/tensorflow/lite/experimental/resource/resource_variable.h
similarity index 65%
rename from tensorflow/lite/experimental/resource_variable/resource_variable.h
rename to tensorflow/lite/experimental/resource/resource_variable.h
index 6a93848..1e832c7 100644
--- a/tensorflow/lite/experimental/resource_variable/resource_variable.h
+++ b/tensorflow/lite/experimental/resource/resource_variable.h
@@ -12,14 +12,14 @@
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_VARIABLE_RESOURCE_VARIABLE_H_
-#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_VARIABLE_RESOURCE_VARIABLE_H_
-
-#include <unordered_map>
+#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_VARIABLE_H_
+#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_VARIABLE_H_
#include "tensorflow/lite/c/c_api_internal.h"
+#include "tensorflow/lite/experimental/resource/resource_base.h"
namespace tflite {
+namespace resource {
/// WARNING: Experimental interface, subject to change.
// A resource variable class. It's similar to TensorFlow Resource
@@ -28,7 +28,7 @@
//
// TODO(b/137042749): TFLite converter cannot convert variables yet.
// Variable functionalities are only tested with unit tests now.
-class ResourceVariable {
+class ResourceVariable : public ResourceBase {
public:
ResourceVariable();
ResourceVariable(ResourceVariable&& other);
@@ -36,7 +36,7 @@
ResourceVariable(const ResourceVariable&) = delete;
ResourceVariable& operator=(const ResourceVariable&) = delete;
- ~ResourceVariable();
+ ~ResourceVariable() override;
// Assigns data from a tensor. Copies its type, shape and data over.
TfLiteStatus AssignFrom(const TfLiteTensor* tensor);
@@ -46,6 +46,9 @@
// `AssignFrom`.
TfLiteTensor* GetTensor() { return is_initialized_ ? &tensor_ : nullptr; }
+ // Returns true if this resource variable is initialized.
+ bool IsInitialized() override { return is_initialized_; }
+
private:
// The tensor (and its buffer stored in `tensor_.data`) is fully owned by
// the `ResourceVariable` object.
@@ -55,8 +58,17 @@
bool is_initialized_ = false;
};
-using ResourceVariableMap = std::unordered_map<int, ResourceVariable>;
+// Creates a resource variable for the given resource id, shared among all
+// the subgraphs. If one already exists for that id, this is a no-op.
+// WARNING: Experimental interface, subject to change.
+void CreateResourceVariableIfNotAvailable(ResourceMap* resources,
+ int resource_id);
+// Returns the corresponding resource variable, or nullptr if none.
+// WARNING: Experimental interface, subject to change.
+ResourceVariable* GetResourceVariable(ResourceMap* resources, int resource_id);
+
+} // namespace resource
} // namespace tflite
-#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_VARIABLE_RESOURCE_VARIABLE_H_
+#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_RESOURCE_VARIABLE_H_
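
A usage sketch for the relocated helpers, assuming a populated `TfLiteTensor* value` is provided by the surrounding kernel:

```cc
#include "tensorflow/lite/experimental/resource/resource_variable.h"

TfLiteStatus AssignAndCheck(tflite::resource::ResourceMap* resources,
                            int resource_id, const TfLiteTensor* value) {
  // No-op if a variable already exists for this resource id.
  tflite::resource::CreateResourceVariableIfNotAvailable(resources,
                                                         resource_id);
  tflite::resource::ResourceVariable* variable =
      tflite::resource::GetResourceVariable(resources, resource_id);
  if (variable == nullptr) return kTfLiteError;

  // Copies the tensor's type, shape and data into the variable.
  TfLiteStatus status = variable->AssignFrom(value);
  if (status != kTfLiteOk) return status;

  // GetTensor() returns nullptr until the first successful AssignFrom().
  return variable->GetTensor() != nullptr ? kTfLiteOk : kTfLiteError;
}
```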
diff --git a/tensorflow/lite/experimental/resource/static_hashtable.cc b/tensorflow/lite/experimental/resource/static_hashtable.cc
new file mode 100644
index 0000000..d619fa8
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/static_hashtable.cc
@@ -0,0 +1,129 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/experimental/resource/static_hashtable.h"
+
+#include "tensorflow/lite/experimental/resource/lookup_interfaces.h"
+
+namespace tflite {
+namespace resource {
+namespace internal {
+
+template <typename KeyType, typename ValueType>
+TfLiteStatus StaticHashtable<KeyType, ValueType>::Lookup(
+ TfLiteContext* context, const TfLiteTensor* keys, TfLiteTensor* values,
+ const TfLiteTensor* default_value) {
+ TF_LITE_ENSURE(context, is_initialized_);
+ const int size =
+ MatchingFlatSize(GetTensorShape(keys), GetTensorShape(values));
+
+ auto key_tensor_reader = TensorReader<KeyType>(keys);
+ auto value_tensor_writer = TensorWriter<ValueType>(values);
+ auto default_value_tensor_reader = TensorReader<ValueType>(default_value);
+ ValueType first_default_value = default_value_tensor_reader.GetData(0);
+
+ for (int i = 0; i < size; ++i) {
+ auto result = map_.find(key_tensor_reader.GetData(i));
+ if (result != map_.end()) {
+ value_tensor_writer.SetData(i, result->second);
+ } else {
+ value_tensor_writer.SetData(i, first_default_value);
+ }
+ }
+
+  // For string tensors, this writes the buffered data back to the actual
+  // tensor destination. For scalar types it is a no-op, since the data has
+  // already been written directly into the tensor storage.
+ value_tensor_writer.Commit();
+
+ return kTfLiteOk;
+}
+
+template <typename KeyType, typename ValueType>
+TfLiteStatus StaticHashtable<KeyType, ValueType>::Import(
+ TfLiteContext* context, const TfLiteTensor* keys,
+ const TfLiteTensor* values) {
+  // Import nodes can be invoked more than once because the converter does not
+  // extract the initializer graph separately from the original graph.
+  // Invocations after the first are ignored.
+ if (is_initialized_) {
+ return kTfLiteOk;
+ }
+
+ const int size =
+ MatchingFlatSize(GetTensorShape(keys), GetTensorShape(values));
+
+ auto key_tensor_reader = TensorReader<KeyType>(keys);
+  auto value_tensor_reader = TensorReader<ValueType>(values);
+  for (int i = 0; i < size; ++i) {
+    map_.insert({key_tensor_reader.GetData(i), value_tensor_reader.GetData(i)});
+ }
+
+ is_initialized_ = true;
+ return kTfLiteOk;
+}
+
+template <typename KeyType>
+LookupInterface* CreateStaticHashtableWithGivenKey(TfLiteType key_type,
+ TfLiteType value_type) {
+ switch (value_type) {
+ case kTfLiteInt32:
+ return new StaticHashtable<KeyType, int32>(key_type, value_type);
+ case kTfLiteString:
+ return new StaticHashtable<KeyType, std::string>(key_type, value_type);
+ case kTfLiteFloat32:
+ return new StaticHashtable<KeyType, float>(key_type, value_type);
+ default:
+ return nullptr;
+ }
+}
+
+LookupInterface* CreateStaticHashtable(TfLiteType key_type,
+ TfLiteType value_type) {
+ switch (key_type) {
+ case kTfLiteInt32:
+ return CreateStaticHashtableWithGivenKey<int32>(key_type, value_type);
+ case kTfLiteString:
+ return CreateStaticHashtableWithGivenKey<std::string>(key_type,
+ value_type);
+ default:
+ return nullptr;
+ }
+}
+
+} // namespace internal
+
+void CreateHashtableResourceIfNotAvailable(ResourceMap* resources,
+ int resource_id,
+ TfLiteType key_dtype,
+ TfLiteType value_dtype) {
+ if (resources->count(resource_id) != 0) {
+ return;
+ }
+ resources->emplace(resource_id,
+ tflite::resource::internal::CreateStaticHashtable(
+ key_dtype, value_dtype));
+}
+
+LookupInterface* GetHashtableResource(ResourceMap* resources, int resource_id) {
+ auto it = resources->find(resource_id);
+ if (it != resources->end()) {
+ return static_cast<LookupInterface*>(it->second.get());
+ }
+ return nullptr;
+}
+
+} // namespace resource
+} // namespace tflite
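The default-value semantics of Lookup are easy to miss amid the tensor plumbing: a missing key is never an error, it simply yields the first element of the default-value tensor. The same loop over a plain std::unordered_map, as a standalone sketch (all names here are invented for illustration):

    #include <string>
    #include <unordered_map>
    #include <vector>

    // Mirrors StaticHashtable::Lookup: every missing key maps to the default.
    std::vector<std::string> LookupWithDefault(
        const std::unordered_map<int, std::string>& map,
        const std::vector<int>& keys, const std::string& default_value) {
      std::vector<std::string> values;
      values.reserve(keys.size());
      for (int key : keys) {
        auto it = map.find(key);
        values.push_back(it != map.end() ? it->second : default_value);
      }
      return values;
    }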
diff --git a/tensorflow/lite/experimental/resource/static_hashtable.h b/tensorflow/lite/experimental/resource/static_hashtable.h
new file mode 100644
index 0000000..84e68b7
--- /dev/null
+++ b/tensorflow/lite/experimental/resource/static_hashtable.h
@@ -0,0 +1,84 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_STATIC_HASHTABLE_H_
+#define TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_STATIC_HASHTABLE_H_
+
+#include <unordered_map>
+
+#include "tensorflow/lite/c/c_api_internal.h"
+#include "tensorflow/lite/experimental/resource/lookup_interfaces.h"
+#include "tensorflow/lite/experimental/resource/lookup_util.h"
+#include "tensorflow/lite/experimental/resource/resource_base.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/string_util.h"
+
+namespace tflite {
+namespace resource {
+namespace internal {
+
+// A static hash table class. The table can be initialized only once in its
+// life cycle. It implements TensorFlow core's HashTableV2 op.
+template <typename KeyType, typename ValueType>
+class StaticHashtable : public tflite::resource::LookupInterface {
+ public:
+ explicit StaticHashtable(TfLiteType key_type, TfLiteType value_type)
+ : key_type_(key_type), value_type_(value_type) {}
+ ~StaticHashtable() override {}
+
+ // Finds the corresponding value of the given keys tensor in the map and
+ // copies the result data to the given values tensor. If there is no matching
+ // value, it will write the default value into the matched position instead.
+ TfLiteStatus Lookup(TfLiteContext* context, const TfLiteTensor* keys,
+ TfLiteTensor* values,
+ const TfLiteTensor* default_value) override;
+
+ // Inserts the given key and value tensor data into the hash table.
+ TfLiteStatus Import(TfLiteContext* context, const TfLiteTensor* keys,
+ const TfLiteTensor* values) override;
+
+  // Returns the number of items in the hash table.
+ size_t Size() override { return map_.size(); }
+
+ TfLiteType GetKeyType() const override { return key_type_; }
+ TfLiteType GetValueType() const override { return value_type_; }
+
+ TfLiteStatus CheckKeyAndValueTypes(TfLiteContext* context,
+ const TfLiteTensor* keys,
+ const TfLiteTensor* values) override {
+ TF_LITE_ENSURE_EQ(context, keys->type, key_type_);
+ TF_LITE_ENSURE_EQ(context, values->type, value_type_);
+ return kTfLiteOk;
+ }
+
+ // Returns true if the hash table is initialized.
+ bool IsInitialized() override { return is_initialized_; }
+
+ private:
+ TfLiteType key_type_;
+ TfLiteType value_type_;
+
+ std::unordered_map<KeyType, ValueType> map_;
+ bool is_initialized_ = false;
+};
+
+::tflite::resource::LookupInterface* CreateStaticHashtable(
+ TfLiteType key_type, TfLiteType value_type);
+
+} // namespace internal
+
+} // namespace resource
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_EXPERIMENTAL_RESOURCE_STATIC_HASHTABLE_H_
diff --git a/tensorflow/lite/experimental/resource_variable/BUILD b/tensorflow/lite/experimental/resource_variable/BUILD
deleted file mode 100644
index af2ed19..0000000
--- a/tensorflow/lite/experimental/resource_variable/BUILD
+++ /dev/null
@@ -1,17 +0,0 @@
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-cc_library(
- name = "resource_variable",
- srcs = [
- "resource_variable.cc",
- ],
- hdrs = [
- "resource_variable.h",
- ],
- deps = [
- "//tensorflow/lite/c:c_api_internal",
- ],
-)
diff --git a/tensorflow/lite/experimental/writer/writer_lib.cc b/tensorflow/lite/experimental/writer/writer_lib.cc
index de75e14..a4828fb 100644
--- a/tensorflow/lite/experimental/writer/writer_lib.cc
+++ b/tensorflow/lite/experimental/writer/writer_lib.cc
@@ -274,7 +274,10 @@
FILE* fp = fopen(filename.c_str(), "wb");
if (!fp) return kTfLiteError;
- if (fwrite(buffer.get(), 1, size, fp) != size) return kTfLiteError;
+ if (fwrite(buffer.get(), 1, size, fp) != size) {
+ fclose(fp);
+ return kTfLiteError;
+ }
if (fclose(fp)) return kTfLiteError;
return kTfLiteOk;
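The fix above plugs a file-handle leak on the short-write path. An RAII sketch of the same routine (illustrative only; note that handing fclose to the deleter discards its return value, which the patched code still checks explicitly):

    #include <cstdio>
    #include <memory>

    // unique_ptr with fclose as the deleter closes the file on every path.
    using FileHandle = std::unique_ptr<FILE, int (*)(FILE*)>;

    bool WriteAll(const char* filename, const void* buffer, size_t size) {
      FileHandle fp(fopen(filename, "wb"), &fclose);
      if (!fp) return false;
      return fwrite(buffer, 1, size, fp.get()) == size;
    }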
diff --git a/tensorflow/lite/g3doc/guide/build_ios.md b/tensorflow/lite/g3doc/guide/build_ios.md
index 25ffb6e..df7c229 100644
--- a/tensorflow/lite/g3doc/guide/build_ios.md
+++ b/tensorflow/lite/g3doc/guide/build_ios.md
@@ -118,8 +118,8 @@
#### Using local TensorFlow Lite core
You can set up a private CocoaPods specs repository, and publish your custom
-`TensorFlowLiteC` framework to your private repo. You can copy this [podspec
-file][tflite-podspec] and modify a few values:
+`TensorFlowLiteC` framework to your private repo. You can copy this
+[podspec file][tflite-podspec] and modify a few values:
```ruby
...
@@ -203,4 +203,4 @@
[objc-api]: https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/objc
[private-cocoapods]: https://guides.cocoapods.org/making/private-cocoapods.html
[swift-api]: https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/experimental/swift
-[tflitec-podspec]: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/experimental/ios/TensorFlowLiteC.podspec
+[tflite-podspec]: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/experimental/ios/TensorFlowLiteC.podspec
diff --git a/tensorflow/lite/g3doc/microcontrollers/get_started.md b/tensorflow/lite/g3doc/microcontrollers/get_started.md
index 375d0c1..6387e8b 100644
--- a/tensorflow/lite/g3doc/microcontrollers/get_started.md
+++ b/tensorflow/lite/g3doc/microcontrollers/get_started.md
@@ -16,6 +16,10 @@
from source)
* [STM32F746 Discovery kit](https://www.st.com/en/evaluation-tools/32f746gdiscovery.html)
(using Mbed)
+* [Adafruit EdgeBadge](https://www.adafruit.com/product/4400) (using Arduino
+ IDE)
+* [Adafruit TensorFlow Lite for Microcontrollers Kit](https://www.adafruit.com/product/4317)
+ (using Arduino IDE)
Learn more about supported platforms in
[TensorFlow Lite for Microcontrollers](index.md).
diff --git a/tensorflow/lite/g3doc/performance/benchmarks.md b/tensorflow/lite/g3doc/performance/benchmarks.md
index b310d0f..e825f7c 100644
--- a/tensorflow/lite/g3doc/performance/benchmarks.md
+++ b/tensorflow/lite/g3doc/performance/benchmarks.md
@@ -141,7 +141,8 @@
[benchmark app](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/tools/benchmark/ios)
was modified to include the appropriate model and `benchmark_params.json` was
modified to set `num_threads` to 2. For GPU delegate, `"use_gpu" : "1"` and
-`"wait_type" : "aggressive"` options were also added to `benchmark_params.json`.
+`"gpu_wait_type" : "aggressive"` options were also added to
+`benchmark_params.json`.
<table>
<thead>
diff --git a/tensorflow/lite/g3doc/performance/quantization_spec.md b/tensorflow/lite/g3doc/performance/quantization_spec.md
index 9ac2ea9..d6b7029 100644
--- a/tensorflow/lite/g3doc/performance/quantization_spec.md
+++ b/tensorflow/lite/g3doc/performance/quantization_spec.md
@@ -149,7 +149,7 @@
Input 2 (Bias):
data_type : int32
range : [int32_min, int32_max]
- granularity: per-axis (dim = 0)
+ granularity: per-axis
restriction: (scale, zero_point) = (input0_scale * input1_scale[...], 0)
Output 0:
data_type : int8
@@ -169,7 +169,7 @@
Input 2 (Bias):
data_type : int32
range : [int32_min, int32_max]
- granularity: per-axis (dim = 3)
+ granularity: per-axis
restriction: (scale, zero_point) = (input0_scale * input1_scale[...], 0)
Output 0:
data_type : int8
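A worked example of the bias restriction, with made-up scales: if input0 (the activation) has scale 0.5 and input1 (the per-axis weights) has scale 0.02 on channel c, the channel-c bias is stored as int32 with scale 0.5 * 0.02 = 0.01 and zero_point 0, so real_bias ~ 0.01 * quantized_bias. Keeping the bias scale equal to the product of the two input scales is what lets a kernel add the bias directly into the int32 accumulator before requantization.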
diff --git a/tensorflow/lite/interpreter.cc b/tensorflow/lite/interpreter.cc
index 10d857c..07a8af1 100644
--- a/tensorflow/lite/interpreter.cc
+++ b/tensorflow/lite/interpreter.cc
@@ -155,7 +155,7 @@
subgraphs_.reserve(base_index + subgraphs_to_add);
for (int i = 0; i < subgraphs_to_add; ++i) {
Subgraph* subgraph = new Subgraph(error_reporter_, external_contexts_,
- &subgraphs_, &resource_variables_);
+ &subgraphs_, &resources_);
subgraphs_.emplace_back(subgraph);
}
}
diff --git a/tensorflow/lite/interpreter.h b/tensorflow/lite/interpreter.h
index 1d2664a..3976c27 100644
--- a/tensorflow/lite/interpreter.h
+++ b/tensorflow/lite/interpreter.h
@@ -28,7 +28,7 @@
#include "tensorflow/lite/core/api/error_reporter.h"
#include "tensorflow/lite/core/api/profiler.h"
#include "tensorflow/lite/core/subgraph.h"
-#include "tensorflow/lite/experimental/resource_variable/resource_variable.h"
+#include "tensorflow/lite/experimental/resource/resource_base.h"
#include "tensorflow/lite/external_cpu_backend_context.h"
#include "tensorflow/lite/memory_planner.h"
#include "tensorflow/lite/stderr_reporter.h"
@@ -522,9 +522,8 @@
// Subgraphs
std::vector<std::unique_ptr<Subgraph>> subgraphs_;
- // A map of resource variables. Owned by interpreter and shared by multiple
- // subgraphs.
- ResourceVariableMap resource_variables_;
+ // A map of resources. Owned by interpreter and shared by multiple subgraphs.
+ resource::ResourceMap resources_;
};
} // namespace tflite
diff --git a/tensorflow/lite/java/aar_with_jni.bzl b/tensorflow/lite/java/aar_with_jni.bzl
index 9e7c8dd..56a7e2f 100644
--- a/tensorflow/lite/java/aar_with_jni.bzl
+++ b/tensorflow/lite/java/aar_with_jni.bzl
@@ -13,7 +13,7 @@
android_library: The `android_library` target to package. Note that the
      AAR will contain *only* that library's `.jar` sources. It does not
package the transitive closure of all Java source dependencies.
- headers: Optional Fileset of headers that will be included in the
+ headers: Optional list of headers that will be included in the
generated .aar file. This is useful for distributing self-contained
.aars with native libs that can be used directly by native clients.
"""
@@ -64,11 +64,15 @@
""".format(android_library, name)
if headers:
- srcs += [headers]
+ srcs += headers
cmd += """
-cp -rL $$origdir/$(location {0}) headers
-zip -r $$origdir/$(location :{1}.aar) headers
-""".format(headers, name)
+ mkdir headers
+ """
+ for src in headers:
+ cmd += """
+ cp -rL $$origdir/$(location {0}) headers/$$(basename $(location {0}))
+ """.format(src)
+ cmd += "zip -r $$origdir/$(location :{0}.aar) headers".format(name)
native.genrule(
name = name,
diff --git a/tensorflow/lite/java/demo/app/src/main/BUILD b/tensorflow/lite/java/demo/app/src/main/BUILD
index 420a81c..49a5f64 100644
--- a/tensorflow/lite/java/demo/app/src/main/BUILD
+++ b/tensorflow/lite/java/demo/app/src/main/BUILD
@@ -8,7 +8,6 @@
android_binary(
name = "TfLiteCameraDemo",
srcs = glob(["java/**/*.java"]),
- aapt_version = "aapt2",
assets = [
"//tensorflow/lite/java/demo/app/src/main/assets:labels_mobilenet_quant_v1_224.txt",
"@tflite_mobilenet_quant//:mobilenet_v1_1.0_224_quant.tflite",
diff --git a/tensorflow/lite/java/ovic/demo/app/BUILD b/tensorflow/lite/java/ovic/demo/app/BUILD
index e0bd534..7f3b5fc 100644
--- a/tensorflow/lite/java/ovic/demo/app/BUILD
+++ b/tensorflow/lite/java/ovic/demo/app/BUILD
@@ -11,7 +11,6 @@
srcs = [
"OvicBenchmarkerActivity.java",
],
- aapt_version = "aapt2",
assets = [
"//tensorflow/lite/java/ovic/src/testdata:coco_labels.txt",
"//tensorflow/lite/java/ovic/src/testdata:labels.txt",
diff --git a/tensorflow/lite/kernels/BUILD b/tensorflow/lite/kernels/BUILD
index 65bb0ea..d5fae3c 100644
--- a/tensorflow/lite/kernels/BUILD
+++ b/tensorflow/lite/kernels/BUILD
@@ -528,6 +528,7 @@
"//tensorflow/lite:framework",
"//tensorflow/lite:string_util",
"//tensorflow/lite/c:c_api_internal",
+ "//tensorflow/lite/experimental/resource",
"//tensorflow/lite/kernels/internal:audio_utils",
"//tensorflow/lite/kernels/internal:common",
"//tensorflow/lite/kernels/internal:compatibility",
@@ -561,6 +562,7 @@
":op_macros",
"//tensorflow/lite:framework",
"//tensorflow/lite/c:c_api_internal",
+ "//tensorflow/lite/experimental/resource",
"//tensorflow/lite/kernels/internal:tensor",
],
)
diff --git a/tensorflow/lite/kernels/assign_variable.cc b/tensorflow/lite/kernels/assign_variable.cc
index 099b8e1..ac4ce79 100644
--- a/tensorflow/lite/kernels/assign_variable.cc
+++ b/tensorflow/lite/kernels/assign_variable.cc
@@ -20,6 +20,7 @@
#include "tensorflow/lite/c/builtin_op_data.h"
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/core/subgraph.h"
+#include "tensorflow/lite/experimental/resource/resource_variable.h"
#include "tensorflow/lite/kernels/internal/tensor.h"
#include "tensorflow/lite/kernels/kernel_util.h"
#include "tensorflow/lite/kernels/op_macros.h"
@@ -43,10 +44,10 @@
// everything still works fine when variable ops aren't used.
TF_LITE_ENSURE_EQ(context, NumOutputs(node), 0);
- const TfLiteTensor* input_variable_id_tensor =
+ const TfLiteTensor* input_resource_id_tensor =
GetInput(context, node, kInputVariableId);
- TF_LITE_ENSURE_EQ(context, input_variable_id_tensor->type, kTfLiteInt32);
- TF_LITE_ENSURE_EQ(context, NumElements(input_variable_id_tensor), 1);
+ TF_LITE_ENSURE_EQ(context, input_resource_id_tensor->type, kTfLiteInt32);
+ TF_LITE_ENSURE_EQ(context, NumElements(input_resource_id_tensor), 1);
return kTfLiteOk;
}
@@ -54,21 +55,16 @@
TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
Subgraph* subgraph = reinterpret_cast<Subgraph*>(context->impl_);
- const TfLiteTensor* input_variable_id_tensor =
+ const TfLiteTensor* input_resource_id_tensor =
GetInput(context, node, kInputVariableId);
const TfLiteTensor* input_value_tensor = GetInput(context, node, kInputValue);
- int variable_id = input_variable_id_tensor->data.i32[0];
- auto& resource_variables = subgraph->resource_variables();
-
- auto variable_iterator = resource_variables.find(variable_id);
- if (variable_iterator == resource_variables.end()) {
- auto ret = resource_variables.emplace(variable_id, ResourceVariable());
- variable_iterator = ret.first;
- }
-
- auto& variable = variable_iterator->second;
- variable.AssignFrom(input_value_tensor);
+ int resource_id = input_resource_id_tensor->data.i32[0];
+ auto& resources = subgraph->resources();
+ resource::CreateResourceVariableIfNotAvailable(&resources, resource_id);
+ auto* variable = resource::GetResourceVariable(&resources, resource_id);
+ TF_LITE_ENSURE(context, variable != nullptr);
+ variable->AssignFrom(input_value_tensor);
return kTfLiteOk;
}
diff --git a/tensorflow/lite/kernels/comparisons.cc b/tensorflow/lite/kernels/comparisons.cc
index 15333fd..79e9f9c 100644
--- a/tensorflow/lite/kernels/comparisons.cc
+++ b/tensorflow/lite/kernels/comparisons.cc
@@ -54,71 +54,59 @@
return context->ResizeTensor(context, output, output_size);
}
-// TODO(ruic): optimize macros below to using template functions.
-#define TF_LITE_QUANTIZE_COMPARISON(opname) \
- template <typename input_dtype> \
- void EvalQuantized##opname(TfLiteContext* context, TfLiteNode* node, \
- const TfLiteTensor* input1, \
- const TfLiteTensor* input2, TfLiteTensor* output, \
- bool requires_broadcast) { \
- if (input1->type == kTfLiteUInt8 || input1->type == kTfLiteInt8) { \
- auto input1_offset = -input1->params.zero_point; \
- auto input2_offset = -input2->params.zero_point; \
- const int left_shift = 8; \
- \
- int32 input1_multiplier; \
- int input1_shift; \
- QuantizeMultiplierSmallerThanOneExp(input1->params.scale, \
- &input1_multiplier, &input1_shift); \
- int32 input2_multiplier; \
- int input2_shift; \
- QuantizeMultiplierSmallerThanOneExp(input2->params.scale, \
- &input2_multiplier, &input2_shift); \
- \
- ComparisonParams op_params; \
- op_params.left_shift = left_shift; \
- op_params.input1_offset = input1_offset; \
- op_params.input1_multiplier = input1_multiplier; \
- op_params.input1_shift = input1_shift; \
- op_params.input2_offset = input2_offset; \
- op_params.input2_multiplier = input2_multiplier; \
- op_params.input2_shift = input2_shift; \
- if (requires_broadcast) { \
- reference_ops::Broadcast4DSlow##opname##WithScaling( \
- op_params, GetTensorShape(input1), \
- GetTensorData<input_dtype>(input1), GetTensorShape(input2), \
- GetTensorData<input_dtype>(input2), GetTensorShape(output), \
- GetTensorData<bool>(output)); \
- } else { \
- reference_ops::opname##WithScaling( \
- op_params, GetTensorShape(input1), \
- GetTensorData<input_dtype>(input1), GetTensorShape(input2), \
- GetTensorData<input_dtype>(input2), GetTensorShape(output), \
- GetTensorData<bool>(output)); \
- } \
- } \
- }
-TF_LITE_QUANTIZE_COMPARISON(Equal);
-TF_LITE_QUANTIZE_COMPARISON(NotEqual);
-TF_LITE_QUANTIZE_COMPARISON(Greater);
-TF_LITE_QUANTIZE_COMPARISON(GreaterEqual);
-TF_LITE_QUANTIZE_COMPARISON(Less);
-TF_LITE_QUANTIZE_COMPARISON(LessEqual);
-#undef TF_LITE_QUANTIZE_COMPARISON
+template <typename input_dtype, reference_ops::ComparisonFn<int32> opname>
+void ComparisonQuantized(const TfLiteTensor* input1, const TfLiteTensor* input2,
+ TfLiteTensor* output, bool requires_broadcast) {
+ if (input1->type == kTfLiteUInt8 || input1->type == kTfLiteInt8) {
+ auto input1_offset = -input1->params.zero_point;
+ auto input2_offset = -input2->params.zero_point;
+ const int left_shift = 8;
-#define TF_LITE_COMPARISON(type, opname, requires_broadcast) \
- { \
- ComparisonParams op_params; \
- requires_broadcast \
- ? reference_ops::Broadcast4DSlow##opname##NoScaling( \
- op_params, GetTensorShape(input1), GetTensorData<type>(input1), \
- GetTensorShape(input2), GetTensorData<type>(input2), \
- GetTensorShape(output), GetTensorData<bool>(output)) \
- : reference_ops::opname##NoScaling( \
- op_params, GetTensorShape(input1), GetTensorData<type>(input1), \
- GetTensorShape(input2), GetTensorData<type>(input2), \
- GetTensorShape(output), GetTensorData<bool>(output)); \
+ int32 input1_multiplier;
+ int input1_shift;
+ QuantizeMultiplierSmallerThanOneExp(input1->params.scale,
+ &input1_multiplier, &input1_shift);
+ int32 input2_multiplier;
+ int input2_shift;
+ QuantizeMultiplierSmallerThanOneExp(input2->params.scale,
+ &input2_multiplier, &input2_shift);
+
+ ComparisonParams op_params;
+ op_params.left_shift = left_shift;
+ op_params.input1_offset = input1_offset;
+ op_params.input1_multiplier = input1_multiplier;
+ op_params.input1_shift = input1_shift;
+ op_params.input2_offset = input2_offset;
+ op_params.input2_multiplier = input2_multiplier;
+ op_params.input2_shift = input2_shift;
+ if (requires_broadcast) {
+ reference_ops::BroadcastComparison4DSlowWithScaling<input_dtype, opname>(
+ op_params, GetTensorShape(input1), GetTensorData<input_dtype>(input1),
+ GetTensorShape(input2), GetTensorData<input_dtype>(input2),
+ GetTensorShape(output), GetTensorData<bool>(output));
+ } else {
+ reference_ops::ComparisonWithScaling<input_dtype, opname>(
+ op_params, GetTensorShape(input1), GetTensorData<input_dtype>(input1),
+ GetTensorShape(input2), GetTensorData<input_dtype>(input2),
+ GetTensorShape(output), GetTensorData<bool>(output));
+ }
}
+}
+
+template <typename T, reference_ops::ComparisonFn<T> opname>
+void Comparison(const TfLiteTensor* input1, const TfLiteTensor* input2,
+ TfLiteTensor* output, bool requires_broadcast) {
+ ComparisonParams op_params;
+ requires_broadcast
+ ? reference_ops::BroadcastComparison4DSlowImpl<T, opname>(
+ op_params, GetTensorShape(input1), GetTensorData<T>(input1),
+ GetTensorShape(input2), GetTensorData<T>(input2),
+ GetTensorShape(output), GetTensorData<bool>(output))
+ : reference_ops::ComparisonImpl<T, opname>(
+ op_params, GetTensorShape(input1), GetTensorData<T>(input1),
+ GetTensorShape(input2), GetTensorData<T>(input2),
+ GetTensorShape(output), GetTensorData<bool>(output));
+}
TfLiteStatus EqualEval(TfLiteContext* context, TfLiteNode* node) {
const TfLiteTensor* input1 = GetInput(context, node, kInputTensor1);
@@ -127,24 +115,28 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteBool:
- TF_LITE_COMPARISON(bool, Equal, requires_broadcast);
+ Comparison<bool, reference_ops::EqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, Equal, requires_broadcast);
+ Comparison<float, reference_ops::EqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, Equal, requires_broadcast);
+ Comparison<int32_t, reference_ops::EqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, Equal, requires_broadcast);
+ Comparison<int64_t, reference_ops::EqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedEqual<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::EqualFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedEqual<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::EqualFn>(
+ input1, input2, output, requires_broadcast);
break;
default:
context->ReportError(
@@ -155,7 +147,6 @@
return kTfLiteOk;
}
-// TODO(renjieliu): Refactor the logic to avoid duplications.
TfLiteStatus NotEqualEval(TfLiteContext* context, TfLiteNode* node) {
const TfLiteTensor* input1 = GetInput(context, node, kInputTensor1);
const TfLiteTensor* input2 = GetInput(context, node, kInputTensor2);
@@ -163,24 +154,28 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteBool:
- TF_LITE_COMPARISON(bool, NotEqual, requires_broadcast);
+ Comparison<bool, reference_ops::NotEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, NotEqual, requires_broadcast);
+ Comparison<float, reference_ops::NotEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, NotEqual, requires_broadcast);
+ Comparison<int32_t, reference_ops::NotEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, NotEqual, requires_broadcast);
+ Comparison<int64_t, reference_ops::NotEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedNotEqual<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::NotEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedNotEqual<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::NotEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
default:
context->ReportError(
@@ -198,21 +193,24 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, Greater, requires_broadcast);
+ Comparison<float, reference_ops::GreaterFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, Greater, requires_broadcast);
+ Comparison<int32_t, reference_ops::GreaterFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, Greater, requires_broadcast);
+ Comparison<int64_t, reference_ops::GreaterFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedGreater<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::GreaterFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedGreater<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::GreaterFn>(
+ input1, input2, output, requires_broadcast);
break;
default:
context->ReportError(context,
@@ -230,21 +228,24 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, GreaterEqual, requires_broadcast);
+ Comparison<float, reference_ops::GreaterEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, GreaterEqual, requires_broadcast);
+ Comparison<int32_t, reference_ops::GreaterEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, GreaterEqual, requires_broadcast);
+ Comparison<int64_t, reference_ops::GreaterEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedGreaterEqual<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::GreaterEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedGreaterEqual<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::GreaterEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
default:
context->ReportError(context,
@@ -262,21 +263,24 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, Less, requires_broadcast);
+ Comparison<float, reference_ops::LessFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, Less, requires_broadcast);
+ Comparison<int32_t, reference_ops::LessFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, Less, requires_broadcast);
+ Comparison<int64_t, reference_ops::LessFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedLess<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::LessFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedLess<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::LessFn>(input1, input2, output,
+ requires_broadcast);
break;
default:
context->ReportError(context,
@@ -294,21 +298,24 @@
bool requires_broadcast = !HaveSameShapes(input1, input2);
switch (input1->type) {
case kTfLiteFloat32:
- TF_LITE_COMPARISON(float, LessEqual, requires_broadcast);
+ Comparison<float, reference_ops::LessEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt32:
- TF_LITE_COMPARISON(int32_t, LessEqual, requires_broadcast);
+ Comparison<int32_t, reference_ops::LessEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteInt64:
- TF_LITE_COMPARISON(int64_t, LessEqual, requires_broadcast);
+ Comparison<int64_t, reference_ops::LessEqualFn>(input1, input2, output,
+ requires_broadcast);
break;
case kTfLiteUInt8:
- EvalQuantizedLessEqual<uint8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<uint8_t, reference_ops::LessEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
case kTfLiteInt8:
- EvalQuantizedLessEqual<int8_t>(context, node, input1, input2, output,
- requires_broadcast);
+ ComparisonQuantized<int8_t, reference_ops::LessEqualFn>(
+ input1, input2, output, requires_broadcast);
break;
default:
context->ReportError(context,
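The macro-to-template rewrite above hinges on passing the comparison as a non-type template parameter, which preserves the inlining the macros got by textual substitution. A self-contained sketch of the pattern (names invented for illustration; the real code uses reference_ops::ComparisonFn):

    #include <cstdio>

    template <typename T>
    using ComparisonFn = bool (*)(T, T);

    template <typename T>
    bool GreaterFn(T a, T b) { return a > b; }

    // The function pointer is a compile-time constant, so F can be inlined.
    template <typename T, ComparisonFn<T> F>
    void Compare(const T* a, const T* b, bool* out, int n) {
      for (int i = 0; i < n; ++i) out[i] = F(a[i], b[i]);
    }

    int main() {
      int a[] = {1, 5, 3}, b[] = {2, 2, 3};
      bool out[3];
      Compare<int, GreaterFn<int>>(a, b, out, 3);
      for (bool v : out) std::printf("%d ", v);  // prints: 0 1 0
    }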
diff --git a/tensorflow/lite/kernels/fill_test.cc b/tensorflow/lite/kernels/fill_test.cc
index b98d5b2..5e359a8 100644
--- a/tensorflow/lite/kernels/fill_test.cc
+++ b/tensorflow/lite/kernels/fill_test.cc
@@ -58,11 +58,11 @@
TEST(FillOpModel, FillInt64) {
FillOpModel m({TensorType_INT32, {2}}, {TensorType_INT64});
m.PopulateTensor<int32_t>(m.input1(), {2, 4});
- m.PopulateTensor<int64_t>(m.input2(), {2 ^ 45});
+ m.PopulateTensor<int64_t>(m.input2(), {1LL << 45});
m.Invoke();
EXPECT_THAT(m.ExtractVector<int64_t>(m.output()),
- ElementsAreArray({2 ^ 45, 2 ^ 45, 2 ^ 45, 2 ^ 45, 2 ^ 45, 2 ^ 45,
- 2 ^ 45, 2 ^ 45}));
+ ElementsAreArray({1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45,
+ 1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45}));
EXPECT_THAT(m.GetTensorShape(m.output()), ElementsAreArray({2, 4}));
}
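The bug being fixed here deserves a callout: in C++, `^` is bitwise XOR, not exponentiation, so `2 ^ 45` evaluates to 47 rather than 2 to the 45th power. A standalone two-liner confirms it:

    #include <cstdio>

    int main() {
      std::printf("%d\n", 2 ^ 45);       // 47: 0b000010 XOR 0b101101
      std::printf("%lld\n", 1LL << 45);  // 35184372088832
    }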
diff --git a/tensorflow/lite/kernels/internal/BUILD b/tensorflow/lite/kernels/internal/BUILD
index b30da13..8c32072 100644
--- a/tensorflow/lite/kernels/internal/BUILD
+++ b/tensorflow/lite/kernels/internal/BUILD
@@ -421,6 +421,7 @@
"reference/integer_ops/pooling.h",
"reference/integer_ops/softmax.h",
"reference/integer_ops/tanh.h",
+ "reference/integer_ops/transpose_conv.h",
"reference/logistic.h",
"reference/maximum_minimum.h",
"reference/mul.h",
diff --git a/tensorflow/lite/kernels/internal/optimized/neon_tensor_utils.cc b/tensorflow/lite/kernels/internal/optimized/neon_tensor_utils.cc
index 97a1b80..b23e030 100644
--- a/tensorflow/lite/kernels/internal/optimized/neon_tensor_utils.cc
+++ b/tensorflow/lite/kernels/internal/optimized/neon_tensor_utils.cc
@@ -316,6 +316,15 @@
return reinterpret_cast<const int8_t*>(shuffled_vectors);
}
+// Notes on the speed of this version vs. the baseline (from memory):
+// - With 256K of L1, a large number of vectors can be kept in cache, and
+//   simply rearranging the loop to put rows on the outside and batches on
+//   the inside gave a reasonable speedup.
+// - The sdot instruction gave another nice speedup.
+// - Repeated attempts to do better than the current implementation, using
+//   loop unrolling and instruction reordering to avoid stalls, did not yield
+//   significant gains. This code nevertheless remains well below what the
+//   processor spec sheet suggests is possible.
static void DotprodMatrixBatchFourVectorMultiplyAccumulate(
const int8_t* __restrict__ matrix, const int m_rows, const int m_cols,
const int8_t* vectors, const float* scaling_factors, int n_batch,
@@ -334,6 +343,8 @@
const int8* vec_ptr = shuffled_vectors + (batch * m_cols);
const float* scaling_factors_ptr = scaling_factors + batch;
const uint64_t wide_rows = m_rows * sizeof(float);
+ const int8* mat_ptr2 = matrix + ((row + 2) * m_cols);
+ const int8* mat_ptr3 = matrix + ((row + 3) * m_cols);
asm volatile(
// Zero out the accumulator registers.
@@ -347,6 +358,10 @@
// Read 16 more bytes from a pair of matrix rows.
"ld1 {v12.16b}, [%[mat_ptr0]], #16\n"
+ // Prefetch two rows ahead.
+ "prfm pldl1strm, [%[mat_ptr2]]\n"
+ "prfm pldl1strm, [%[mat_ptr3]]\n"
+
// Read from input vectors 4 times; 64 bytes total.
// Each 16-byte register contains parts of 4 vectors; see the
// shuffle logic above.
@@ -364,6 +379,10 @@
"ld1 {v11.16b}, [%[vec_ptr]], #16\n"
".word 0x4face961 // sdot v1.4s, v11.16b, v12.4b[3]\n"
+ // Update prefetch pointers.
+ "add %[mat_ptr2], %[mat_ptr2], #16\n"
+ "add %[mat_ptr3], %[mat_ptr3], #16\n"
+
// Re-use those vectors for the next row as well.
"ld1 {v13.16b}, [%[mat_ptr1]], #16\n"
".word 0x4f8de102 // sdot v2.4s, v8.16b, v13.4b[0]\n"
@@ -421,7 +440,8 @@
"st2 {v9.s, v10.s}[2], [%[result_ptr]], %[wide_rows]\n"
"st2 {v9.s, v10.s}[3], [%[result_ptr]], %[wide_rows]\n"
: [ mat_ptr0 ] "+r"(mat_ptr0), [ mat_ptr1 ] "+r"(mat_ptr1),
- [ vec_ptr ] "+r"(vec_ptr), [ result_ptr ] "+r"(result_ptr)
+ [ vec_ptr ] "+r"(vec_ptr), [ result_ptr ] "+r"(result_ptr),
+ [ mat_ptr2 ] "+r"(mat_ptr2), [ mat_ptr3 ] "+r"(mat_ptr3)
: [ mat_ptr0_end ] "r"(mat_ptr0_end),
[ scaling_factors_ptr ] "r"(scaling_factors_ptr),
[ wide_rows ] "r"(wide_rows)
@@ -546,6 +566,83 @@
free(shuffled_vectors_free);
}
+// The DotprodMatrixBatchFourVectorMultiplyAccumulate kernel processes 4
+// vectors in the same time as the baseline processes 1 vector. However, it
+// requires 4 vectors of input.
+//
+// To take advantage of this speed difference, we add some zero-valued
+// vectors to the batch so that n_batch is a multiple of 4, run
+// DotprodMatrixBatchFourVectorMultiplyAccumulate on the padded batch, and
+// extract just the results we want at the end (ignoring the extra padding
+// outputs).
+//
+// The relative cost of the padding is large when the matrix is smaller than
+// 128x128, so we don't use this code path on small matrices. On larger
+// matrices, the computation cost dwarfs the padding cost, making this code
+// viable.
+//
+// If we ignore the cost of padding, this kernel is:
+// 1x the speed of NeonMatrixBatchVectorMultiplyImpl for n_batch = 1
+// 2x the speed of NeonMatrixBatchVectorMultiplyImpl for n_batch = 2
+// 3x the speed of NeonMatrixBatchVectorMultiplyImpl for n_batch = 3
+// ...
+//
+// We don't use this kernel when n_batch = 1 because the baseline kernel
+// is fine for that case.
+void DotprodMatrixBatchPaddedFourVectorMultiplyAccumulate(
+ const int8_t* __restrict__ matrix, const int m_rows, const int m_cols,
+ const int8_t* vectors, const float* scaling_factors, int n_batch,
+ float* __restrict__ result) {
+ const int kWeightsPerUint32 = 4;
+
+ // Round to the nearest multiple of 4.
+ int batch_round_up = n_batch;
+ if (n_batch % 4 != 0) {
+ batch_round_up += (4 - n_batch % 4);
+ }
+ TFLITE_CHECK_LE(n_batch, batch_round_up);
+
+ void* padded_vectors_free;
+ const int padded_vectors_size = batch_round_up * m_cols;
+ int8_t* padded_vectors = reinterpret_cast<int8_t*>(aligned_alloc(
+ kWeightsPerUint32, padded_vectors_size, &padded_vectors_free));
+ memset(padded_vectors, 0, padded_vectors_size);
+
+ void* padded_result_free;
+ const int result_size = n_batch * m_rows * sizeof(float);
+ const int padded_result_size = batch_round_up * m_rows * sizeof(float);
+ float* padded_result = reinterpret_cast<float*>(aligned_alloc(
+ kWeightsPerUint32, padded_result_size, &padded_result_free));
+ memcpy(padded_result, result, result_size);
+ memset(reinterpret_cast<char*>(padded_result) + result_size, 0,
+ padded_result_size - result_size);
+
+ // Copy the input into the padded data structure.
+ TFLITE_CHECK_LE(n_batch * m_cols, padded_vectors_size);
+ memcpy(padded_vectors, vectors, n_batch * m_cols);
+
+ void* padded_scaling_factors_free;
+ const int padded_scaling_factors_size = batch_round_up * sizeof(float);
+ float* padded_scaling_factors = reinterpret_cast<float*>(
+ aligned_alloc(kWeightsPerUint32, padded_scaling_factors_size,
+ &padded_scaling_factors_free));
+ TFLITE_CHECK_LE(n_batch * sizeof(float), padded_scaling_factors_size);
+ TFLITE_CHECK_LE(batch_round_up * sizeof(float), padded_scaling_factors_size);
+ memset(padded_scaling_factors, 0, batch_round_up * sizeof(float));
+ memcpy(padded_scaling_factors, scaling_factors, n_batch * sizeof(float));
+
+ // Call the main kernel.
+ DotprodMatrixBatchFourVectorMultiplyAccumulate(
+ matrix, m_rows, m_cols, padded_vectors, padded_scaling_factors,
+ batch_round_up, padded_result);
+
+ memcpy(result, padded_result, result_size);
+
+ free(padded_result_free);
+ free(padded_vectors_free);
+ free(padded_scaling_factors_free);
+}
+
static void DotprodSparseMatrixBatchVectorMultiplyAccumulate(
const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows,
const int m_cols, const int8_t* __restrict__ vectors,
@@ -937,6 +1034,11 @@
DotprodMatrixBatchFourVectorMultiplyAccumulate(
matrix, m_rows, m_cols, vectors, scaling_factors, n_batch, result);
return;
+ } else if (result_stride == 1 && n_batch >= 2 &&
+ m_rows * m_cols >= 128 * 128) {
+ DotprodMatrixBatchPaddedFourVectorMultiplyAccumulate(
+ matrix, m_rows, m_cols, vectors, scaling_factors, n_batch, result);
+ return;
}
}
#endif // __aarch64__
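A small note on the batch round-up in the padded kernel: the if-based form above is equivalent to the usual branch-free idiom, sketched here for reference:

    // Round n up to the next multiple of 4; matches the if-based code above.
    inline int RoundUpTo4(int n) { return (n + 3) & ~3; }
    // RoundUpTo4(1) == 4, RoundUpTo4(4) == 4, RoundUpTo4(5) == 8.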
diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h b/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h
new file mode 100644
index 0000000..1ad6e20
--- /dev/null
+++ b/tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h
@@ -0,0 +1,118 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_
+#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_
+
+#include "tensorflow/lite/kernels/internal/common.h"
+
+namespace tflite {
+namespace reference_integer_ops {
+
+// Fixed-point per-channel-quantization transpose convolution reference kernel.
+inline void TransposeConv(
+ const ConvParams& params, const int32* output_multiplier,
+ const int32* output_shift, const RuntimeShape& input_shape,
+ const int8* input_data, const RuntimeShape& filter_shape,
+ const int8* filter_data, const RuntimeShape& output_shape,
+ int8* output_data, const RuntimeShape& im2col_shape, int8* im2col_data,
+ int32* scratch_buffer) {
+ const int stride_width = params.stride_width;
+ const int stride_height = params.stride_height;
+ const int pad_width = params.padding_values.width;
+ const int pad_height = params.padding_values.height;
+ TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
+ TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
+ TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
+ (void)im2col_data; // only used in optimized code.
+ (void)im2col_shape; // only used in optimized code.
+
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
+ const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int filter_height = filter_shape.Dims(1);
+ const int filter_width = filter_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ const int32 input_offset = params.input_offset;
+ const int32 output_offset = params.output_offset;
+ const int32 output_activation_min = std::numeric_limits<int8_t>::min();
+ const int32 output_activation_max = std::numeric_limits<int8_t>::max();
+ TFLITE_DCHECK_LE(output_activation_min, output_activation_max);
+
+ const int num_elements = output_shape.FlatSize();
+ // We need to initialize scratch_buffer to all 0s, as we apply the same
+  // 'scatter'-based trick as in the float version.
+ memset(scratch_buffer, 0, num_elements * sizeof(int32));
+
+ // Loop through input elements one at a time.
+ for (int batch = 0; batch < batches; ++batch) {
+ for (int in_y = 0; in_y < input_height; ++in_y) {
+ for (int in_x = 0; in_x < input_width; ++in_x) {
+ for (int in_channel = 0; in_channel < input_depth; ++in_channel) {
+ // Loop through the output elements it will influence.
+ const int out_x_origin = (in_x * stride_width) - pad_width;
+ const int out_y_origin = (in_y * stride_height) - pad_height;
+ for (int filter_y = 0; filter_y < filter_height; ++filter_y) {
+ for (int filter_x = 0; filter_x < filter_width; ++filter_x) {
+ for (int out_channel = 0; out_channel < output_depth;
+ ++out_channel) {
+ // Compute output element location.
+ const int out_x = out_x_origin + filter_x;
+ const int out_y = out_y_origin + filter_y;
+ // We cannot accumulate out of bounds.
+ if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) &&
+ (out_y < output_height)) {
+ const int8 input_value = input_data[Offset(
+ input_shape, batch, in_y, in_x, in_channel)];
+ const int8 filter_value =
+ filter_data[Offset(filter_shape, out_channel, filter_y,
+ filter_x, in_channel)];
+ scratch_buffer[Offset(output_shape, batch, out_y, out_x,
+ out_channel)] +=
+ (input_value + input_offset) * filter_value;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ for (int batch = 0; batch < batches; ++batch) {
+ for (int out_y = 0; out_y < output_height; ++out_y) {
+ for (int out_x = 0; out_x < output_width; ++out_x) {
+ for (int out_channel = 0; out_channel < output_depth; ++out_channel) {
+ int32 acc = scratch_buffer[Offset(output_shape, batch, out_y, out_x,
+ out_channel)];
+ acc = MultiplyByQuantizedMultiplier(
+ acc, output_multiplier[out_channel], output_shift[out_channel]);
+ acc += output_offset;
+ acc = std::max(acc, output_activation_min);
+ acc = std::min(acc, output_activation_max);
+ output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] =
+ static_cast<int8_t>(acc);
+ }
+ }
+ }
+ }
+}
+
+} // namespace reference_integer_ops
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_TRANSPOSE_CONV_H_
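For readers of the new reference kernel: the second loop nest is the standard per-channel requantization. Assuming the usual TFLite semantics of MultiplyByQuantizedMultiplier (a fixed-point multiply by a multiplier scaled by 2^shift), the int32 accumulator maps to int8 roughly as:

    real_scale[c] = input_scale * filter_scale[c] / output_scale
    acc_rescaled  = MultiplyByQuantizedMultiplier(acc, output_multiplier[c],
                                                  output_shift[c])
                  ~ acc * real_scale[c]
    output        = clamp(acc_rescaled + output_offset, -128, 127)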
diff --git a/tensorflow/lite/kernels/internal/tensor_utils_test.cc b/tensorflow/lite/kernels/internal/tensor_utils_test.cc
index 5f4c8fb..6f4a804 100644
--- a/tensorflow/lite/kernels/internal/tensor_utils_test.cc
+++ b/tensorflow/lite/kernels/internal/tensor_utils_test.cc
@@ -643,7 +643,8 @@
MatrixVectorData SetupMatrixVectorData(int rows, int cols, int batch,
bool negative = false,
- bool is_per_channel = false) {
+ bool is_per_channel = false,
+ bool init_to_one = false) {
MatrixVectorData data;
data.rows = rows;
data.cols = cols;
@@ -660,7 +661,7 @@
data.vectors.push_back(sign * (i % 50));
}
data.scale_factors = {1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8};
- data.results.resize(rows * batch, 0);
+ data.results.resize(rows * batch, init_to_one ? 1 : 0);
data.zeroed_matrix = data.matrix;
@@ -715,10 +716,11 @@
return data;
}
-std::vector<float> TestDotprodMatrixBatchVectorMultiply(int rows, int cols,
- int batch,
- bool negative = false) {
- MatrixVectorData data = SetupMatrixVectorData(rows, cols, batch, negative);
+std::vector<float> TestDotprodMatrixBatchVectorMultiply(
+ int rows, int cols, int batch, bool negative = false,
+ bool init_to_one = false) {
+ MatrixVectorData data =
+ SetupMatrixVectorData(rows, cols, batch, negative, false, init_to_one);
// All partial sums in this computation are small enough to fit in the
// mantissa of a float, and the scale factors are all integers, so we expect
@@ -777,6 +779,13 @@
ASSERT_THAT(
TestDotprodMatrixBatchVectorMultiply(4, 32, 2, kNegative),
testing::ElementsAre(3436, 3522, 1590, 6972, 2516, 20520, 456, 10628));
+
+ // Initialize the results vector with 1s to verify that the code adds
+ // to the results vector instead of zero-ing it first.
+ const bool kInitToOne = true;
+ ASSERT_THAT(
+ TestDotprodMatrixBatchVectorMultiply(4, 32, 2, kNegative, kInitToOne),
+ testing::ElementsAre(3437, 3523, 1591, 6973, 2517, 20521, 457, 10629));
}
TEST(uKernels, PerChannelDotprodMatrixBatchVectorMultiplyAccumulateTest) {
@@ -1553,10 +1562,21 @@
const int rows = state.range(0);
const int cols = state.range(1);
const int batch = state.range(2);
+ const int copies = state.range(3);
- tflite::tensor_utils::MatrixVectorData data =
- tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch);
+  // For some benchmarks we make multiple copies of the matrix data, which
+  // lets us measure the performance difference between running entirely in
+  // cache and running out of cache.
+ std::vector<tflite::tensor_utils::MatrixVectorData> datas;
+ for (int i = 0; i < copies; i++) {
+ datas.push_back(
+ tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch));
+ }
+
+ int copy = 0;
for (auto _ : state) {
+ copy = (copy + 1) % datas.size();
+ auto& data = datas[copy];
for (int i = 0; i < batch; i++) {
tflite::tensor_utils::MatrixBatchVectorMultiplyAccumulate(
data.matrix.data(), data.rows, data.cols,
@@ -1567,33 +1587,48 @@
}
}
BENCHMARK(BM_DotprodBatchOneMultiply)
- ->Args({16, 16, 1})
- ->Args({16, 16, 4})
- ->Args({32, 32, 1})
- ->Args({32, 32, 4})
- ->Args({64, 64, 1})
- ->Args({64, 64, 4})
- ->Args({128, 128, 1})
- ->Args({128, 128, 4})
- ->Args({992, 992, 1})
- ->Args({992, 992, 8})
- ->Args({1024, 1024, 1})
- ->Args({1024, 1024, 4})
- ->Args({1024, 1024, 8})
- ->Args({640, 2048, 1})
- ->Args({640, 2048, 4})
- ->Args({640, 2048, 8})
- ->Args({2048, 2048, 1})
- ->Args({2048, 2048, 8});
+ ->Args({16, 16, 1, 1})
+ ->Args({16, 16, 4, 1})
+ ->Args({32, 32, 1, 1})
+ ->Args({32, 32, 4, 1})
+ ->Args({64, 64, 1, 1})
+ ->Args({64, 64, 4, 1})
+ ->Args({128, 128, 1, 1})
+ ->Args({128, 128, 4, 1})
+ ->Args({992, 992, 1, 1})
+ ->Args({992, 992, 8, 1})
+ ->Args({1024, 1024, 1, 1})
+ ->Args({1024, 1024, 1, 8})
+ ->Args({1024, 1024, 4, 1})
+ ->Args({1024, 1024, 4, 8})
+ ->Args({1024, 1024, 8, 1})
+ ->Args({640, 2048, 1, 1})
+ ->Args({640, 2048, 4, 1})
+ ->Args({640, 2048, 8, 1})
+ ->Args({640, 2048, 8, 8})
+ ->Args({2048, 2048, 1, 1})
+ ->Args({2048, 2048, 1, 8})
+ ->Args({2048, 2048, 8, 1});
void BM_DotprodBatchFourMultiply(benchmark::State& state) {
const int rows = state.range(0);
const int cols = state.range(1);
const int batch = state.range(2);
+ const int copies = state.range(3);
- tflite::tensor_utils::MatrixVectorData data =
- tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch);
+  // For some benchmarks we make multiple copies of the matrix data, which
+  // lets us measure the performance difference between running entirely in
+  // cache and running out of cache.
+ std::vector<tflite::tensor_utils::MatrixVectorData> datas;
+ for (int i = 0; i < copies; i++) {
+ datas.push_back(
+ tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch));
+ }
+
+ int copy = 0;
for (auto _ : state) {
+ copy = (copy + 1) % datas.size();
+ auto& data = datas[copy];
tflite::tensor_utils::MatrixBatchVectorMultiplyAccumulate(
data.matrix.data(), data.rows, data.cols, data.vectors.data(),
data.scale_factors.data(), data.batch, &data.results[0], 1);
@@ -1601,32 +1636,57 @@
}
}
BENCHMARK(BM_DotprodBatchFourMultiply)
- ->Args({16, 16, 4})
- ->Args({32, 32, 4})
- ->Args({64, 64, 4})
- ->Args({64, 256, 64})
- ->Args({64, 256, 256})
- ->Args({64, 256, 1024})
- ->Args({64, 256, 12544})
- ->Args({128, 128, 4})
- ->Args({640, 640, 4})
- ->Args({992, 992, 8})
- ->Args({1024, 1024, 4})
- ->Args({1024, 1024, 8})
- ->Args({1024, 1024, 256})
- ->Args({640, 2048, 4})
- ->Args({640, 2048, 8})
- ->Args({2048, 2048, 4})
- ->Args({2048, 2048, 8});
+ ->Args({16, 16, 4, 1})
+ ->Args({32, 32, 4, 1})
+ ->Args({64, 64, 4, 1})
+ ->Args({64, 256, 64, 1})
+ ->Args({64, 256, 256, 1})
+ ->Args({64, 256, 1024, 1})
+ ->Args({64, 256, 12544, 1})
+ ->Args({128, 128, 2, 1})
+ ->Args({128, 128, 3, 1})
+ ->Args({128, 128, 4, 1})
+ ->Args({128, 128, 5, 1})
+ ->Args({640, 640, 4, 1})
+ ->Args({992, 992, 8, 1})
+ ->Args({1024, 1024, 2, 1})
+ ->Args({1024, 1024, 3, 1})
+ ->Args({1024, 1024, 4, 1})
+ ->Args({1024, 1024, 5, 1})
+ ->Args({1024, 1024, 8, 1})
+ ->Args({1024, 1024, 8, 8})
+ ->Args({1024, 1024, 256, 1})
+ ->Args({640, 2048, 2, 1})
+ ->Args({640, 2048, 3, 1})
+ ->Args({640, 2048, 4, 1})
+ ->Args({640, 2048, 4, 8})
+ ->Args({640, 2048, 8, 1})
+ ->Args({2048, 2048, 3, 1})
+ ->Args({2048, 2048, 4, 1})
+ ->Args({2048, 2048, 4, 8})
+ ->Args({2048, 2048, 5, 1})
+ ->Args({2048, 2048, 8, 1});
void BM_DotprodSparseMultiply(benchmark::State& state) {
const int rows = state.range(0);
const int cols = state.range(1);
const int batch = state.range(2);
- tflite::tensor_utils::MatrixVectorData data =
- tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch);
+ const int copies = state.range(3);
+
+  // For some benchmarks we make multiple copies of the matrix data, which
+  // lets us measure the performance difference between running entirely in
+  // cache and running out of cache.
+ std::vector<tflite::tensor_utils::MatrixVectorData> datas;
+ for (int i = 0; i < copies; i++) {
+ datas.push_back(
+ tflite::tensor_utils::SetupMatrixVectorData(rows, cols, batch));
+ }
+
+ int copy = 0;
for (auto _ : state) {
+ copy = (copy + 1) % datas.size();
+ auto& data = datas[copy];
tflite::tensor_utils::SparseMatrixBatchVectorMultiplyAccumulate(
data.sparse_matrix.data(), data.ledger.data(), data.rows, data.cols,
data.vectors.data(), data.scale_factors.data(), data.batch,
@@ -1635,17 +1695,17 @@
}
}
BENCHMARK(BM_DotprodSparseMultiply)
- ->Args({128, 128, 1})
- ->Args({128, 128, 4})
- ->Args({640, 640, 4})
- ->Args({992, 992, 8})
- ->Args({1024, 1024, 1})
- ->Args({1024, 1024, 4})
- ->Args({1024, 1024, 8})
- ->Args({640, 2048, 1})
- ->Args({640, 2048, 4})
- ->Args({640, 2048, 8})
- ->Args({2048, 2048, 1})
- ->Args({2048, 2048, 8});
+ ->Args({128, 128, 1, 1})
+ ->Args({128, 128, 4, 1})
+ ->Args({640, 640, 4, 1})
+ ->Args({992, 992, 8, 1})
+ ->Args({1024, 1024, 1, 1})
+ ->Args({1024, 1024, 4, 1})
+ ->Args({1024, 1024, 8, 1})
+ ->Args({640, 2048, 1, 1})
+ ->Args({640, 2048, 4, 1})
+ ->Args({640, 2048, 8, 1})
+ ->Args({2048, 2048, 1, 1})
+ ->Args({2048, 2048, 8, 1});
#endif // DOTPROD_BENCHMARKS
diff --git a/tensorflow/lite/kernels/kernel_util_test.cc b/tensorflow/lite/kernels/kernel_util_test.cc
index d410d2b..79e19eb 100644
--- a/tensorflow/lite/kernels/kernel_util_test.cc
+++ b/tensorflow/lite/kernels/kernel_util_test.cc
@@ -146,7 +146,7 @@
TEST_F(KernelUtilTest, CheckAndPopulate) {
// Create input.
- TfLiteTensor input;
+ TfLiteTensor input = {};
input.type = kTfLiteInt8;
input.allocation_type = kTfLiteArenaRw;
input.dims = TfLiteIntArrayCreate(1);
@@ -163,7 +163,7 @@
input.quantization.params = reinterpret_cast<void*>(input_params);
// Create filter.
- TfLiteTensor filter;
+ TfLiteTensor filter = {};
filter.type = kTfLiteInt8;
filter.allocation_type = kTfLiteArenaRw;
filter.dims = TfLiteIntArrayCreate(4);
@@ -188,7 +188,7 @@
filter.quantization.params = reinterpret_cast<void*>(filter_params);
// Create bias.
- TfLiteTensor bias;
+ TfLiteTensor bias = {};
bias.type = kTfLiteInt32;
bias.allocation_type = kTfLiteArenaRw;
bias.dims = TfLiteIntArrayCreate(4);
@@ -208,7 +208,7 @@
bias.quantization.params = reinterpret_cast<void*>(bias_params);
// Create output.
- TfLiteTensor output;
+ TfLiteTensor output = {};
output.type = kTfLiteInt8;
output.allocation_type = kTfLiteArenaRw;
output.dims = nullptr;
@@ -252,7 +252,7 @@
TEST_F(KernelUtilTest, CheckAndPopulateShift) {
// Create input of type kTfLiteUInt8.
- TfLiteTensor input;
+ TfLiteTensor input = {};
input.type = kTfLiteUInt8;
input.allocation_type = kTfLiteArenaRw;
input.dims = TfLiteIntArrayCreate(1);
@@ -269,7 +269,7 @@
input.quantization.params = reinterpret_cast<void*>(input_params);
// Create filter of type kTfLiteUInt8.
- TfLiteTensor filter;
+ TfLiteTensor filter = {};
filter.type = kTfLiteUInt8;
filter.allocation_type = kTfLiteArenaRw;
filter.dims = TfLiteIntArrayCreate(4);
@@ -291,7 +291,7 @@
filter.quantization.params = reinterpret_cast<void*>(filter_params);
// Create bias for kTfLiteUInt8.
- TfLiteTensor bias;
+ TfLiteTensor bias = {};
bias.type = kTfLiteUInt8;
bias.allocation_type = kTfLiteArenaRw;
bias.dims = TfLiteIntArrayCreate(4);
@@ -311,7 +311,7 @@
bias.quantization.params = reinterpret_cast<void*>(bias_params);
// Create output for kTfLiteUInt8.
- TfLiteTensor output;
+ TfLiteTensor output = {};
output.type = kTfLiteUInt8;
output.allocation_type = kTfLiteArenaRw;
output.dims = nullptr;
@@ -359,7 +359,7 @@
#ifndef __APPLE__ // Some Apple toolchains don't support std::ldexp
TEST_F(KernelUtilTest, CheckAndPopulateZeroValue) {
// Create input.
- TfLiteTensor input;
+ TfLiteTensor input = {};
input.type = kTfLiteInt8;
input.allocation_type = kTfLiteArenaRw;
input.dims = TfLiteIntArrayCreate(1);
@@ -376,7 +376,7 @@
input.quantization.params = reinterpret_cast<void*>(input_params);
// Create filter.
- TfLiteTensor filter;
+ TfLiteTensor filter = {};
filter.type = kTfLiteInt8;
filter.allocation_type = kTfLiteArenaRw;
filter.dims = TfLiteIntArrayCreate(4);
@@ -401,7 +401,7 @@
filter.quantization.params = reinterpret_cast<void*>(filter_params);
// Create bias.
- TfLiteTensor bias;
+ TfLiteTensor bias = {};
bias.type = kTfLiteInt32;
bias.allocation_type = kTfLiteArenaRw;
bias.dims = TfLiteIntArrayCreate(4);
@@ -421,7 +421,7 @@
bias.quantization.params = reinterpret_cast<void*>(bias_params);
// Create output.
- TfLiteTensor output;
+ TfLiteTensor output = {};
output.type = kTfLiteInt8;
output.allocation_type = kTfLiteArenaRw;
output.dims = nullptr;
@@ -466,7 +466,7 @@
TEST_F(KernelUtilTest, CheckAndPopulateUint8) {
// Create input.
- TfLiteTensor input;
+ TfLiteTensor input = {};
input.type = kTfLiteUInt8;
input.allocation_type = kTfLiteArenaRw;
input.dims = TfLiteIntArrayCreate(1);
@@ -483,7 +483,7 @@
input.quantization.params = reinterpret_cast<void*>(input_params);
// Create filter.
- TfLiteTensor filter;
+ TfLiteTensor filter = {};
filter.type = kTfLiteUInt8;
filter.allocation_type = kTfLiteArenaRw;
filter.dims = TfLiteIntArrayCreate(4);
@@ -505,7 +505,7 @@
filter.quantization.params = reinterpret_cast<void*>(filter_params);
// Create bias.
- TfLiteTensor bias;
+ TfLiteTensor bias = {};
bias.type = kTfLiteInt32;
bias.allocation_type = kTfLiteArenaRw;
bias.dims = TfLiteIntArrayCreate(4);
@@ -521,7 +521,7 @@
bias.quantization.params = reinterpret_cast<void*>(bias_params);
// Create output.
- TfLiteTensor output;
+ TfLiteTensor output = {};
output.type = kTfLiteUInt8;
output.allocation_type = kTfLiteArenaRw;
output.dims = nullptr;
@@ -564,7 +564,7 @@
TEST_F(KernelUtilTest, CheckAndPopulateWithoutBias) {
// Create input.
- TfLiteTensor input;
+ TfLiteTensor input = {};
input.type = kTfLiteUInt8;
input.allocation_type = kTfLiteArenaRw;
input.dims = TfLiteIntArrayCreate(1);
@@ -581,7 +581,7 @@
input.quantization.params = reinterpret_cast<void*>(input_params);
// Create filter.
- TfLiteTensor filter;
+ TfLiteTensor filter = {};
filter.type = kTfLiteUInt8;
filter.allocation_type = kTfLiteArenaRw;
filter.dims = TfLiteIntArrayCreate(4);
@@ -603,7 +603,7 @@
filter.quantization.params = reinterpret_cast<void*>(filter_params);
// Create output.
- TfLiteTensor output;
+ TfLiteTensor output = {};
output.type = kTfLiteUInt8;
output.allocation_type = kTfLiteArenaRw;
output.dims = nullptr;
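The `= {}` changes above value-initialize the stack structs. Without them, any TfLiteTensor member the test never assigns holds an indeterminate value, which sanitizers flag and which the free paths at the end of each test may then act on. A minimal illustration, with a hypothetical struct standing in for TfLiteTensor:

    struct Tensor { int type; void* data; int* dims; };

    void Example() {
      Tensor a;       // members indeterminate; reading a.dims is undefined
      Tensor b = {};  // value-initialized: type == 0, pointers are nullptr
      (void)a; (void)b;
    }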
diff --git a/tensorflow/lite/kernels/read_variable.cc b/tensorflow/lite/kernels/read_variable.cc
index 4996bcc..891cad9 100644
--- a/tensorflow/lite/kernels/read_variable.cc
+++ b/tensorflow/lite/kernels/read_variable.cc
@@ -20,6 +20,7 @@
#include "tensorflow/lite/c/builtin_op_data.h"
#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/core/subgraph.h"
+#include "tensorflow/lite/experimental/resource/resource_variable.h"
#include "tensorflow/lite/kernels/internal/tensor.h"
#include "tensorflow/lite/kernels/kernel_util.h"
#include "tensorflow/lite/kernels/op_macros.h"
@@ -36,10 +37,10 @@
TF_LITE_ENSURE_EQ(context, node->inputs->size, 1);
TF_LITE_ENSURE_EQ(context, node->outputs->size, 1);
- const TfLiteTensor* input_variable_id_tensor =
+ const TfLiteTensor* input_resource_id_tensor =
GetInput(context, node, kInputVariableId);
- TF_LITE_ENSURE_EQ(context, input_variable_id_tensor->type, kTfLiteInt32);
- TF_LITE_ENSURE_EQ(context, NumElements(input_variable_id_tensor), 1);
+ TF_LITE_ENSURE_EQ(context, input_resource_id_tensor->type, kTfLiteInt32);
+ TF_LITE_ENSURE_EQ(context, NumElements(input_resource_id_tensor), 1);
TfLiteTensor* output = GetOutput(context, node, kOutputValue);
SetTensorToDynamic(output);
@@ -50,20 +51,14 @@
TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
Subgraph* subgraph = reinterpret_cast<Subgraph*>(context->impl_);
- const TfLiteTensor* input_variable_id_tensor =
+ const TfLiteTensor* input_resource_id_tensor =
GetInput(context, node, kInputVariableId);
- int variable_id = input_variable_id_tensor->data.i32[0];
- auto& resource_variables = subgraph->resource_variables();
+ int resource_id = input_resource_id_tensor->data.i32[0];
+ auto& resources = subgraph->resources();
+ auto* variable = resource::GetResourceVariable(&resources, resource_id);
+ TF_LITE_ENSURE(context, variable != nullptr);
- const auto& variable_iterator = resource_variables.find(variable_id);
- if (variable_iterator == resource_variables.end()) {
- context->ReportError(context, "Variable ID %d is read before initialized.",
- variable_id);
- return kTfLiteError;
- }
- auto& variable = variable_iterator->second;
-
- TfLiteTensor* variable_tensor = variable.GetTensor();
+ TfLiteTensor* variable_tensor = variable->GetTensor();
TfLiteTensor* output = GetOutput(context, node, kOutputValue);
TF_LITE_ENSURE_EQ(context, variable_tensor->type, output->type);
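
The read_variable.cc rewrite above folds the hand-written map lookup and "read before initialized" error report into the shared resource::GetResourceVariable helper plus a TF_LITE_ENSURE on its result. A rough sketch of the contract that helper is assumed to satisfy (stand-in types; the real implementation lives in tensorflow/lite/experimental/resource/resource_variable.h):

    #include <map>
    #include <memory>

    // Stand-in for the real resource variable, which owns a TfLiteTensor
    // exposed through GetTensor().
    struct ResourceVariableSketch {};

    // Stand-in for the map returned by subgraph->resources().
    using ResourceMap = std::map<int, std::unique_ptr<ResourceVariableSketch>>;

    // Returns the variable registered under resource_id, or nullptr when that
    // id has not been initialized yet; the kernel converts nullptr into a
    // kTfLiteError via TF_LITE_ENSURE.
    ResourceVariableSketch* GetResourceVariableSketch(ResourceMap* resources,
                                                      int resource_id) {
      auto it = resources->find(resource_id);
      return it == resources->end() ? nullptr : it->second.get();
    }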
diff --git a/tensorflow/lite/kernels/register.cc b/tensorflow/lite/kernels/register.cc
index 8f2c3e4..68e1025 100644
--- a/tensorflow/lite/kernels/register.cc
+++ b/tensorflow/lite/kernels/register.cc
@@ -205,7 +205,9 @@
/* max_version */ 3);
AddBuiltin(BuiltinOperator_SIN, Register_SIN());
AddBuiltin(BuiltinOperator_COS, Register_COS());
- AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, Register_TRANSPOSE_CONV());
+ AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, Register_TRANSPOSE_CONV(),
+ /* min_version */ 1,
+ /* max_version */ 2);
AddBuiltin(BuiltinOperator_TILE, Register_TILE());
AddBuiltin(BuiltinOperator_SUM, Register_SUM(),
/* min_version */ 1,
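
On the register.cc change above: AddBuiltin registers a kernel for every version in the closed range [min_version, max_version], so raising TRANSPOSE_CONV's max_version to 2 lets models carrying the per-channel int8 variant (op version 2) resolve to this kernel. A conceptual sketch of that registration scheme, with hypothetical types standing in for the real Registration machinery:

    #include <map>
    #include <utility>

    struct RegistrationSketch {};  // stand-in for TfLiteRegistration

    // One entry per (builtin op code, version) pair.
    std::map<std::pair<int, int>, RegistrationSketch> registry;

    void AddBuiltinSketch(int op, RegistrationSketch reg, int min_version,
                          int max_version) {
      for (int v = min_version; v <= max_version; ++v) {
        registry[{op, v}] = reg;
      }
    }

The transpose_conv_test.cc change below passes /* version */ 2 through to its SingleOpResolver for the same reason.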
diff --git a/tensorflow/lite/kernels/test_util.h b/tensorflow/lite/kernels/test_util.h
index 61197b7..2180405 100644
--- a/tensorflow/lite/kernels/test_util.h
+++ b/tensorflow/lite/kernels/test_util.h
@@ -168,7 +168,12 @@
// Templated version of AddConstInput().
template <typename T>
int AddConstInput(const TensorData& t, std::initializer_list<T> data) {
- int id = AddTensor(t, data);
+ int id = 0;
+ if (t.per_channel_quantization) {
+ id = AddTensorPerChannelQuant(t);
+ } else {
+ id = AddTensor(t, data);
+ }
inputs_.push_back(id);
return id;
}
diff --git a/tensorflow/lite/kernels/transpose_conv.cc b/tensorflow/lite/kernels/transpose_conv.cc
index 51b51bf..4ae74e6 100644
--- a/tensorflow/lite/kernels/transpose_conv.cc
+++ b/tensorflow/lite/kernels/transpose_conv.cc
@@ -24,6 +24,8 @@
#include "tensorflow/lite/kernels/cpu_backend_context.h"
#include "tensorflow/lite/kernels/eigen_support.h"
#include "tensorflow/lite/kernels/internal/optimized/optimized_ops.h"
+// NOLINTNEXTLINE - This header file shouldn't go to the top.
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h"
#include "tensorflow/lite/kernels/internal/tensor.h"
#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
#include "tensorflow/lite/kernels/internal/types.h"
@@ -75,6 +77,12 @@
int32_t output_multiplier;
int output_shift;
+ // Per channel output multiplier and shift.
+ // TODO(b/144846950): Add a channel dimension index so the kernel can be
+ // more flexible.
+ std::vector<int32_t> per_channel_output_multiplier;
+ std::vector<int32_t> per_channel_output_shift;
+
// The range of the fused activation layer. For example for kNone and
// uint8_t these would be 0 and 255.
int32_t output_activation_min;
@@ -144,7 +152,7 @@
}
// Allocate scratch buffer tensor for UInt8 inputs.
- if (input_type == kTfLiteUInt8) {
+ if (input_type == kTfLiteUInt8 || input_type == kTfLiteInt8) {
if (data->scratch_tensor_id == kTensorNotAllocated) {
context->AddTensors(context, 1, &data->scratch_tensor_id);
}
@@ -214,6 +222,11 @@
GetTensorData<uint8>(weights),
GetTensorShape(transposed_weights),
GetTensorData<uint8>(transposed_weights));
+ } else if (weights->type == kTfLiteInt8) {
+ optimized_ops::Transpose(transpose_params, input_shape,
+ GetTensorData<int8>(weights),
+ GetTensorShape(transposed_weights),
+ GetTensorData<int8>(transposed_weights));
} else {
context->ReportError(
context, "Transpose conv only support float & uint8 right now.");
@@ -242,8 +255,9 @@
TF_LITE_ENSURE_EQ(context, NumDimensions(output_shape), 1);
TF_LITE_ENSURE_EQ(context, NumDimensions(input), 4);
TF_LITE_ENSURE_EQ(context, NumDimensions(weights), 4);
- TF_LITE_ENSURE(context,
- input->type == kTfLiteFloat32 || input->type == kTfLiteUInt8);
+ TF_LITE_ENSURE(context, input->type == kTfLiteFloat32 ||
+ input->type == kTfLiteUInt8 ||
+ input->type == kTfLiteInt8);
TF_LITE_ENSURE_EQ(context, weights->type, input->type);
TF_LITE_ENSURE_EQ(context, output->type, input->type);
// Ensure that weights and inputs have the same channel dimension.
@@ -288,7 +302,7 @@
}
}
- if (input->type == kTfLiteUInt8) {
+ if (input->type == kTfLiteUInt8 || input->type == kTfLiteInt8) {
node->temporaries->data[data->scratch_tensor_index] =
data->scratch_tensor_id;
TfLiteTensor* scratch_buffer =
@@ -302,19 +316,24 @@
ResizeTensor(context, output_shape, scratch_buffer));
}
- // Calcuate output multiplier for quantization.
- double real_multiplier = 0.0;
- TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler(
- context, input, weights, output, &real_multiplier));
- int exponent;
- // Populate quantization parameteters with multiplier and shift.
- QuantizeMultiplier(real_multiplier, &data->output_multiplier, &exponent);
- data->output_shift = -exponent;
- // Populate max and min activation range.
- CalculateActivationRangeUint8(kTfLiteActNone, output,
- &data->output_activation_min,
- &data->output_activation_max);
+ TF_LITE_ENSURE_EQ(context, weights->quantization.type,
+ kTfLiteAffineQuantization);
+ const auto* affine_quantization =
+ reinterpret_cast<TfLiteAffineQuantization*>(
+ weights->quantization.params);
+ TF_LITE_ENSURE(context, affine_quantization);
+ TF_LITE_ENSURE(context, affine_quantization->scale);
+ const int number_channel = affine_quantization->scale->size;
+ data->per_channel_output_multiplier.resize(number_channel);
+ data->per_channel_output_shift.resize(number_channel);
+ TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams(
+ context, input, weights, nullptr, output, kTfLiteActNone,
+ &data->output_multiplier, &data->output_shift,
+ &data->output_activation_min, &data->output_activation_max,
+ data->per_channel_output_multiplier.data(),
+ data->per_channel_output_shift.data()));
}
+
return kTfLiteOk;
}
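
The Prepare() rewrite above replaces the single multiplier/shift pair with one pair per output channel, filled in by PopulateConvolutionQuantizationParams. The underlying decomposition is the standard one: for each channel c, the effective scale input_scale * filter_scale[c] / output_scale is rewritten as a Q31 fixed-point multiplier plus a power-of-two shift. A self-contained sketch of that decomposition (mirroring, not quoting, TFLite's QuantizeMultiplier):

    #include <cmath>
    #include <cstdint>

    // Splits real_multiplier into (multiplier / 2^31) * 2^shift, a form the
    // integer kernels can apply with one 32x32 multiply and a rounding shift.
    void QuantizeMultiplierSketch(double real_multiplier, int32_t* multiplier,
                                  int* shift) {
      if (real_multiplier == 0.0) {
        *multiplier = 0;
        *shift = 0;
        return;
      }
      const double q = std::frexp(real_multiplier, shift);  // q in [0.5, 1)
      int64_t q_fixed = static_cast<int64_t>(std::round(q * (1LL << 31)));
      if (q_fixed == (1LL << 31)) {  // rounding pushed q up to exactly 1.0
        q_fixed /= 2;
        ++*shift;
      }
      *multiplier = static_cast<int32_t>(q_fixed);
    }

At evaluation time the reference int8 kernel then applies each channel's multiplier and shift to the accumulator before adding the output zero point.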
@@ -403,6 +422,39 @@
}
}
+void EvalQuantizedPerChannel(TfLiteContext* context,
+ const TfLiteTransposeConvParams* params,
+ OpData* data, const TfLiteTensor* input,
+ const TfLiteTensor* weights,
+ const TfLiteTensor* transposed_weights,
+ TfLiteTensor* col2im, TfLiteTensor* output,
+ TfLiteTensor* scratch_buffer) {
+ tflite::ConvParams op_params;
+ op_params.padding_type = PaddingType::kSame;
+ op_params.padding_values.width = data->padding.width;
+ op_params.padding_values.height = data->padding.height;
+ op_params.padding_values.width_offset = data->padding.width_offset;
+ op_params.padding_values.height_offset = data->padding.height_offset;
+ op_params.stride_width = params->stride_width;
+ op_params.stride_height = params->stride_height;
+ // Need to flip the sign of the input offset to add it directly to the
+ // quantized buffer.
+ op_params.input_offset = -input->params.zero_point;
+ op_params.output_offset = output->params.zero_point;
+ op_params.quantized_activation_min = data->output_activation_min;
+ op_params.quantized_activation_max = data->output_activation_max;
+
+ // TODO(b/143380105): Need to add optimized kernel for int8 quantized
+ // transpose conv.
+ reference_integer_ops::TransposeConv(
+ op_params, data->per_channel_output_multiplier.data(),
+ data->per_channel_output_shift.data(), GetTensorShape(input),
+ GetTensorData<int8>(input), GetTensorShape(weights),
+ GetTensorData<int8>(weights), GetTensorShape(output),
+ GetTensorData<int8>(output), GetTensorShape(col2im),
+ GetTensorData<int8>(col2im), GetTensorData<int32_t>(scratch_buffer));
+}
+
template <KernelType kernel_type>
TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
// Retrieve tensors (All should be allocated by now)
@@ -473,6 +525,21 @@
scratch_buffer);
break;
}
+ case kTfLiteInt8: {
+ TfLiteTensor* scratch_buffer =
+ GetTemporary(context, node, data->scratch_tensor_index);
+ if (IsDynamicTensor(scratch_buffer)) {
+ TF_LITE_ENSURE_OK(context,
+ ResizeTensor(context, output_shape, scratch_buffer));
+ }
+ if (data->weights_are_transposed && !IsConstantTensor(weights)) {
+ ResizeAndTransposeWeights(context, weights, transposed_weights);
+ }
+ EvalQuantizedPerChannel(context, params, data, input, weights,
+ transposed_weights, col2im, output,
+ scratch_buffer);
+ break;
+ }
default:
context->ReportError(context, "Type '%s' is not currently supported.",
TfLiteTypeGetName(input->type));
diff --git a/tensorflow/lite/kernels/transpose_conv_test.cc b/tensorflow/lite/kernels/transpose_conv_test.cc
index 8f89630..9a1a950 100644
--- a/tensorflow/lite/kernels/transpose_conv_test.cc
+++ b/tensorflow/lite/kernels/transpose_conv_test.cc
@@ -50,7 +50,7 @@
std::initializer_list<InputType> filter_data,
const TensorData& input, const TensorData& output,
Padding padding, int stride_w, int stride_h,
- TestType test_type) {
+ TestType test_type, int version = 1) {
// Just to be confusing, transpose_conv has an _input_ named "output_shape"
// that sets the shape of the output tensor of the op :). It must always be
// an int32 1D four element tensor.
@@ -70,7 +70,7 @@
CreateTransposeConvOptions(builder_, padding, stride_w, stride_h)
.Union());
resolver_ = absl::make_unique<SingleOpResolver>(
- BuiltinOperator_TRANSPOSE_CONV, registration);
+ BuiltinOperator_TRANSPOSE_CONV, registration, version);
BuildInterpreter(
{GetShape(output_shape_), GetShape(filter_), GetShape(input_)});
@@ -83,6 +83,8 @@
void SetInput(std::initializer_list<float> data) {
if (std::is_same<InputType, uint8_t>::value) {
QuantizeAndPopulate<uint8_t>(input_, data);
+ } else if (std::is_same<InputType, int8_t>::value) {
+ QuantizeAndPopulate<int8_t>(input_, data);
} else {
PopulateTensor(input_, data);
}
@@ -313,6 +315,92 @@
EXPECT_THAT(model.GetOutputShape(), ElementsAreArray({1, 4, 4, 1}));
}
+class PerChannelQuantizedTransposeConvOpModel
+ : public BaseTransposeConvOpModel<int8_t> {
+ public:
+ using BaseTransposeConvOpModel::BaseTransposeConvOpModel;
+
+ std::vector<float> GetDequantizedOutput() {
+ return Dequantize<int8_t>(ExtractVector<int8_t>(output_), GetScale(output_),
+ GetZeroPoint(output_));
+ }
+
+ void SetInput(const std::initializer_list<float>& data) {
+ QuantizeAndPopulate<int8_t>(input_, data);
+ }
+
+ void SetFilter(const std::initializer_list<float>& data) {
+ PerChannelSymmetricQuantizeAndPopulate(filter_, data);
+ }
+};
+
+TEST_P(TransposeConvOpTest, SimpleTestQuantizedPerChannelSingleChannel) {
+ // TODO(b/138722124): Enable these tests on NNAPI.
+ if (SingleOpModel::GetForceUseNnapi()) {
+ return;
+ }
+
+ const std::initializer_list<float> filter_data = {1, 2, 3, 4, 5, 6, 7, 8, 9};
+ PerChannelQuantizedTransposeConvOpModel model(
+ GetRegistration(), {1, 4, 4, 1},
+ {TensorType_INT8, {1, 3, 3, 1}, 0, 0, 0, 0, true, {9.0 / 127}, {0}, 0},
+ {}, {TensorType_INT8, {1, 4, 4, 1}, 0, 0, 16.0 / 255, -128},
+ {TensorType_INT8, {}, 0, 0, 2, -128}, Padding_SAME, 1, 1, GetTestType(),
+ /* version */ 2);
+ model.SetInput({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16});
+ model.SetFilter(filter_data);
+ model.Invoke();
+
+ EXPECT_THAT(
+ model.GetDequantizedOutput(),
+ ElementsAreArray(ArrayFloatNear({28, 62, 82, 76, 98, 192, 238, 198, 206,
+ 372, 416, 330, 262, 446, 486, 366},
+ 1e-5)));
+
+ // GetOutputShape() should always be the same as model.SetOutputShape(...);
+ EXPECT_THAT(model.GetOutputShape(), ElementsAreArray({1, 4, 4, 1}));
+}
+
+// Test data copied from the float multi-channel test above.
+TEST_P(TransposeConvOpTest, TestQuantizedPerChannelMultiChannel) {
+ // TODO(b/138722124): Enable these tests on NNAPI.
+ if (SingleOpModel::GetForceUseNnapi()) {
+ return;
+ }
+
+ const std::initializer_list<float> filter_data = {
+ 1, 3, 5, 7, 9, 11, 13, 15, 17, 2, 4, 6, 8, 10, 12, 14, 16, 18};
+ PerChannelQuantizedTransposeConvOpModel model(
+ GetRegistration(), {1, 5, 5, 2},
+ {TensorType_INT8,
+ {2, 3, 3, 1},
+ 0,
+ 0,
+ 0,
+ 0,
+ true,
+ {17.0 / 127, 18.0 / 127},
+ {0, 0},
+ 0},
+ {}, {TensorType_INT8, {1, 2, 2, 1}, 0, 0, 4.0 / 255, -128},
+ {TensorType_INT8, {}, 0, 0, 1, -128}, Padding_VALID, 2, 2, GetTestType(),
+ /* version */ 2);
+ model.SetInput({1, 2, 3, 4});
+ model.SetFilter(filter_data);
+ model.Invoke();
+
+ EXPECT_THAT(
+ model.GetDequantizedOutput(),
+ ElementsAreArray(ArrayFloatNear(
+ {1, 2, 3, 4, 7, 10, 6, 8, 10, 12, 7, 8, 9, 10, 25, 28, 18,
+ 20, 22, 24, 16, 20, 24, 28, 62, 72, 42, 48, 54, 60, 21, 24, 27, 30,
+ 61, 68, 36, 40, 44, 48, 39, 42, 45, 48, 103, 110, 60, 64, 68, 72},
+ 1e-5)));
+
+ // GetOutputShape() should always be the same as model.SetOutputShape(...);
+ EXPECT_THAT(model.GetOutputShape(), ElementsAreArray({1, 5, 5, 2}));
+}
+
TEST_P(TransposeConvOpTest, TwoFiltersTestQuantized) {
// Float would be {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
// 18}
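
For reading the expected values in the per-channel tests above: GetDequantizedOutput() maps the int8 output back to floats with the affine rule real_value = scale * (quantized_value - zero_point). A minimal standalone equivalent of that mapping (the test helper itself is SingleOpModel::Dequantize):

    #include <cstdint>
    #include <vector>

    // Affine dequantization; e.g. with scale 2 and zero_point -128, the int8
    // value -114 maps back to 2 * (-114 - (-128)) = 28.
    std::vector<float> DequantizeSketch(const std::vector<int8_t>& quantized,
                                        float scale, int32_t zero_point) {
      std::vector<float> real_values;
      real_values.reserve(quantized.size());
      for (int8_t q : quantized) {
        real_values.push_back(scale * (static_cast<int32_t>(q) - zero_point));
      }
      return real_values;
    }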
diff --git a/tensorflow/lite/models/BUILD b/tensorflow/lite/models/BUILD
index 8730160..6332636 100644
--- a/tensorflow/lite/models/BUILD
+++ b/tensorflow/lite/models/BUILD
@@ -7,8 +7,6 @@
exports_files(["LICENSE"])
-load("//tensorflow/lite:build_def.bzl", "tflite_copts")
-
exports_files(glob([
"testdata/*",
]))
diff --git a/tensorflow/lite/models/smartreply/demo/app/src/main/BUILD b/tensorflow/lite/models/smartreply/demo/app/src/main/BUILD
index 1f07459..8c489cf 100644
--- a/tensorflow/lite/models/smartreply/demo/app/src/main/BUILD
+++ b/tensorflow/lite/models/smartreply/demo/app/src/main/BUILD
@@ -20,7 +20,6 @@
android_binary(
name = "SmartReplyDemo",
srcs = glob(["java/**/*.java"]),
- aapt_version = "aapt2",
assets = [":assets"],
assets_dir = "",
custom_package = "com.example.android.smartreply",
diff --git a/tensorflow/lite/models/speech_test.cc b/tensorflow/lite/models/speech_test.cc
deleted file mode 100644
index 4b40858..0000000
--- a/tensorflow/lite/models/speech_test.cc
+++ /dev/null
@@ -1,217 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-// Unit test for speech models (Hotword, SpeakerId) using TFLite Ops.
-
-#include <memory>
-#include <string>
-
-#include <fstream>
-
-#include <gtest/gtest.h>
-#include "tensorflow/lite/testing/parse_testdata.h"
-#include "tensorflow/lite/testing/split.h"
-#include "tensorflow/lite/testing/tflite_driver.h"
-
-namespace tflite {
-namespace {
-
-const char kDataPath[] = "third_party/tensorflow/lite/models/testdata/";
-
-bool Init(const string& in_file_name, testing::TfLiteDriver* driver,
- std::ifstream* in_file) {
- driver->SetModelBaseDir(kDataPath);
- in_file->open(string(kDataPath) + in_file_name, std::ifstream::in);
- return in_file->is_open();
-}
-
-// Converts a set of test files provided by the speech team into a single
-// test_spec. Input CSV files are supposed to contain a number of sequences per
-// line. Each sequence maps to a single invocation of the interpreter and the
-// output tensor after all sequences have run is compared to the corresponding
-// line in the output CSV file.
-bool ConvertCsvData(const string& model_name, const string& in_name,
- const string& out_name, const string& input_tensor,
- const string& output_tensor,
- const string& persistent_tensors, int sequence_size,
- std::ostream* out) {
- auto data_path = [](const string& s) { return string(kDataPath) + s; };
-
- *out << "load_model: \"" << data_path(model_name) << "\"" << std::endl;
-
- *out << "init_state: \"" << persistent_tensors << "\"" << std::endl;
-
- string in_file_name = data_path(in_name);
- std::ifstream in_file(in_file_name);
- if (!in_file.is_open()) {
- std::cerr << "Failed to open " << in_file_name << std::endl;
- return false;
- }
- string out_file_name = data_path(out_name);
- std::ifstream out_file(out_file_name);
- if (!out_file.is_open()) {
- std::cerr << "Failed to open " << out_file_name << std::endl;
- return false;
- }
-
- int invocation_count = 0;
- string in_values;
- while (std::getline(in_file, in_values, '\n')) {
- std::vector<string> input = testing::Split<string>(in_values, ",");
- int num_sequences = input.size() / sequence_size;
-
- for (int j = 0; j < num_sequences; ++j) {
- *out << "invoke {" << std::endl;
- *out << " id: " << invocation_count << std::endl;
- *out << " input: \"";
- for (int k = 0; k < sequence_size; ++k) {
- *out << input[k + j * sequence_size] << ",";
- }
- *out << "\"" << std::endl;
-
- if (j == num_sequences - 1) {
- string out_values;
- if (!std::getline(out_file, out_values, '\n')) {
- std::cerr << "Not enough lines in " << out_file_name << std::endl;
- return false;
- }
- *out << " output: \"" << out_values << "\"" << std::endl;
- }
-
- *out << "}" << std::endl;
- ++invocation_count;
- }
- }
- return true;
-}
-
-class SpeechTest : public ::testing::TestWithParam<int> {
- protected:
- int GetMaxInvocations() { return GetParam(); }
-};
-
-TEST_P(SpeechTest, DISABLED_HotwordOkGoogleRank1Test) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData(
- "speech_hotword_model_rank1.tflite", "speech_hotword_model_in.csv",
- "speech_hotword_model_out_rank1.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"18", /*persistent_tensors=*/"4",
- /*sequence_size=*/40, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, DISABLED_HotwordOkGoogleRank2Test) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData(
- "speech_hotword_model_rank2.tflite", "speech_hotword_model_in.csv",
- "speech_hotword_model_out_rank2.csv", /*input_tensor=*/"17",
- /*output_tensor=*/"18", /*persistent_tensors=*/"1",
- /*sequence_size=*/40, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, DISABLED_SpeakerIdOkGoogleTest) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData(
- "speech_speakerid_model.tflite", "speech_speakerid_model_in.csv",
- "speech_speakerid_model_out.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"63",
- /*persistent_tensors=*/"18,19,38,39,58,59",
- /*sequence_size=*/80, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, AsrAmTest) {
- std::stringstream os;
- ASSERT_TRUE(
- ConvertCsvData("speech_asr_am_model.tflite", "speech_asr_am_model_in.csv",
- "speech_asr_am_model_out.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"104",
- /*persistent_tensors=*/"18,19,38,39,58,59,78,79,98,99",
- /*sequence_size=*/320, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, AsrAmQuantizedTest) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData(
- "speech_asr_am_model_int8.tflite", "speech_asr_am_model_in.csv",
- "speech_asr_am_model_int8_out.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"104",
- /*persistent_tensors=*/"18,19,38,39,58,59,78,79,98,99",
- /*sequence_size=*/320, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-// The original version of speech_asr_lm_model_test.cc ran a few sequences
-// through the interpreter and stored the sum of all the output, which was them
-// compared for correctness. In this test we are comparing all the intermediate
-// results.
-TEST_P(SpeechTest, DISABLED_AsrLmTest) {
- std::ifstream in_file;
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(Init("speech_asr_lm_model.test_spec", &test_driver, &in_file));
- ASSERT_TRUE(
- testing::ParseAndRunTests(&in_file, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, DISABLED_EndpointerTest) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData(
- "speech_endpointer_model.tflite", "speech_endpointer_model_in.csv",
- "speech_endpointer_model_out.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"56",
- /*persistent_tensors=*/"27,28,47,48",
- /*sequence_size=*/320, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-TEST_P(SpeechTest, DISABLED_TtsTest) {
- std::stringstream os;
- ASSERT_TRUE(ConvertCsvData("speech_tts_model.tflite",
- "speech_tts_model_in.csv",
- "speech_tts_model_out.csv", /*input_tensor=*/"0",
- /*output_tensor=*/"71",
- /*persistent_tensors=*/"24,25,44,45,64,65,70",
- /*sequence_size=*/334, &os));
- testing::TfLiteDriver test_driver;
- ASSERT_TRUE(testing::ParseAndRunTests(&os, &test_driver, GetMaxInvocations()))
- << test_driver.GetErrorMessage();
-}
-
-// Define two instantiations. The "ShortTests" instantiations is used when
-// running the tests on Android, in order to prevent timeouts (It takes about
-// 200s just to bring up the Android emulator.)
-static const int kAllInvocations = -1;
-static const int kFirstFewInvocations = 10;
-INSTANTIATE_TEST_SUITE_P(LongTests, SpeechTest,
- ::testing::Values(kAllInvocations));
-INSTANTIATE_TEST_SUITE_P(ShortTests, SpeechTest,
- ::testing::Values(kFirstFewInvocations));
-
-} // namespace
-} // namespace tflite
diff --git a/tensorflow/lite/models/testdata/g3doc/README.md b/tensorflow/lite/models/testdata/g3doc/README.md
deleted file mode 100644
index afe5f16..0000000
--- a/tensorflow/lite/models/testdata/g3doc/README.md
+++ /dev/null
@@ -1,125 +0,0 @@
-## Speech Model Tests
-
-Sample test data has been provided for speech related models in Tensorflow Lite
-to help users working with speech models to verify and test their models.
-
-### Models and Inputs and Outputs:
-
-[ASR AM model](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_asr_am_model.tflite)
-
-[ASR AM quantized model](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_asr_am_model_int8.tflite)
-
-[ASR AM test inputs](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_asr_am_model_in.csv)
-
-[ASR AM test outputs](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_asr_am_model_out.csv)
-
-[ASR AM int8 test outputs](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_asr_am_model_int8_out.csv)
-
-The models below are not maintained.
-
-[Speech hotword model (Svdf
-rank=1)](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_hotword_model_rank1_2017_11_14.tflite)
-
-[Speech hotword model (Svdf
-rank=2)](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_hotword_model_rank2_2017_11_14.tflite)
-
-[Speaker-id
-model](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_speakerid_model_2017_11_14.tflite)
-
-[TTS
-model](https://storage.googleapis.com/download.tensorflow.org/models/tflite/speech_tts_model_2017_11_14.tflite)
-
-### Test Bench
-
-[Model tests](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/models/speech_test.cc)
-
-Download the ASR AM test models and inputs and output files to the
-models/testdata directory to run the tests.
-
-
-## Speech Model Architectures
-
-For the hotword, speaker-id and automatic speech recognition sample models, the
-architecture assumes that the models receive their input from a speech
-pre-processing module. The speech pre-processing module receives the audio
-signal and produces features for the encoder neural network and uses some
-typical signal processing algorithms, like FFT and spectral subtraction, and
-ultimately produces a log-mel filterbank (the log of the triangular mel filters
-applied to the power spectra). The text-to-speech model assumes that the inputs
-are linguistic features describing characteristics of phonemes, syllables,
-words, phrases, and sentence. The outputs are acoustic features including
-mel-cepstral coefficients, log fundamental frequency, and band aperiodicity.
-The pre-processing modules for these models are not provided in the open source
-version of TensorFlow Lite.
-
-The following sections describe the architecture of the sample models at a high
-level:
-
-### Hotword Model
-
-The hotword model is the neural network model we use for keyphrase/hotword
-spotting (i.e. "okgoogle" detection). It is the entry point for voice
-interaction (e.g. Google search app on Android devices or Google Home, etc.).
-The speech hotword model block diagram is shown in Figure below. It has an input
-size of 40 (float), an output size of 7 (float), one Svdf layer, and four fully
-connected layers with the corresponding parameters as shown in figure below.
-
-
-
-### Speaker-id Model
-
-The speaker-id model is the neural network model we use for speaker
-verification. It runs after the hotword triggers. The speech speaker-id model
-block diagram is shown in Figure below. It has an input size of 80 (float), an
-output size of 64 (float), three Lstm layers, and one fully connected layers
-with the corresponding parameters as shown in figure below.
-
-
-
-### Text-to-speech (TTS) Model
-
-The text-to-speech model is the neural network model used to generate speech
-from text. The speech text-to-speech model’s block diagram is shown
-in Figure below. It has and input size of 334 (float), an output size of 196
-(float), two fully connected layers, three Lstm layers, and one recurrent layer
-with the corresponding parameters as shown in the figure.
-
-
-
-### Automatic Speech Recognizer (ASR) Acoustic Model (AM)
-
-The acoustic model for automatic speech recognition is the neural network model
-for matching phonemes to the input audio features. It generates posterior
-probabilities of phonemes from speech frontend features (log-mel filterbanks).
-It has an input size of 320 (float), an output size of 42 (float), five LSTM
-layers and one fully connected layers with a Softmax activation function, with
-the corresponding parameters as shown in the figure.
-
-
-
-### Automatic Speech Recognizer (ASR) Language Model (LM)
-
-The language model for automatic speech recognition is the neural network model
-for predicting the probability of a word given previous words in a sentence.
-It generates posterior probabilities of the next word based from a sequence of
-words. The words are encoded as indices in a fixed size dictionary.
-The model has two inputs both of size one (integer): the current word index and
-next word index, an output size of one (float): the log probability. It consists
-of three embedding layer, three LSTM layers, followed by a multiplication, a
-fully connected layers and an addition.
-The corresponding parameters as shown in the figure.
-
-
-
-### Endpointer Model
-
-The endpointer model is the neural network model for predicting end of speech
-in an utterance. More precisely, it generates posterior probabilities of various
-events that allow detection of speech start and end events.
-It has an input size of 40 (float) which are speech frontend features
-(log-mel filterbanks), and an output size of four corresponding to:
-speech, intermediate non-speech, initial non-speech, and final non-speech.
-The model consists of a convolutional layer, followed by a fully-connected
-layer, two LSTM layers, and two additional fully-connected layers.
-The corresponding parameters as shown in the figure.
-
diff --git a/tensorflow/lite/models/testdata/g3doc/asr_am.svg b/tensorflow/lite/models/testdata/g3doc/asr_am.svg
deleted file mode 100644
index 9f841c2..0000000
--- a/tensorflow/lite/models/testdata/g3doc/asr_am.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 703.0 722.8005249343832" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="p.0"><path d="m0 0l703.0 0l0 722.80054l-703.0 0l0 -722.80054z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l703.0 0l0 722.80054l-703.0 0z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m256.0 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m256.0 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m268.43954 57.620842l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0zm4.6676636 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375732 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313202 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.5788574 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 
0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.254181 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm6.8439026 0.28125l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.141327 1.984375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -5.09375q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 
0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm10.219452 10.703125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m223.0 102.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m223.0 102.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m265.82367 128.94362l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm21.212677 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.918396 4.0q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.572052 -7.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 
-0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.141357 1.984375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944519 -5.09375q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.016357 6.703125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375702 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656982 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 
-0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm10.219452 10.703125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m259.9714 154.72487l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844452 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019806 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 
1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426636 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5042114 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281952 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 
0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.578827 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm18.210358 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -5.09375q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m249.80052 657.01575l180.00002 0l0 42.11023l-180.00002 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m249.80052 657.01575l180.00002 0l0 42.11023l-180.00002 0z" fill-rule="evenodd"></path><path fill="#000000" d="m266.3206 677.3107q0 -3.390625 1.8125 -5.296875q1.828125 -1.921875 4.703125 -1.921875q1.875 0 3.390625 0.90625q1.515625 0.890625 2.296875 2.5q0.796875 1.609375 0.796875 3.65625q0 2.0625 -0.84375 3.703125q-0.828125 1.625 -2.359375 2.46875q-1.53125 0.84375 -3.296875 0.84375q-1.921875 0 -3.4375 -0.921875q-1.5 -0.9375 -2.28125 -2.53125q-0.78125 -1.609375 -0.78125 -3.40625zm1.859375 0.03125q0 2.453125 1.3125 3.875q1.328125 1.40625 3.3125 1.40625q2.03125 0 3.34375 -1.421875q1.3125 -1.4375 1.3125 -4.0625q0 -1.65625 -0.5625 -2.890625q-0.546875 -1.234375 -1.640625 -1.921875q-1.078125 -0.6875 -2.421875 -0.6875q-1.90625 0 -3.28125 1.3125q-1.375 1.3125 -1.375 4.390625zm19.433289 6.59375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 
-0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.5788574 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5270386 5.28125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313232 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578827 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 
-2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm12.187653 3.875l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm13.797577 3.171875l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm3.1569824 5.609375l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m339.09448 161.01575l0 24.724411" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.09448 161.01575l0 18.724411" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.44275 179.74016l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m339.09448 244.72906l0 25.29132" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.09448 244.72906l0 19.291351" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.44275 264.02042l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m339.00787 72.81108l0.09448242 29.196846" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.00787 72.81108l0.07507324 23.196877" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.4312 96.013306l1.6664124 4.5327225l1.6370544 -4.543419z" 
fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m223.0 526.4199l232.18896 0l0 42.11029l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m223.0 526.4199l232.18896 0l0 42.11029l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m236.43524 553.33997l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.53659 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.0979614 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.926056 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 
2.578125l0 5.375l-1.671875 0zm17.125732 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547577 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277069 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500702 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm17.637146 8.921875q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 
-7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375732 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.016357 6.703125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm15.328125 0l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm13.797577 3.171875l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm3.1569824 5.609375l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m339.09448 413.32974l0 24.125977" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.09448 413.3297l0 18.126007" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.44275 431.45572l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" 
d="m339.09448 329.01575l0 25.322845" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.09448 329.01575l0 19.322845" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.44275 348.3386l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m339.09448 496.44235l0 29.984283" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m339.09448 496.44238l0 23.984253" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m337.44275 520.42664l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m223.0 185.73694l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m223.0 185.73694l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m265.82367 212.65694l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm23.697052 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm10.434021 5.609375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 
-0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375732 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.016357 6.703125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375702 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 
0.984375q-0.875 1.234375 -0.875 4.59375zm8.656982 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm10.219452 10.703125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m259.9714 238.43819l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844452 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019806 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 
0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426636 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5042114 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281952 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 
-2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.578827 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm18.210358 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -5.09375q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m223.0 270.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m223.0 270.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m265.82367 296.94363l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 
1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm14.931427 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.199646 7.59375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375732 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 
-0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.016357 6.703125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375702 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656982 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm10.219452 10.703125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m259.9714 322.72488l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 
4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844452 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019806 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426636 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5042114 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 
0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281952 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.578827 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm18.210358 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -5.09375q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 
0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m223.0 354.33762l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m223.0 354.33762l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m265.82367 381.2576l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm20.275177 0l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm13.855896 8.78125q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 
8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375732 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656952 0q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.016357 6.703125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.375702 -3.140625q0 -2.421875 0.5 -3.890625q0.5 -1.46875 1.46875 -2.265625q0.984375 -0.796875 2.46875 -0.796875q1.09375 0 1.921875 0.4375q0.828125 0.4375 1.359375 1.28125q0.546875 0.828125 0.84375 2.015625q0.3125 1.1875 0.3125 3.21875q0 2.390625 -0.5 3.859375q-0.484375 1.46875 -1.46875 2.28125q-0.96875 0.796875 -2.46875 0.796875q-1.96875 0 -3.078125 -1.40625q-1.359375 -1.703125 -1.359375 -5.53125zm1.71875 0q0 3.34375 0.78125 4.453125q0.796875 1.109375 1.9375 1.109375q1.15625 0 1.9375 -1.109375q0.78125 -1.125 0.78125 -4.453125q0 -3.359375 -0.78125 -4.46875q-0.78125 -1.109375 -1.953125 -1.109375q-1.15625 0 -1.828125 0.984375q-0.875 1.234375 -0.875 4.59375zm8.656982 0q0 -2.421875 0.5 -3.890625q0.5 
-[... remainder of deleted SVG diagram elided: glyph-outline path data for the diagram's box labels and connector arrows, ending with the closing </svg> tag ...]
diff --git a/tensorflow/lite/models/testdata/g3doc/asr_lm.svg b/tensorflow/lite/models/testdata/g3doc/asr_lm.svg
deleted file mode 100644
index 2662f77..0000000
--- a/tensorflow/lite/models/testdata/g3doc/asr_lm.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 742.6010498687664 753.6010498687664" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="p.0"><path d="m0 0l742.6011 0l0 753.6011l-742.6011 0l0 -753.6011z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l742.6011 0l0 753.6011l-742.6011 0z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m136.0 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m136.0 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m153.6274 57.620842l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0zm4.667679 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375717 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313217 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578842 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm7.355179 1.5l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.918396 4.0q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 
-0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2541962 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.07463 -2.125l-8.968735 0l0 -1.5625l8.968735 0l0 1.5625zm0 4.125l-8.968735 0l0 -1.546875l8.968735 0l0 1.546875zm13.125153 3.875l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm5.641327 4.0l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m103.0 180.96326l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m103.0 180.96326l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m151.01154 207.88326l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844467 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 
0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.880356 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm21.212677 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.918396 4.0q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm16.672592 3.5625l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860092 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm0.95384216 1.609375l3.5937347 -5.125l-3.3281097 -4.734375l2.09375 0l1.5156097 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.5937347 3.890625l-2.015625 0zm16.26561 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.750732 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 
1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078857 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m145.15926 233.6645l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844467 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019821 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547592 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 
-1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426788 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5041962 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2541962 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.95311 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.4218597 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 
-2.484375 1.265625 -3.859375q1.2812347 -1.375 3.3281097 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.3437347 0q0.09375 1.625 0.92185974 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.4843597 -2.703125l5.4999847 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78123474 0.765625 -0.85935974 2.046875zm9.578842 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm9.444733 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.141327 1.984375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m129.09448 653.0184l180.0 0l0 42.11023l-180.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m129.09448 653.0184l180.0 0l0 42.11023l-180.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m150.8024 673.31335q0 -3.390625 1.8125 -5.296875q1.828125 -1.921875 4.703125 -1.921875q1.875 0 3.390625 0.90625q1.515625 0.890625 2.296875 2.5q0.796875 1.609375 0.796875 3.65625q0 2.0625 -0.84375 3.703125q-0.828125 1.625 -2.359375 2.46875q-1.53125 0.84375 -3.296875 0.84375q-1.921875 0 -3.4375 -0.921875q-1.5 -0.9375 -2.28125 -2.53125q-0.78125 -1.609375 -0.78125 -3.40625zm1.859375 0.03125q0 2.453125 1.3125 3.875q1.328125 1.40625 3.3125 1.40625q2.03125 0 3.34375 -1.421875q1.3125 -1.4375 1.3125 -4.0625q0 -1.65625 -0.5625 -2.890625q-0.546875 -1.234375 -1.640625 -1.921875q-1.078125 -0.6875 -2.421875 -0.6875q-1.90625 0 -3.28125 1.3125q-1.375 1.3125 -1.375 4.390625zm19.433304 6.59375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 
-0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578842 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5270538 5.28125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313217 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578842 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2541962 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 
-1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm13.125153 3.875l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm5.641327 4.0l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m219.09448 239.95538l0 21.543304" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.09448 239.95538l0 15.543304" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.44275 255.49869l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m219.09448 320.48557l0 21.543304" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.09448 320.48557l0 15.543304" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.44275 336.02887l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m219.00787 72.81108l0.09448242 25.732285" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.00787 72.81108l0.07246399 19.732315" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.4286 92.54946l1.668396 4.5320053l1.6350555 -4.544136z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m219.09448 401.01575l0 19.40158" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.09448 401.01575l0 13.401581" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.44275 414.41733l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m103.0 261.49344l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m103.0 261.49344l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m145.82367 288.41342l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 
0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm23.697052 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm10.434021 5.609375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm16.672607 3.5625l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860077 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 
0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm0.9538574 1.609375l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm16.672577 3.5625l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860107 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm3.156952 5.609375l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m145.15926 314.19467l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844467 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 
3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019821 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547592 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426788 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5041962 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 
-0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2541962 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.95311 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.4218597 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.2812347 -1.375 3.3281097 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.3437347 0q0.09375 1.625 0.92185974 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.4843597 -2.703125l5.4999847 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78123474 0.765625 -0.85935974 2.046875zm9.578842 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm9.444733 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.141327 1.984375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 
-0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m103.0 342.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m103.0 342.02362l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m145.82367 368.94363l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm14.931427 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.199646 7.59375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 
0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm16.672607 3.5625l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860077 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm0.9538574 1.609375l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm9.96875 -3.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm16.672577 3.5625l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860107 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm3.156952 5.609375l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" d="m145.15926 394.72488l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 
2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm8.844467 4.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.603302 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 -6.734375l0 -1.9375l1.65625 0l0 1.9375l-1.65625 0zm-2.125 15.484375l0.3125 -1.421875q0.5 0.125 0.796875 0.125q0.515625 0 0.765625 -0.34375q0.25 -0.328125 0.25 -1.6875l0 -10.359375l1.65625 0l0 10.390625q0 1.828125 -0.46875 2.546875q-0.59375 0.921875 -2.0 0.921875q-0.671875 0 -1.3125 -0.171875zm13.019821 -7.0l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547592 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5426788 -10.1875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.5041962 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 
3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281967 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.887146 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2541962 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.95311 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.4218597 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.2812347 -1.375 3.3281097 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.3437347 0q0.09375 1.625 0.92185974 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.4843597 -2.703125l5.4999847 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78123474 0.765625 -0.85935974 2.046875zm9.578842 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm9.444733 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 
0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.141327 1.984375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m219.09448 618.4042l0 34.614197" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.09448 618.4042l0 28.614197" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.44275 647.0184l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m103.0 98.54593l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m103.0 98.54593l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m143.32318 125.46593l0 -13.59375l9.84375 0l0 1.59375l-8.046875 0l0 4.171875l7.53125 0l0 1.59375l-7.53125 0l0 4.625l8.359375 0l0 1.609375l-10.15625 0zm12.193573 0l0 -9.859375l1.5 0l0 1.390625q0.453125 -0.71875 1.21875 -1.15625q0.78125 -0.453125 1.765625 -0.453125q1.09375 0 1.796875 0.453125q0.703125 0.453125 0.984375 1.28125q1.171875 -1.734375 3.046875 -1.734375q1.46875 0 2.25 0.8125q0.796875 0.8125 0.796875 2.5l0 6.765625l-1.671875 0l0 -6.203125q0 -1.0 -0.15625 -1.4375q-0.15625 -0.453125 -0.59375 -0.71875q-0.421875 -0.265625 -1.0 -0.265625q-1.03125 0 -1.71875 0.6875q-0.6875 0.6875 -0.6875 2.21875l0 5.71875l-1.671875 0l0 -6.40625q0 -1.109375 -0.40625 -1.65625q-0.40625 -0.5625 -1.34375 -0.5625q-0.703125 0 -1.3125 0.375q-0.59375 0.359375 -0.859375 1.078125q-0.265625 0.71875 -0.265625 2.0625l0 5.109375l-1.671875 0zm17.087677 0l-1.546875 0l0 -13.59375l1.65625 0l0 4.84375q1.0625 -1.328125 2.703125 -1.328125q0.90625 0 1.71875 0.375q0.8125 0.359375 1.328125 1.03125q0.53125 0.65625 0.828125 1.59375q0.296875 0.9375 0.296875 2.0q0 2.53125 -1.25 3.921875q-1.25 1.375 -3.0 1.375q-1.75 0 -2.734375 -1.453125l0 1.234375zm-0.015625 -5.0q0 1.765625 0.46875 2.5625q0.796875 1.28125 2.140625 1.28125q1.09375 0 1.890625 -0.9375q0.796875 -0.953125 0.796875 -2.84375q0 -1.921875 -0.765625 -2.84375q-0.765625 -0.921875 -1.84375 -0.921875q-1.09375 0 -1.890625 0.953125q-0.796875 0.953125 -0.796875 2.75zm15.594467 1.828125l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 
0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500717 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm15.656967 4.921875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm9.281967 -6.765625l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm4.129196 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.078842 0.8125l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm14.449646 5.109375l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm3.5510712 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 
2.828125zm8.656967 0q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.297607 4.921875l0 -13.59375l1.671875 0l0 7.75l3.953125 -4.015625l2.15625 0l-3.765625 3.65625l4.140625 6.203125l-2.0625 0l-3.25 -5.03125l-1.171875 1.125l0 3.90625l-1.671875 0zm15.765625 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.922577 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625z" fill-rule="nonzero"></path><path fill="#000000" d="m176.34024 151.46593q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm8.531967 0.8125l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 
0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm10.625717 0.453125l1.59375 -0.15625q0.203125 1.140625 0.78125 1.65625q0.578125 0.5 1.484375 0.5q0.765625 0 1.34375 -0.34375q0.578125 -0.359375 0.953125 -0.953125q0.375 -0.59375 0.625 -1.59375q0.25 -1.0 0.25 -2.03125q0 -0.109375 -0.015625 -0.34375q-0.5 0.796875 -1.375 1.296875q-0.859375 0.5 -1.875 0.5q-1.6875 0 -2.859375 -1.21875q-1.171875 -1.234375 -1.171875 -3.234375q0 -2.078125 1.21875 -3.328125q1.234375 -1.265625 3.0625 -1.265625q1.328125 0 2.421875 0.71875q1.109375 0.703125 1.671875 2.03125q0.578125 1.328125 0.578125 3.828125q0 2.609375 -0.578125 4.15625q-0.5625 1.546875 -1.6875 2.359375q-1.109375 0.796875 -2.609375 0.796875q-1.59375 0 -2.609375 -0.890625q-1.0 -0.890625 -1.203125 -2.484375zm6.828125 -6.0q0 -1.4375 -0.765625 -2.28125q-0.765625 -0.859375 -1.84375 -0.859375q-1.109375 0 -1.9375 0.921875q-0.828125 0.90625 -0.828125 2.34375q0 1.3125 0.78125 2.125q0.796875 0.796875 1.9375 0.796875q1.171875 0 1.90625 -0.796875q0.75 -0.8125 0.75 -2.25zm5.860092 1.765625q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm8.688217 0.328125l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm9.719467 3.59375l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 
1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm16.265625 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.750717 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078827 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m219.09448 157.53806l0 23.433075" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m219.09448 157.53806l0 17.433075" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m217.44275 174.97113l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m395.48425 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m395.48425 30.700842l166.01575 0l0 42.110237l-166.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m413.11163 57.620842l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0zm4.667694 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313232 4.875l0 
-1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578827 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.839569 -0.109375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm10.434021 5.609375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.254181 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 
0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm13.125122 3.875l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm5.6413574 4.0l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m388.49344 411.97638l179.99997 0l0 58.992126l-179.99997 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m388.49344 411.97638l179.99997 0l0 58.992126l-179.99997 0z" fill-rule="evenodd"></path><path fill="#000000" d="m402.72214 438.89636l0 -13.59375l9.84375 0l0 1.59375l-8.046875 0l0 4.171875l7.53125 0l0 1.59375l-7.53125 0l0 4.625l8.359375 0l0 1.609375l-10.15625 0zm12.193573 0l0 -9.859375l1.5 0l0 1.390625q0.453125 -0.71875 1.21875 -1.15625q0.78125 -0.453125 1.765625 -0.453125q1.09375 0 1.796875 0.453125q0.703125 0.453125 0.984375 1.28125q1.171875 -1.734375 3.046875 -1.734375q1.46875 0 2.25 0.8125q0.796875 0.8125 0.796875 2.5l0 6.765625l-1.671875 0l0 -6.203125q0 -1.0 -0.15625 -1.4375q-0.15625 -0.453125 -0.59375 -0.71875q-0.421875 -0.265625 -1.0 -0.265625q-1.03125 0 -1.71875 0.6875q-0.6875 0.6875 -0.6875 2.21875l0 5.71875l-1.671875 0l0 -6.40625q0 -1.109375 -0.40625 -1.65625q-0.40625 -0.5625 -1.34375 -0.5625q-0.703125 0 -1.3125 0.375q-0.59375 0.359375 -0.859375 1.078125q-0.265625 0.71875 -0.265625 2.0625l0 5.109375l-1.671875 0zm17.087677 0l-1.546875 0l0 -13.59375l1.65625 0l0 4.84375q1.0625 -1.328125 2.703125 -1.328125q0.90625 0 1.71875 0.375q0.8125 0.359375 1.328125 1.03125q0.53125 0.65625 0.828125 1.59375q0.296875 0.9375 0.296875 2.0q0 2.53125 -1.25 3.921875q-1.25 1.375 -3.0 1.375q-1.75 0 -2.734375 -1.453125l0 1.234375zm-0.015625 -5.0q0 1.765625 0.46875 2.5625q0.796875 1.28125 2.140625 1.28125q1.09375 0 1.890625 -0.9375q0.796875 -0.953125 0.796875 -2.84375q0 -1.921875 -0.765625 -2.84375q-0.765625 -0.921875 -1.84375 -0.921875q-1.09375 0 -1.890625 0.953125q-0.796875 0.953125 -0.796875 2.75zm15.594452 1.828125l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500732 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 
-0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm15.656952 4.921875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm9.281982 -6.765625l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm4.129181 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.078857 0.8125l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm14.449646 5.109375l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm3.551056 -4.921875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.0312805 0 3.3125305 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.0781555 0.59375 -2.3750305 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625305 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.8281555 -0.9375 -2.0625305 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm8.656952 0q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 
-2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.297607 4.921875l0 -13.59375l1.671875 0l0 7.75l3.953125 -4.015625l2.15625 0l-3.765625 3.65625l4.140625 6.203125l-2.0625 0l-3.25 -5.03125l-1.171875 1.125l0 3.90625l-1.671875 0zm15.765625 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.9226074 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625z" fill-rule="nonzero"></path><path fill="#000000" d="m435.7392 464.89636q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm8.531952 0.8125l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm10.625732 
[... SVG path data elided: diagram box outlines, connector arrows, and text labels rendered as glyph paths ...]</g></svg>
-
diff --git a/tensorflow/lite/models/testdata/g3doc/endpointer.svg b/tensorflow/lite/models/testdata/g3doc/endpointer.svg
deleted file mode 100644
index 6033bdc..0000000
--- a/tensorflow/lite/models/testdata/g3doc/endpointer.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 681.8005249343832 883.6010498687664" ...>[... SVG path data for the deleted endpointer diagram elided: box outlines, connector arrows, and text labels rendered as glyph paths ...]
0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m228.15503 694.4199l232.18896 0l0 42.11029l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m251.96599 721.33997l0 -13.59375l9.17186 0l0 1.59375l-7.3749847 0l0 4.21875l6.3749847 0l0 1.609375l-6.3749847 0l0 6.171875l-1.796875 0zm17.536606 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.891327 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.1448364 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.097931 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.9260864 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281952 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375732 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125702 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 
0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277039 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500732 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm17.637146 8.921875q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 
0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.3757324 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm15.328125 0l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.578827 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m344.95538 503.2441l0 37.88974" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m344.95538 503.24408l0 31.88977" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m343.30365 535.13385l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m344.2495 284.66928l0 25.35434" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m344.2495 284.66928l0 19.35434" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m342.59778 304.02362l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m344.2495 664.5302l0 29.88971" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m344.2495 664.5302l0 23.88971" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m342.59778 688.4199l1.6517334 4.538147l1.6517334 -4.538147z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m228.15503 161.73694l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m228.15503 161.73694l232.18896 0l0 58.992126l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m321.72083 183.89131l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 
0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.926056 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm13.110077 0l-3.75 -9.859375l1.765625 0l2.125 5.90625q0.34375 0.953125 0.625 1.984375q0.21875 -0.78125 0.625 -1.875l2.1875 -6.015625l1.71875 0l-3.734375 9.859375l-1.5625 0zm14.90625 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm2.2819824 1.609375l0 -13.59375l4.6875 0q1.578125 0 2.421875 0.1875q1.15625 0.265625 1.984375 0.96875q1.078125 0.921875 1.609375 2.34375q0.53125 1.40625 0.53125 3.21875q0 1.546875 -0.359375 2.75q-0.359375 1.1875 -0.921875 1.984375q-0.5625 0.78125 -1.234375 1.234375q-0.671875 0.4375 -1.625 0.671875q-0.953125 0.234375 -2.1875 0.234375l-4.90625 0zm1.796875 -1.609375l2.90625 0q1.34375 0 2.109375 -0.25q0.765625 -0.25 1.21875 -0.703125q0.640625 -0.640625 1.0 -1.71875q0.359375 -1.078125 0.359375 -2.625q0 -2.125 -0.703125 -3.265625q-0.703125 -1.15625 -1.703125 -1.546875q-0.71875 -0.28125 -2.328125 -0.28125l-2.859375 0l0 10.390625z" fill-rule="nonzero"></path><path fill="#000000" d="m268.58267 214.65694q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 
0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.254181 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm7.3439026 7.65625l0 -17.375l3.671875 0l0 1.375l-2.015625 0l0 14.609375l2.015625 0l0 1.390625l-3.671875 0zm13.339539 -14.046875l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm5.922577 4.78125l0 -1.90625l1.90625 0l0 1.90625q0 1.046875 -0.375 1.6875q-0.375 0.65625 -1.171875 1.0l-0.46875 -0.71875q0.53125 -0.21875 0.78125 -0.671875q0.25 -0.453125 0.28125 -1.296875l-0.953125 0zm12.038483 -7.375q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 
-2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm9.578857 3.921875l0 -1.90625l1.90625 0l0 1.90625q0 1.046875 -0.375 1.6875q-0.375 0.65625 -1.171875 1.0l-0.46875 -0.71875q0.53125 -0.21875 0.78125 -0.671875q0.25 -0.453125 0.28125 -1.296875l-0.953125 0zm15.757233 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm4.985077 0l0 -1.90625l1.90625 0l0 1.90625q0 1.046875 -0.375 1.6875q-0.375 0.65625 -1.171875 1.0l-0.46875 -0.71875q0.53125 -0.21875 0.78125 -0.671875q0.25 -0.453125 0.28125 -1.296875l-0.953125 0zm15.757233 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm7.3444824 3.78125l-3.6875 0l0 -1.390625l2.015625 0l0 -14.609375l-2.015625 0l0 -1.375l3.6875 0l0 17.375zm3.4801636 0.21875l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m228.15503 246.02362l232.18896 0l0 38.64566l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m228.15503 246.02362l232.18896 0l0 38.64566l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m255.45354 272.94363l0 -13.59375l2.7187347 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.7343597 0zm21.822037 -1.21875q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 
-1.671875l0 -0.609375zm2.9694824 4.9375l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm10.65625 0l0 -13.59375l5.125 0q1.359375 0 2.078125 0.125q1.0 0.171875 1.671875 0.640625q0.671875 0.46875 1.078125 1.3125q0.421875 0.84375 0.421875 1.84375q0 1.734375 -1.109375 2.9375q-1.09375 1.203125 -3.984375 1.203125l-3.484375 0l0 5.53125l-1.796875 0zm1.796875 -7.140625l3.515625 0q1.75 0 2.46875 -0.640625q0.734375 -0.65625 0.734375 -1.828125q0 -0.859375 -0.4375 -1.46875q-0.421875 -0.609375 -1.125 -0.796875q-0.453125 -0.125 -1.671875 -0.125l-3.484375 0l0 4.859375zm9.802948 2.21875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm8.656952 0q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.250732 4.921875l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm12.488556 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm2.2819824 1.609375l0 -13.59375l4.6875 0q1.578125 0 2.421875 0.1875q1.15625 0.265625 1.984375 0.96875q1.078125 0.921875 1.609375 2.34375q0.53125 1.40625 0.53125 3.21875q0 1.546875 -0.359375 2.75q-0.359375 1.1875 -0.921875 1.984375q-0.5625 0.78125 -1.234375 1.234375q-0.671875 0.4375 -1.625 0.671875q-0.953125 0.234375 -2.1875 0.234375l-4.90625 0zm1.796875 -1.609375l2.90625 0q1.34375 0 2.109375 -0.25q0.765625 -0.25 1.21875 -0.703125q0.640625 -0.640625 1.0 -1.71875q0.359375 -1.078125 0.359375 -2.625q0 -2.125 -0.703125 -3.265625q-0.703125 -1.15625 -1.703125 -1.546875q-0.71875 -0.28125 -2.328125 -0.28125l-2.859375 0l0 10.390625zm19.828125 5.609375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 
8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm13.65625 1.4375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5114136 1.5l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm6.243927 -11.6875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm10.519836 0l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm16.016327 1.75l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.578857 -2.078125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm0 7.953125l0 -1.90625l1.90625 0l0 1.90625l-1.90625 0zm9.444733 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 
-1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm11.922577 7.59375l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m228.86089 461.13388l232.18898 0l0 42.11023l-232.18898 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m228.86089 461.13388l232.18898 0l0 42.11023l-232.18898 0z" fill-rule="evenodd"></path><path fill="#000000" d="m282.06027 488.05386l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844482 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.8803406 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm21.212677 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.918396 4.0q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.2283325 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.4062805 0.4375q-0.796875 0.578125 
-1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.8750305 -0.453125 1.8281555 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.0937805 0 -3.4062805 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.4219055 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125305 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.6562805 0.40625 1.3750305 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.1250305 0 -1.9219055 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.3757324 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.5788574 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m274.61697 770.54596l140.06299 0l0 42.11023l-140.06299 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m274.61697 770.54596l140.06299 0l0 42.11023l-140.06299 0z" fill-rule="evenodd"></path><path fill="#000000" d="m311.29257 793.09094l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 
-0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm12.209198 -0.546875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.688232 4.921875l0 -8.546875l-1.484375 0l0 -1.3125l1.484375 0l0 -1.046875q0 -0.984375 0.171875 -1.46875q0.234375 -0.65625 0.84375 -1.046875q0.609375 -0.40625 1.703125 -0.40625q0.703125 0 1.5625 0.15625l-0.25 1.46875q-0.515625 -0.09375 -0.984375 -0.09375q-0.765625 0 -1.078125 0.328125q-0.3125 0.3125 -0.3125 1.203125l0 0.90625l1.921875 0l0 1.3125l-1.921875 0l0 8.546875l-1.65625 0zm8.433289 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5270691 1.5l0 -9.859375l1.5 0l0 1.390625q0.453125 -0.71875 1.21875 -1.15625q0.78125 -0.453125 1.765625 -0.453125q1.09375 0 1.796875 0.453125q0.703125 0.453125 0.984375 1.28125q1.171875 -1.734375 3.046875 -1.734375q1.46875 0 2.25 0.8125q0.796875 0.8125 0.796875 2.5l0 6.765625l-1.671875 0l0 -6.203125q0 -1.0 -0.15625 -1.4375q-0.15625 -0.453125 -0.59375 -0.71875q-0.421875 -0.265625 -1.0 -0.265625q-1.03125 0 -1.71875 0.6875q-0.6875 0.6875 -0.6875 2.21875l0 5.71875l-1.671875 0l0 -6.40625q0 -1.109375 -0.40625 -1.65625q-0.40625 -0.5625 -1.34375 -0.5625q-0.703125 0 -1.3125 0.375q-0.59375 0.359375 -0.859375 1.078125q-0.265625 0.71875 -0.265625 2.0625l0 5.109375l-1.671875 0zm21.978302 -1.21875q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 
0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 -0.609375zm2.969452 4.9375l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m344.2495 736.5302l0.40945435 34.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m344.2495 736.5302l0.33721924 28.016113" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m342.93512 764.5662l1.7062378 4.5178833l1.5969849 -4.557617z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m344.64847 812.6562l0.31497192 20.346436" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m344.64847 812.6562l0.22210693 14.347168" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m343.21902 827.02893l1.7217712 4.511963l1.5812988 -4.5631104z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m228.15503 622.4199l232.18896 0l0 42.11029l-232.18896 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m228.15503 622.4199l232.18896 0l0 42.11029l-232.18896 0z" fill-rule="evenodd"></path><path fill="#000000" d="m246.77812 649.33997l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.536606 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.0979614 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 
2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.926056 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125732 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547577 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277069 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 
-2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500702 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm17.637146 8.921875q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.375702 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 
-[... remaining SVG vector path data elided: the deleted figure's boxes, connector arrows, and text labels are all drawn as glyph-outline paths and carry no recoverable text ...]
diff --git a/tensorflow/lite/models/testdata/g3doc/hotword.svg b/tensorflow/lite/models/testdata/g3doc/hotword.svg
deleted file mode 100755
index 36187aa..0000000
--- a/tensorflow/lite/models/testdata/g3doc/hotword.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 720.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> [vector path data elided: a 720x540 block diagram whose boxes, arrows, and labels are rendered as glyph-outline paths ...]
-0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m294.0 262.00262l150.01575 0l0 58.992126l-150.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m294.0 262.00262l150.01575 0l0 58.992126l-150.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m305.7563 288.92264l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.536621 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.097931 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.9260864 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 
-0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125702 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277039 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500732 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375z" fill-rule="nonzero"></path><path fill="#000000" d="m326.25818 314.92264q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.556427 -7.5625l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 
-0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm10.500702 -8.25l0 -1.609375l8.796875 0l0 1.296875q-1.296875 1.375 -2.578125 3.671875q-1.265625 2.296875 -1.96875 4.71875q-0.5 1.703125 -0.640625 3.734375l-1.71875 0q0.03125 -1.609375 0.625 -3.875q0.609375 -2.28125 1.734375 -4.390625q1.140625 -2.109375 2.40625 -3.546875l-6.65625 0zm12.828857 4.4375q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm13.215271 3.921875l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm23.933289 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -1.953125l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm19.047607 -6.703125l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 
-0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078827 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m294.0 336.50394l150.01575 0l0 58.992126l-150.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m294.0 336.50394l150.01575 0l0 58.992126l-150.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m305.7563 363.42395l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.536621 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.097931 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.9260864 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 
2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125702 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277039 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500732 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 
-0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375z" fill-rule="nonzero"></path><path fill="#000000" d="m326.25818 389.42395q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.337677 -5.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944519 -1.953125l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm19.047607 -6.703125l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.059021 4.40625l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm23.933289 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 
-0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm1.5944824 -1.953125l1.765625 -0.15625q0.1875 1.28125 0.890625 1.9375q0.71875 0.640625 1.71875 0.640625q1.203125 0 2.03125 -0.90625q0.84375 -0.90625 0.84375 -2.421875q0 -1.421875 -0.8125 -2.25q-0.796875 -0.828125 -2.09375 -0.828125q-0.796875 0 -1.453125 0.375q-0.640625 0.359375 -1.015625 0.953125l-1.578125 -0.203125l1.328125 -7.0l6.765625 0l0 1.609375l-5.4375 0l-0.734375 3.640625q1.234375 -0.84375 2.578125 -0.84375q1.78125 0 3.0 1.234375q1.234375 1.234375 1.234375 3.171875q0 1.84375 -1.078125 3.1875q-1.3125 1.65625 -3.578125 1.65625q-1.859375 0 -3.03125 -1.03125q-1.171875 -1.046875 -1.34375 -2.765625zm19.047607 -6.703125l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078827 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 46.984253l0 23.244095" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 46.984253l0 17.244095" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 64.22835l1.6517334 4.5380936l1.6517334 -4.5380936z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 163.02231l0 24.472443" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 163.02231l0 18.472443" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 181.49475l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 246.50656l0 15.496063" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 246.50656l0 9.496063" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 256.00262l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 320.99475l0 
15.496063" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 320.99475l0 9.496063" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 330.4908l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 395.49606l0 15.496063" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 395.49606l0 9.496063" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 404.99213l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m369.00787 470.0105l0 15.496063" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m369.00787 470.0105l0 9.496063" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m367.35614 479.50656l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path></g></svg>
-
diff --git a/tensorflow/lite/models/testdata/g3doc/speakerid.svg b/tensorflow/lite/models/testdata/g3doc/speakerid.svg
deleted file mode 100755
index dbe4312..0000000
--- a/tensorflow/lite/models/testdata/g3doc/speakerid.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 720.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> ... (vector path data for the deleted speakerid.svg diagram omitted: a flowchart of stacked boxes joined by downward arrows, with all text rendered as glyph-outline paths) ...
3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277039 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500732 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375z" fill-rule="nonzero"></path><path fill="#000000" d="m342.8172 379.91998q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 
1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.3757324 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.578827 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m282.0 416.00787l177.19684 0l0 41.984253l-177.19684 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m282.0 416.00787l177.19684 0l0 41.984253l-177.19684 0z" fill-rule="evenodd"></path><path fill="#000000" d="m297.11847 436.3029q0 -3.390625 1.8125 -5.296875q1.828125 -1.921875 4.703125 -1.921875q1.875 0 3.390625 0.90625q1.515625 0.890625 2.296875 2.5q0.796875 1.609375 0.796875 3.65625q0 2.0625 -0.84375 3.703125q-0.828125 1.625 -2.359375 2.46875q-1.53125 0.84375 -3.296875 0.84375q-1.921875 0 -3.4375 -0.921875q-1.5 -0.9375 -2.28125 -2.53125q-0.78125 -1.609375 -0.78125 -3.40625zm1.859375 0.03125q0 2.453125 1.3125 3.875q1.328125 1.40625 3.3125 1.40625q2.03125 0 3.34375 -1.421875q1.3125 -1.4375 1.3125 -4.0625q0 -1.65625 -0.5625 -2.890625q-0.546875 -1.234375 -1.640625 -1.921875q-1.078125 -0.6875 -2.421875 -0.6875q-1.90625 0 -3.28125 1.3125q-1.375 1.3125 -1.375 4.390625zm19.43332 6.59375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 
1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578827 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5270691 5.28125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313202 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.5788574 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 
-9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm15.500122 -6.390625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.578827 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m370.00787 80.98425l0 30.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m370.00787 80.98425l0 24.015747" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m368.35614 105.0l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m370.00787 152.98425l0 30.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m370.00787 152.98425l0 24.015747" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m368.35614 177.0l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m370.00787 224.98425l0 30.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m370.00787 224.98425l0 24.015747" fill-rule="evenodd"></path><path 
fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m368.35614 249.0l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m370.00787 296.98425l0 30.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m370.00787 296.98425l0 24.015747" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m368.35614 321.0l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m370.00787 385.99213l0.5984192 30.015747" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m370.00787 385.99213l0.47885132 24.016937" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m368.8353 410.042l1.7418518 4.5042725l1.5609436 -4.5701294z" fill-rule="evenodd"></path></g></svg>
-
diff --git a/tensorflow/lite/models/testdata/g3doc/tts.svg b/tensorflow/lite/models/testdata/g3doc/tts.svg
deleted file mode 100755
index 9664b78..0000000
--- a/tensorflow/lite/models/testdata/g3doc/tts.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-
-<svg version="1.1" viewBox="0.0 0.0 720.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="p.0"><path d="m0 0l720.0 0l0 540.0l-720.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l720.0 0l0 540.0l-720.0 0z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 14.7l166.01575 0l0 41.984253l-166.01575 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 14.7l166.01575 0l0 41.984253l-166.01575 0z" fill-rule="evenodd"></path><path fill="#000000" d="m276.43954 41.62l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0zm4.6676636 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375732 3.78125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313202 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.5788574 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 
0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.254181 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm6.8439026 0.28125l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm10.375702 0l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm15.719482 3.59375l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 
3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.578827 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 238.01575l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 238.01575l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m279.91705 264.93576l0 -13.593765l1.796875 0l0 11.98439l6.703125 0l0 1.609375l-8.5 0zm9.844452 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75001526 -0.46875 -1.6875153q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46876526 2.703125 0.96876526q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.000015l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.000015l-1.796875 0zm7.880371 0l0 -13.593765l2.71875 0l3.21875 9.625015q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.45314l2.421875 0l0 13.593765l-1.734375 0l0 -11.39064l-3.953125 11.39064l-1.625 0l-3.9375 -11.57814l0 11.57814l-1.734375 0zm21.212677 0l-1.671875 0l0 -10.64064q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.56251526 -1.765625 0.85939026l0 -1.6250153q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.656265zm12.918396 4.0q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.2343903q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.6718903q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm8.853302 -4.0l-1.671875 0l0 -10.64064q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.56251526 -1.765625 0.85939026l0 -1.6250153q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.656265zm12.860077 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625153 0.78125 -2.0156403q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.6093903q-0.328125 
0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm4.1726074 -5.765625q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71876526 -0.5 -1.7031403q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.6562653q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.8281403q0 0.96875 0.609375 1.5781403q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.60939026 0.625 -1.4843903q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.2812653q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm8.031952 3.921875l3.59375 -5.125l-3.328125 -4.7343903l2.09375 0l1.515625 2.3125153q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.3281403l1.984375 0l-3.390625 4.6406403l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.26564l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.1406403q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.9531403 1.453125 -5.7343903q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.8593903q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.812515l1.359375 0l0 8.812515l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.1406403l-4.25 6.1406403l4.25 0zm6.578827 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.3906403 -0.890625 -2.6718903q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.0156403 0.71875 4.2343903q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 296.0l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 296.0l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m285.10492 322.91998l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844452 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 
-0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.880371 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm23.697021 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm10.434021 5.609375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.2283325 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.375702 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 
5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.5788574 8.78125l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 358.1l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 358.1l168.0 0l0 41.984253l-168.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m285.10492 385.02l0 -13.59375l1.796875 0l0 11.984375l6.703125 0l0 1.609375l-8.5 0zm9.844452 -4.375l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.506073 4.375l0 -12.0l-4.46875 0l0 -1.59375l10.765625 0l0 1.59375l-4.5 0l0 12.0l-1.796875 0zm7.880371 0l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm14.9313965 -3.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 
-0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm19.199646 7.59375q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.2283325 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.375702 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm18.640625 -10.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875732 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm6.5788574 8.78125l-1.1875 
0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 78.7l168.0 0l0 58.992126l-168.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 78.7l168.0 0l0 58.992126l-168.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m279.56058 105.619995l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.53659 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.0979614 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.926056 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 
1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125732 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547577 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277069 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500702 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm15.094482 4.921875l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625z" fill-rule="nonzero"></path><path fill="#000000" d="m310.4336 131.62q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 
0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.5720825 -7.59375l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm10.375702 0l1.671875 -0.21875q0.28125 1.421875 0.96875 2.046875q0.703125 0.625 1.6875 0.625q1.1875 0 2.0 -0.8125q0.8125 -0.828125 0.8125 -2.03125q0 -1.140625 -0.765625 -1.890625q-0.75 -0.75 -1.90625 -0.75q-0.46875 0 -1.171875 0.1875l0.1875 -1.46875q0.15625 0.015625 0.265625 0.015625q1.0625 0 1.90625 -0.546875q0.859375 -0.5625 0.859375 -1.71875q0 -0.921875 -0.625 -1.515625q-0.609375 -0.609375 -1.59375 -0.609375q-0.96875 0 -1.625 0.609375q-0.640625 0.609375 -0.828125 1.84375l-1.671875 -0.296875q0.296875 -1.6875 1.375 -2.609375q1.09375 -0.921875 2.71875 -0.921875q1.109375 0 2.046875 0.484375q0.9375 0.46875 1.421875 1.296875q0.5 0.828125 0.5 1.75q0 0.890625 -0.46875 1.609375q-0.46875 0.71875 -1.40625 1.15625q1.21875 0.265625 1.875 1.15625q0.671875 0.875 0.671875 2.1875q0 1.78125 -1.296875 3.015625q-1.296875 1.234375 -3.28125 1.234375q-1.796875 0 -2.984375 -1.0625q-1.171875 -1.0625 -1.34375 -2.765625zm15.719452 3.59375l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.3757324 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm16.265625 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860107 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm4.172577 -5.765625q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 
1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm10.235077 7.921875l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m257.8 488.0l180.0 0l0 46.992126l-180.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m257.8 488.0l180.0 0l0 46.992126l-180.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m269.1322 508.29498q0 -3.390625 1.8125 -5.296875q1.828125 -1.921875 4.703125 -1.921875q1.875 0 3.390625 0.90625q1.515625 0.890625 2.296875 2.5q0.796875 1.609375 0.796875 3.65625q0 2.0625 -0.84375 3.703125q-0.828125 1.625 -2.359375 2.46875q-1.53125 0.84375 -3.296875 0.84375q-1.921875 0 -3.4375 -0.921875q-1.5 -0.9375 -2.28125 -2.53125q-0.78125 -1.609375 -0.78125 -3.40625zm1.859375 0.03125q0 2.453125 1.3125 3.875q1.328125 1.40625 3.3125 1.40625q2.03125 0 3.34375 -1.421875q1.3125 -1.4375 1.3125 -4.0625q0 -1.65625 -0.5625 -2.890625q-0.546875 -1.234375 -1.640625 -1.921875q-1.078125 -0.6875 -2.421875 -0.6875q-1.90625 0 -3.28125 1.3125q-1.375 1.3125 -1.375 4.390625zm19.433289 6.59375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.5788574 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5270386 5.28125l0 -13.640625l1.53125 0l0 1.28125q0.53125 -0.75 1.203125 -1.125q0.6875 -0.375 1.640625 -0.375q1.265625 0 2.234375 0.65625q0.96875 0.640625 1.453125 1.828125q0.5 1.1875 0.5 2.59375q0 1.515625 -0.546875 2.734375q-0.546875 1.203125 -1.578125 1.84375q-1.03125 0.640625 -2.171875 0.640625q-0.84375 0 -1.515625 -0.34375q-0.65625 -0.359375 -1.078125 -0.890625l0 4.796875l-1.671875 0zm1.515625 -8.65625q0 1.90625 0.765625 2.8125q0.78125 0.90625 1.875 
0.90625q1.109375 0 1.890625 -0.9375q0.796875 -0.9375 0.796875 -2.921875q0 -1.875 -0.78125 -2.8125q-0.765625 -0.9375 -1.84375 -0.9375q-1.0625 0 -1.890625 1.0q-0.8125 1.0 -0.8125 2.890625zm15.313232 4.875l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm7.578827 -1.5l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm2.353302 -6.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm10.015625 -8.75l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm3.2542114 0l0 -1.359375l6.265625 -7.1875q-1.0625 0.046875 -1.875 0.046875l-4.015625 0l0 -1.359375l8.046875 0l0 1.109375l-5.34375 6.25l-1.015625 1.140625q1.109375 -0.078125 2.09375 -0.078125l4.5625 0l0 1.4375l-8.71875 0zm16.953125 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm23.074646 -2.125l-8.96875 0l0 -1.5625l8.96875 0l0 1.5625zm0 4.125l-8.96875 0l0 -1.546875l8.96875 0l0 1.546875zm13.125122 3.875l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 
1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm4.3444824 -3.140625l1.59375 -0.15625q0.203125 1.140625 0.78125 1.65625q0.578125 0.5 1.484375 0.5q0.765625 0 1.34375 -0.34375q0.578125 -0.359375 0.953125 -0.953125q0.375 -0.59375 0.625 -1.59375q0.25 -1.0 0.25 -2.03125q0 -0.109375 -0.015625 -0.34375q-0.5 0.796875 -1.375 1.296875q-0.859375 0.5 -1.875 0.5q-1.6875 0 -2.859375 -1.21875q-1.171875 -1.234375 -1.171875 -3.234375q0 -2.078125 1.21875 -3.328125q1.234375 -1.265625 3.0625 -1.265625q1.328125 0 2.421875 0.71875q1.109375 0.703125 1.671875 2.03125q0.578125 1.328125 0.578125 3.828125q0 2.609375 -0.578125 4.15625q-0.5625 1.546875 -1.6875 2.359375q-1.109375 0.796875 -2.609375 0.796875q-1.59375 0 -2.609375 -0.890625q-1.0 -0.890625 -1.203125 -2.484375zm6.828125 -6.0q0 -1.4375 -0.765625 -2.28125q-0.765625 -0.859375 -1.84375 -0.859375q-1.109375 0 -1.9375 0.921875q-0.828125 0.90625 -0.828125 2.34375q0 1.3125 0.78125 2.125q0.796875 0.796875 1.9375 0.796875q1.171875 0 1.90625 -0.796875q0.75 -0.8125 0.75 -2.25zm11.953827 -1.125l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078857 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m348.0 280.0l0 16.0" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.0 280.0l0 10.0" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.34827 290.0l1.6517334 4.538086l1.6517334 -4.538086z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m348.0 337.98425l0 20.125977" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.0 337.98425l0 14.125977" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.34827 352.11023l1.6517334 4.5381165l1.6517334 -4.5381165z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m347.00787 56.684254l1.0078735 22.015743" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m347.00787 56.68425l0.7334595 16.022026" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.09134 
72.781815l1.857544 4.4578094l1.4424744 -4.6088867z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m348.0 400.08426l0.31497192 21.921265" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.0 400.08423l0.22875977 15.921875" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.5772 416.02985l1.7167358 4.5138855l1.5863647 -4.5613403z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m264.0 158.19606l168.0 0l0 58.992126l-168.0 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m264.0 158.19606l168.0 0l0 58.992126l-168.0 0z" fill-rule="evenodd"></path><path fill="#000000" d="m279.56058 185.11606l0 -13.59375l9.171875 0l0 1.59375l-7.375 0l0 4.21875l6.375 0l0 1.609375l-6.375 0l0 6.171875l-1.796875 0zm17.53659 0l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.8913574 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.144806 0l0 -13.59375l1.671875 0l0 13.59375l-1.671875 0zm4.0979614 3.796875l-0.171875 -1.5625q0.546875 0.140625 0.953125 0.140625q0.546875 0 0.875 -0.1875q0.34375 -0.1875 0.5625 -0.515625q0.15625 -0.25 0.5 -1.25q0.046875 -0.140625 0.15625 -0.40625l-3.734375 -9.875l1.796875 0l2.046875 5.71875q0.40625 1.078125 0.71875 2.28125q0.28125 -1.15625 0.6875 -2.25l2.09375 -5.75l1.671875 0l-3.75 10.03125q-0.59375 1.625 -0.9375 2.234375q-0.4375 0.828125 -1.015625 1.203125q-0.578125 0.390625 -1.375 0.390625q-0.484375 0 -1.078125 -0.203125zm19.328125 -8.5625l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.926056 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.281982 4.921875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 
-0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm10.375702 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm17.125732 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547577 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm6.546875 2.109375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277069 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.500702 5.875l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm17.578857 3.3125l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 
3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0z" fill-rule="nonzero"></path><path fill="#000000" d="m310.4336 211.11606q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm8.8533325 -4.0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860077 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 -0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm4.172577 -5.765625q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm8.031982 3.921875l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm16.265625 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm12.860107 -1.609375l0 1.609375l-8.984375 0q-0.015625 -0.609375 0.1875 -1.15625q0.34375 -0.921875 1.09375 -1.8125q0.765625 -0.890625 2.1875 -2.0625q2.21875 -1.8125 3.0 -2.875q0.78125 -1.0625 0.78125 -2.015625q0 -0.984375 
-0.71875 -1.671875q-0.703125 -0.6875 -1.84375 -0.6875q-1.203125 0 -1.9375 0.734375q-0.71875 0.71875 -0.71875 2.0l-1.71875 -0.171875q0.171875 -1.921875 1.328125 -2.921875q1.15625 -1.015625 3.09375 -1.015625q1.953125 0 3.09375 1.09375q1.140625 1.078125 1.140625 2.6875q0 0.8125 -0.34375 1.609375q-0.328125 0.78125 -1.109375 1.65625q-0.765625 0.859375 -2.5625 2.390625q-1.5 1.265625 -1.9375 1.71875q-0.421875 0.4375 -0.703125 0.890625l6.671875 0zm4.172577 -5.765625q-1.046875 -0.375 -1.546875 -1.078125q-0.5 -0.71875 -0.5 -1.703125q0 -1.484375 1.0625 -2.484375q1.078125 -1.015625 2.84375 -1.015625q1.78125 0 2.859375 1.03125q1.09375 1.03125 1.09375 2.515625q0 0.953125 -0.5 1.65625q-0.484375 0.703125 -1.5 1.078125q1.25 0.40625 1.90625 1.3125q0.65625 0.90625 0.65625 2.171875q0 1.75 -1.234375 2.9375q-1.234375 1.1875 -3.25 1.1875q-2.015625 0 -3.25 -1.1875q-1.234375 -1.203125 -1.234375 -2.984375q0 -1.328125 0.671875 -2.21875q0.671875 -0.890625 1.921875 -1.21875zm-0.328125 -2.828125q0 0.96875 0.609375 1.578125q0.625 0.609375 1.625 0.609375q0.953125 0 1.5625 -0.609375q0.625 -0.609375 0.625 -1.484375q0 -0.921875 -0.640625 -1.546875q-0.625 -0.625 -1.578125 -0.625q-0.953125 0 -1.578125 0.609375q-0.625 0.609375 -0.625 1.46875zm-0.546875 6.28125q0 0.71875 0.328125 1.390625q0.34375 0.65625 1.015625 1.03125q0.671875 0.359375 1.4375 0.359375q1.203125 0 1.984375 -0.765625q0.78125 -0.78125 0.78125 -1.96875q0 -1.203125 -0.8125 -1.984375q-0.796875 -0.796875 -2.0 -0.796875q-1.1875 0 -1.96875 0.78125q-0.765625 0.78125 -0.765625 1.953125zm10.235077 7.921875l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m348.0 137.69212l0 20.503937" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.0 137.69212l0 14.503937" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.34827 152.19606l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m348.0 217.18819l0 20.818893" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.0 217.1882l0 14.818893" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.34827 232.0071l1.6517334 4.538101l1.6517334 -4.538101z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m253.3 422.01575l190.01573 0l0 41.984253l-190.01573 0z" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m253.3 422.01575l190.01573 0l0 41.984253l-190.01573 0z" fill-rule="evenodd"></path><path fill="#000000" d="m269.44388 448.93576l0 -13.59375l6.03125 0q1.8125 0 2.75 0.359375q0.953125 0.359375 1.515625 1.296875q0.5625 0.921875 0.5625 2.046875q0 1.453125 -0.9375 2.453125q-0.921875 0.984375 -2.890625 1.25q0.71875 0.34375 1.09375 0.671875q0.78125 0.734375 1.484375 1.8125l2.375 3.703125l-2.265625 0l-1.796875 -2.828125q-0.796875 -1.21875 -1.3125 -1.875q-0.5 -0.65625 -0.90625 -0.90625q-0.40625 -0.265625 -0.8125 -0.359375q-0.3125 -0.078125 -1.015625 -0.078125l-2.078125 0l0 6.046875l-1.796875 0zm1.796875 -7.59375l3.859375 
0q1.234375 0 1.921875 -0.25q0.703125 -0.265625 1.0625 -0.828125q0.375 -0.5625 0.375 -1.21875q0 -0.96875 -0.703125 -1.578125q-0.703125 -0.625 -2.21875 -0.625l-4.296875 0l0 4.5zm18.176086 4.421875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547577 2.265625l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm9.34375 3.609375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.9069824 0l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm6.212677 0l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.978302 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.110077 5.875l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm14.031982 -1.5l0.234375 
1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm9.897858 5.5q-1.375 -1.75 -2.328125 -4.078125q-0.953125 -2.34375 -0.953125 -4.84375q0 -2.21875 0.703125 -4.234375q0.84375 -2.34375 2.578125 -4.671875l1.203125 0q-1.125 1.921875 -1.484375 2.75q-0.5625 1.28125 -0.890625 2.671875q-0.40625 1.734375 -0.40625 3.484375q0 4.46875 2.78125 8.921875l-1.203125 0zm11.228302 -14.265625l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm13.875702 4.40625l0 -3.25l-5.90625 0l0 -1.53125l6.21875 -8.8125l1.359375 0l0 8.8125l1.84375 0l0 1.53125l-1.84375 0l0 3.25l-1.671875 0zm0 -4.78125l0 -6.140625l-4.25 6.140625l4.25 0zm4.3757324 4.78125l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0zm16.265625 0l-1.671875 0l0 -10.640625q-0.59375 0.578125 -1.578125 1.15625q-0.984375 0.5625 -1.765625 0.859375l0 -1.625q1.40625 -0.65625 2.453125 -1.59375q1.046875 -0.9375 1.484375 -1.8125l1.078125 0l0 13.65625zm4.344452 -3.140625l1.59375 -0.15625q0.203125 1.140625 0.78125 1.65625q0.578125 0.5 1.484375 0.5q0.765625 0 1.34375 -0.34375q0.578125 -0.359375 0.953125 -0.953125q0.375 -0.59375 0.625 -1.59375q0.25 -1.0 0.25 -2.03125q0 -0.109375 -0.015625 -0.34375q-0.5 0.796875 -1.375 1.296875q-0.859375 0.5 -1.875 0.5q-1.6875 0 -2.859375 -1.21875q-1.171875 -1.234375 -1.171875 -3.234375q0 -2.078125 1.21875 -3.328125q1.234375 -1.265625 3.0625 -1.265625q1.328125 0 2.421875 0.71875q1.109375 0.703125 1.671875 2.03125q0.578125 1.328125 0.578125 3.828125q0 2.609375 -0.578125 4.15625q-0.5625 1.546875 -1.6875 2.359375q-1.109375 0.796875 -2.609375 0.796875q-1.59375 0 -2.609375 -0.890625q-1.0 -0.890625 -1.203125 -2.484375zm6.828125 -6.0q0 -1.4375 -0.765625 -2.28125q-0.765625 -0.859375 -1.84375 -0.859375q-1.109375 0 -1.9375 0.921875q-0.828125 0.90625 -0.828125 2.34375q0 1.3125 0.78125 2.125q0.796875 0.796875 1.9375 0.796875q1.171875 0 1.90625 -0.796875q0.75 -0.8125 0.75 -2.25zm11.953857 -1.125l-1.65625 0.125q-0.21875 -0.984375 -0.640625 -1.421875q-0.671875 -0.71875 -1.65625 -0.71875q-0.8125 0 -1.40625 0.4375q-0.796875 0.578125 -1.25 1.6875q-0.453125 1.09375 -0.46875 3.140625q0.609375 -0.921875 1.46875 -1.359375q0.875 -0.453125 1.828125 -0.453125q1.671875 0 2.84375 
1.234375q1.171875 1.234375 1.171875 3.171875q0 1.28125 -0.546875 2.390625q-0.546875 1.09375 -1.515625 1.6875q-0.96875 0.578125 -2.1875 0.578125q-2.09375 0 -3.40625 -1.53125q-1.3125 -1.546875 -1.3125 -5.0625q0 -3.953125 1.453125 -5.734375q1.265625 -1.5625 3.421875 -1.5625q1.609375 0 2.625 0.90625q1.03125 0.890625 1.234375 2.484375zm-6.8125 5.859375q0 0.859375 0.359375 1.65625q0.375 0.78125 1.03125 1.203125q0.65625 0.40625 1.375 0.40625q1.0625 0 1.8125 -0.84375q0.765625 -0.859375 0.765625 -2.328125q0 -1.40625 -0.75 -2.21875q-0.75 -0.8125 -1.890625 -0.8125q-1.125 0 -1.921875 0.8125q-0.78125 0.8125 -0.78125 2.125zm10.078827 8.40625l-1.1875 0q2.765625 -4.453125 2.765625 -8.921875q0 -1.734375 -0.390625 -3.453125q-0.328125 -1.390625 -0.890625 -2.671875q-0.359375 -0.84375 -1.484375 -2.78125l1.1875 0q1.75 2.328125 2.578125 4.671875q0.71875 2.015625 0.71875 4.234375q0 2.5 -0.96875 4.84375q-0.953125 2.328125 -2.328125 4.078125z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m348.30786 464.0l-0.50393677 24.0" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m348.30786 464.0l-0.37799072 18.001312" fill-rule="evenodd"></path><path fill="#000000" stroke="#000000" stroke-width="1.0" stroke-linecap="butt" d="m346.2785 481.96664l1.5561218 4.5717773l1.7466431 -4.502411z" fill-rule="evenodd"></path></g></svg>
-
diff --git a/tensorflow/lite/models/testdata/speech_asr_lm_model.test_spec b/tensorflow/lite/models/testdata/speech_asr_lm_model.test_spec
deleted file mode 100644
index f7f518b..0000000
--- a/tensorflow/lite/models/testdata/speech_asr_lm_model.test_spec
+++ /dev/null
@@ -1,202 +0,0 @@
-load_model: "speech_asr_lm_model.tflite"
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 3
- input: "63982"
- input: "8409"
- output: "-2.75389"
-}
-invoke {
- id: 4
- input: "8409"
- input: "1488"
- output: "0.601841"
-}
-invoke {
- id: 5
- input: "1488"
- input: "63981"
- output: "-0.314846"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 6
- input: "63982"
- input: "8409"
- output: "-2.75389"
-}
-invoke {
- id: 7
- input: "8409"
- input: "3082"
- output: "-3.63721"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 8
- input: "63982"
- input: "8409"
- output: "-2.75389"
-}
-invoke {
- id: 9
- input: "8409"
- input: "18965"
- output: "-6.93985"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 13
- input: "63982"
- input: "12516"
- output: "-6.20867"
-}
-invoke {
- id: 14
- input: "12516"
- input: "914"
- output: "-0.407277"
-}
-invoke {
- id: 15
- input: "914"
- input: "63981"
- output: "-3.82091"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 19
- input: "63982"
- input: "12516"
- output: "-6.20867"
-}
-invoke {
- id: 20
- input: "12516"
- input: "914"
- output: "-0.407277"
-}
-invoke {
- id: 21
- input: "914"
- input: "48619"
- output: "-4.02131"
-}
-invoke {
- id: 22
- input: "48619"
- input: "63981"
- output: "-0.677399"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 26
- input: "63982"
- input: "12516"
- output: "-6.20867"
-}
-invoke {
- id: 27
- input: "12516"
- input: "914"
- output: "-0.407277"
-}
-invoke {
- id: 28
- input: "914"
- input: "4700"
- output: "-4.056"
-}
-invoke {
- id: 29
- input: "4700"
- input: "63981"
- output: "0.415889"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 30
- input: "63982"
- input: "12516"
- output: "-6.20867"
-}
-invoke {
- id: 31
- input: "12516"
- input: "914"
- output: "-0.407277"
-}
-invoke {
- id: 32
- input: "914"
- input: "51923"
- output: "-14.1147"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 34
- input: "63982"
- input: "5520"
- output: "-4.56971"
-}
-invoke {
- id: 35
- input: "5520"
- input: "16318"
- output: "-1.54815"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 36
- input: "63982"
- input: "5520"
- output: "-4.56971"
-}
-invoke {
- id: 37
- input: "5520"
- input: "28303"
- output: "-14.0947"
-}
-init_state: "20,21,40,41,60,61"
-invoke {
- id: 38
- input: "63982"
- input: "12451"
- output: "-6.24243"
-}
-invoke {
- id: 39
- input: "12451"
- input: "752"
- output: "0.0700736"
-}
-invoke {
- id: 40
- input: "752"
- input: "11"
- output: "-1.72744"
-}
-invoke {
- id: 41
- input: "11"
- input: "19454"
- output: "-3.19211"
-}
-invoke {
- id: 42
- input: "19454"
- input: "16989"
- output: "-4.01684"
-}
-invoke {
- id: 43
- input: "16989"
- input: "40168"
- output: "-8.91317"
-}
-invoke {
- id: 44
- input: "40168"
- input: "63981"
- output: "-0.675377"
-}
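For readers skimming the deletion above: the spec is a flat sequence of directives, where `load_model` names the `.tflite` file, each `init_state` resets the LM state tensors and starts a fresh scoring run, and every `invoke` block feeds two token ids and checks one log-probability output. Below is a minimal sketch of a reader for this format, assuming the quoted-value layout shown above; this helper is illustrative and not part of the TFLite test harness.

```python
def parse_test_spec(text):
    """Parse the load_model / init_state / invoke spec format shown above."""
    spec = {"model": None, "runs": []}
    run = None
    for raw in text.splitlines():
        line = raw.strip()
        if line.startswith("load_model:"):
            spec["model"] = line.split('"')[1]
        elif line.startswith("init_state:"):
            run = []  # each state reset begins a new scoring run
            spec["runs"].append(run)
        elif line == "invoke {":
            run.append({"id": None, "inputs": [], "output": None})
        elif line.startswith("id:"):
            run[-1]["id"] = int(line.split(":")[1])
        elif line.startswith("input:"):
            run[-1]["inputs"].append(line.split('"')[1])
        elif line.startswith("output:"):
            run[-1]["output"] = float(line.split('"')[1])
    return spec
```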
diff --git a/tensorflow/lite/schema/BUILD b/tensorflow/lite/schema/BUILD
index 407655b..038a470 100644
--- a/tensorflow/lite/schema/BUILD
+++ b/tensorflow/lite/schema/BUILD
@@ -12,7 +12,7 @@
py_binary(
name = "upgrade_schema",
srcs = ["upgrade_schema.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [":upgrade_schema_main_lib"],
)
@@ -38,7 +38,7 @@
name = "upgrade_schema_test",
size = "small",
srcs = ["upgrade_schema_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"manual",
diff --git a/tensorflow/lite/testing/BUILD b/tensorflow/lite/testing/BUILD
index f1cea7a..0501e35 100644
--- a/tensorflow/lite/testing/BUILD
+++ b/tensorflow/lite/testing/BUILD
@@ -509,6 +509,7 @@
"sub",
"sum", # high error
"tanh",
+ "transpose_conv",
]
[gen_zipped_test_file(
diff --git a/tensorflow/lite/testing/op_tests/transpose_conv.py b/tensorflow/lite/testing/op_tests/transpose_conv.py
index 85c6eaa..f403ba0 100644
--- a/tensorflow/lite/testing/op_tests/transpose_conv.py
+++ b/tensorflow/lite/testing/op_tests/transpose_conv.py
@@ -34,22 +34,50 @@
"""Make a set of tests to do transpose_conv."""
# Tensorflow only supports equal strides
- test_parameters = [{
- "input_shape": [[1, 3, 4, 1], [1, 10, 10, 3], [3, 20, 20, 1]],
- "filter_size": [[1, 1], [1, 2], [3, 3]],
- "strides": [[1, 1, 1, 1], [1, 3, 3, 1]],
- "padding": ["SAME", "VALID"],
- "data_format": ["NHWC"],
- "channel_multiplier": [1, 2],
- }]
+ test_parameters = [
+ {
+ "input_shape": [[1, 3, 4, 1], [1, 10, 10, 3], [3, 20, 20, 1]],
+ "filter_size": [[1, 1], [1, 2], [3, 3]],
+ "strides": [[1, 1, 1, 1], [1, 3, 3, 1]],
+ "padding": ["SAME", "VALID"],
+ "data_format": ["NHWC"],
+ "channel_multiplier": [1, 2],
+ "output_shape": [[]],
+ "fully_quantize": [False]
+ },
+ # TODO(yunluli): Adding simple tests for now to unblock edgetpu debugging.
+ # Need to add more test cases.
+ {
+ "input_shape": [[1, 3, 3, 1]],
+ "filter_size": [[3, 3, 2, 1]],
+ "strides": [[1, 1, 1, 1]],
+ "padding": ["SAME"],
+ "data_format": ["NHWC"],
+ "channel_multiplier": [1],
+ "output_shape": [[1, 3, 3, 2]],
+ "fully_quantize": [True]
+ },
+ {
+ "input_shape": [[1, 3, 3, 1]],
+ "filter_size": [[3, 3, 2, 1]],
+ "strides": [[1, 2, 2, 1]],
+ "padding": ["SAME"],
+ "data_format": ["NHWC"],
+ "channel_multiplier": [1],
+ "output_shape": [[1, 6, 6, 2]],
+ "fully_quantize": [True]
+ }
+ ]
def get_tensor_shapes(parameters):
input_shape = parameters["input_shape"]
filter_size = parameters["filter_size"]
- filter_shape = filter_size + [
- input_shape[3], parameters["channel_multiplier"]
- ]
- return [input_shape, filter_shape]
+ if not parameters["fully_quantize"]:
+ filter_shape = filter_size + [
+ input_shape[3], parameters["channel_multiplier"]
+ ]
+ return [input_shape, filter_shape]
+ return [input_shape, filter_size]
def build_graph(parameters):
"""Build a transpose_conv graph given `parameters`."""
@@ -60,28 +88,48 @@
filter_input = tf.compat.v1.placeholder(
dtype=tf.float32, name="filter", shape=filter_shape)
- conv_outputs = tf.nn.conv2d(
- input_tensor,
- filter_input,
- strides=parameters["strides"],
- padding=parameters["padding"],
- data_format=parameters["data_format"])
- out = tf.compat.v1.nn.conv2d_backprop_input(
- input_shape,
- filter_input,
- conv_outputs,
- strides=parameters["strides"],
- padding=parameters["padding"],
- data_format=parameters["data_format"])
- input_tensors = [input_tensor, filter_input]
+ if not parameters["fully_quantize"]:
+ input_tensors = [input_tensor, filter_input]
+ conv_outputs = tf.nn.conv2d(
+ input_tensor,
+ filter_input,
+ strides=parameters["strides"],
+ padding=parameters["padding"],
+ data_format=parameters["data_format"])
+ out = tf.compat.v1.nn.conv2d_backprop_input(
+ input_shape,
+ filter_input,
+ conv_outputs,
+ strides=parameters["strides"],
+ padding=parameters["padding"],
+ data_format=parameters["data_format"])
+ else:
+ input_tensors = [input_tensor]
+ filter_input = create_tensor_data(
+ np.float32, filter_shape, min_value=-1, max_value=1)
+ out = tf.nn.conv2d_transpose(
+ input_tensor,
+ filter_input,
+ parameters["output_shape"],
+ strides=parameters["strides"],
+ padding=parameters["padding"],
+ data_format=parameters["data_format"])
+
return input_tensors, [out]
def build_inputs(parameters, sess, inputs, outputs):
input_shape, filter_shape = get_tensor_shapes(parameters)
- values = [
- create_tensor_data(np.float32, input_shape),
- create_tensor_data(np.float32, filter_shape)
- ]
+ if not parameters["fully_quantize"]:
+ values = [
+ create_tensor_data(np.float32, input_shape),
+ create_tensor_data(np.float32, filter_shape)
+ ]
+ else:
+ values = [
+ create_tensor_data(
+ np.float32, input_shape, min_value=-1, max_value=1),
+ ]
+
return values, sess.run(outputs, feed_dict=dict(zip(inputs, values)))
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
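A quick sanity check on the new fully-quantized cases above: with `SAME` padding, `tf.nn.conv2d_transpose` produces `out_dim = in_dim * stride`, and its filters use the `[height, width, out_channels, in_channels]` layout, so the `output_shape` values in the new parameter dicts follow directly. A minimal sketch (not part of the change):

```python
def transpose_conv_same_output_shape(input_shape, filter_size, strides):
    # conv2d_transpose filters are HWOI: dim 2 holds the output channel count.
    batch, in_h, in_w, _ = input_shape
    out_channels = filter_size[2]
    return [batch, in_h * strides[1], in_w * strides[2], out_channels]

# The two fully_quantize cases added above:
assert transpose_conv_same_output_shape([1, 3, 3, 1], [3, 3, 2, 1],
                                        [1, 1, 1, 1]) == [1, 3, 3, 2]
assert transpose_conv_same_output_shape([1, 3, 3, 1], [3, 3, 2, 1],
                                        [1, 2, 2, 1]) == [1, 6, 6, 2]
```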
diff --git a/tensorflow/lite/tools/benchmark/BUILD b/tensorflow/lite/tools/benchmark/BUILD
index b7f4383..cd173a7 100644
--- a/tensorflow/lite/tools/benchmark/BUILD
+++ b/tensorflow/lite/tools/benchmark/BUILD
@@ -106,7 +106,12 @@
name = "benchmark_tflite_model_lib",
srcs = ["benchmark_tflite_model.cc"],
hdrs = ["benchmark_tflite_model.h"],
- copts = common_copts,
+ copts = common_copts + select({
+ "//tensorflow:ios": [
+ "-xobjective-c++",
+ ],
+ "//conditions:default": [],
+ }),
deps = [
":benchmark_model_lib",
":benchmark_utils",
@@ -125,6 +130,9 @@
"//tensorflow:android": [
"//tensorflow/lite/delegates/gpu:delegate",
],
+ "//tensorflow:ios": [
+ "//tensorflow/lite/delegates/gpu:metal_delegate",
+ ],
"//conditions:default": [],
}),
)
diff --git a/tensorflow/lite/tools/benchmark/README.md b/tensorflow/lite/tools/benchmark/README.md
index 3741a95..b9655aa 100644
--- a/tensorflow/lite/tools/benchmark/README.md
+++ b/tensorflow/lite/tools/benchmark/README.md
@@ -58,7 +58,11 @@
benchmark tool will not correctly use NNAPI.
* `use_gpu`: `bool` (default=false) \
Whether to use the [GPU accelerator delegate](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/delegates/gpu).
- This option is currently only available on Android devices.
+ This option is currently only available on Android and iOS devices.
+* `gpu_wait_type`: `str` (default="") \
+  Which GPU wait type to use when using the GPU delegate on iOS. Should be
+  one of the following: passive, active, do_not_wait, aggressive. When left
+  blank, passive mode is used by default (see the usage sketch below).
* `enable_op_profiling`: `bool` (default=false) \
Whether to enable per-operator profiling measurement.
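To make the new iOS options concrete, here is a hedged invocation sketch. The `use_gpu`, `gpu_wait_type`, and `enable_op_profiling` flags are documented above; the binary path, model path, and the `--graph` flag name are assumptions for illustration.

```python
import subprocess

# Hypothetical paths; adjust to your build output and model location.
BENCHMARK_BIN = "bazel-bin/tensorflow/lite/tools/benchmark/benchmark_model"
MODEL = "/tmp/mobilenet_v1_1.0_224.tflite"

subprocess.run(
    [
        BENCHMARK_BIN,
        "--graph=" + MODEL,           # assumed model flag
        "--use_gpu=true",
        "--gpu_wait_type=active",     # passive | active | do_not_wait | aggressive
        "--enable_op_profiling=true",
    ],
    check=True,
)
```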
diff --git a/tensorflow/lite/tools/benchmark/benchmark_tflite_model.cc b/tensorflow/lite/tools/benchmark/benchmark_tflite_model.cc
index 445f370..3448dde 100644
--- a/tensorflow/lite/tools/benchmark/benchmark_tflite_model.cc
+++ b/tensorflow/lite/tools/benchmark/benchmark_tflite_model.cc
@@ -31,6 +31,13 @@
#if defined(__ANDROID__)
#include "tensorflow/lite/delegates/gpu/delegate.h"
#include "tensorflow/lite/nnapi/nnapi_util.h"
+#elif defined(__APPLE__)
+#include "TargetConditionals.h"
+#if TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
+// Only enable metal delegate when using a real iPhone device.
+#define REAL_IPHONE_DEVICE
+#include "tensorflow/lite/delegates/gpu/metal_delegate.h"
+#endif
#endif
#include "tensorflow/lite/kernels/register.h"
@@ -264,10 +271,14 @@
default_params.AddParam("nnapi_accelerator_name",
BenchmarkParam::Create<std::string>(""));
default_params.AddParam("use_gpu", BenchmarkParam::Create<bool>(false));
-#if defined(__ANDROID__)
+#if defined(__ANDROID__) || defined(REAL_IPHONE_DEVICE)
default_params.AddParam("gpu_precision_loss_allowed",
BenchmarkParam::Create<bool>(true));
#endif
+#if defined(REAL_IPHONE_DEVICE)
+ default_params.AddParam("gpu_wait_type",
+ BenchmarkParam::Create<std::string>(""));
+#endif
default_params.AddParam("allow_fp16", BenchmarkParam::Create<bool>(false));
default_params.AddParam("require_full_delegation",
BenchmarkParam::Create<bool>(false));
@@ -314,11 +325,17 @@
"nnapi_accelerator_name", &params_,
"the name of the nnapi accelerator to use (requires Android Q+)"),
CreateFlag<bool>("use_gpu", &params_, "use gpu"),
-#if defined(__ANDROID__)
+#if defined(__ANDROID__) || defined(REAL_IPHONE_DEVICE)
CreateFlag<bool>("gpu_precision_loss_allowed", &params_,
"Allow to process computation in lower precision than "
"FP32 in GPU. By default, it's enabled."),
#endif
+#if defined(REAL_IPHONE_DEVICE)
+ CreateFlag<std::string>(
+ "gpu_wait_type", &params_,
+ "GPU wait type. Should be one of the following: passive, active, "
+ "do_not_wait, aggressive"),
+#endif
CreateFlag<bool>("allow_fp16", &params_, "allow fp16"),
CreateFlag<bool>("require_full_delegation", &params_,
"require delegate to run the entire graph"),
@@ -363,10 +380,14 @@
}
#endif
TFLITE_LOG(INFO) << "Use gpu : [" << params_.Get<bool>("use_gpu") << "]";
-#if defined(__ANDROID__)
+#if defined(__ANDROID__) || defined(REAL_IPHONE_DEVICE)
TFLITE_LOG(INFO) << "Allow lower precision in gpu : ["
<< params_.Get<bool>("gpu_precision_loss_allowed") << "]";
#endif
+#if defined(REAL_IPHONE_DEVICE)
+ TFLITE_LOG(INFO) << "GPU delegate wait type : ["
+ << params_.Get<std::string>("gpu_wait_type") << "]";
+#endif
TFLITE_LOG(INFO) << "Allow fp16 : [" << params_.Get<bool>("allow_fp16")
<< "]";
TFLITE_LOG(INFO) << "Require full delegation : ["
@@ -639,13 +660,40 @@
TfLiteGpuDelegateOptionsV2 gpu_opts = TfLiteGpuDelegateOptionsV2Default();
gpu_opts.inference_preference =
TFLITE_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED;
- gpu_opts.is_precision_loss_allowed =
- params_.Get<bool>("gpu_precision_loss_allowed") ? 1 : 0;
+ if (params_.Get<bool>("gpu_precision_loss_allowed")) {
+ gpu_opts.inference_priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY;
+ gpu_opts.inference_priority2 =
+ TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE;
+ gpu_opts.inference_priority3 =
+ TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION;
+ }
Interpreter::TfLiteDelegatePtr delegate =
evaluation::CreateGPUDelegate(model_.get(), &gpu_opts);
+#elif defined(REAL_IPHONE_DEVICE)
+ TFLGpuDelegateOptions gpu_opts = {0};
+ gpu_opts.allow_precision_loss =
+ params_.Get<bool>("gpu_precision_loss_allowed");
+
+ std::string string_gpu_wait_type =
+ params_.Get<std::string>("gpu_wait_type");
+ if (!string_gpu_wait_type.empty()) {
+ TFLGpuDelegateWaitType wait_type = TFLGpuDelegateWaitTypePassive;
+ if (string_gpu_wait_type == "passive") {
+ wait_type = TFLGpuDelegateWaitTypePassive;
+ } else if (string_gpu_wait_type == "active") {
+ wait_type = TFLGpuDelegateWaitTypeActive;
+ } else if (string_gpu_wait_type == "do_not_wait") {
+ wait_type = TFLGpuDelegateWaitTypeDoNotWait;
+ } else if (string_gpu_wait_type == "aggressive") {
+ wait_type = TFLGpuDelegateWaitTypeAggressive;
+ }
+ gpu_opts.wait_type = wait_type;
+ }
+ Interpreter::TfLiteDelegatePtr delegate(TFLGpuDelegateCreate(&gpu_opts),
+ &TFLGpuDelegateDelete);
#else
- TFLITE_LOG(WARN) << "The GPU delegate compile options aren't supported to "
- "be benchmarked on non-Android platforms.";
+ TFLITE_LOG(WARN) << "The GPU delegate compile options are only supported "
+ "when benchmarking on Android or iOS platforms.";
Interpreter::TfLiteDelegatePtr delegate =
evaluation::CreateGPUDelegate(model_.get());
#endif
diff --git a/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.cc b/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.cc
index d6fb393..354420d 100644
--- a/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.cc
+++ b/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.cc
@@ -71,21 +71,21 @@
void OnBenchmarkStart(
const tflite::benchmark::BenchmarkParams& params) override {
if (on_benchmark_start_fn_ != nullptr) {
- on_benchmark_start_fn_();
+ on_benchmark_start_fn_(user_data_);
}
}
void OnSingleRunStart(tflite::benchmark::RunType runType) override {
if (on_single_run_start_fn_ != nullptr) {
- on_single_run_start_fn_(runType == tflite::benchmark::WARMUP
- ? TfLiteBenchmarkWarmup
- : TfLiteBenchmarkRegular);
+ on_single_run_start_fn_(user_data_, runType == tflite::benchmark::WARMUP
+ ? TfLiteBenchmarkWarmup
+ : TfLiteBenchmarkRegular);
}
}
void OnSingleRunEnd() override {
if (on_single_run_end_fn_ != nullptr) {
- on_single_run_end_fn_();
+ on_single_run_end_fn_(user_data_);
}
}
@@ -93,17 +93,22 @@
const tflite::benchmark::BenchmarkResults& results) override {
if (on_benchmark_end_fn_ != nullptr) {
TfLiteBenchmarkResults* wrapper = new TfLiteBenchmarkResults{&results};
- on_benchmark_end_fn_(wrapper);
+ on_benchmark_end_fn_(user_data_, wrapper);
delete wrapper;
}
}
+ // Keep the user_data pointer provided when setting the callbacks.
+ void* user_data_;
+
// Function pointers set by the TfLiteBenchmarkListenerSetCallbacks call.
// Only non-null callbacks will be actually called.
- void (*on_benchmark_start_fn_)();
- void (*on_single_run_start_fn_)(TfLiteBenchmarkRunType runType);
- void (*on_single_run_end_fn_)();
- void (*on_benchmark_end_fn_)(TfLiteBenchmarkResults* results);
+ void (*on_benchmark_start_fn_)(void* user_data);
+ void (*on_single_run_start_fn_)(void* user_data,
+ TfLiteBenchmarkRunType runType);
+ void (*on_single_run_end_fn_)(void* user_data);
+ void (*on_benchmark_end_fn_)(void* user_data,
+ TfLiteBenchmarkResults* results);
};
struct TfLiteBenchmarkListener {
@@ -121,10 +126,14 @@
}
void TfLiteBenchmarkListenerSetCallbacks(
- TfLiteBenchmarkListener* listener, void (*on_benchmark_start_fn)(),
- void (*on_single_run_start_fn)(TfLiteBenchmarkRunType runType),
- void (*on_single_run_end_fn)(),
- void (*on_benchmark_end_fn)(TfLiteBenchmarkResults* results)) {
+ TfLiteBenchmarkListener* listener, void* user_data,
+ void (*on_benchmark_start_fn)(void* user_data),
+ void (*on_single_run_start_fn)(void* user_data,
+ TfLiteBenchmarkRunType runType),
+ void (*on_single_run_end_fn)(void* user_data),
+ void (*on_benchmark_end_fn)(void* user_data,
+ TfLiteBenchmarkResults* results)) {
+ listener->adapter->user_data_ = user_data;
listener->adapter->on_benchmark_start_fn_ = on_benchmark_start_fn;
listener->adapter->on_single_run_start_fn_ = on_single_run_start_fn;
listener->adapter->on_single_run_end_fn_ = on_single_run_end_fn;
diff --git a/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.h b/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.h
index a642a4f..956996a 100644
--- a/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.h
+++ b/tensorflow/lite/tools/benchmark/experimental/c/benchmark_c_api.h
@@ -82,7 +82,8 @@
extern void TfLiteBenchmarkListenerDelete(TfLiteBenchmarkListener* listener);
// Sets the listener callbacks. Only non-null callback functions will be called
-// when the following events occur.
+// when the following events occur. The user_data pointer provided by the caller
+// will also be forwarded as a parameter of each callback function.
//
// - on_benchmark_start: Called before the (outer) inference loop begins. Note
// that this is called *after* the interpreter has been initialized, but
@@ -95,10 +96,13 @@
// only valid during the callback function execution, and will be destroyed
// afterwards.
extern void TfLiteBenchmarkListenerSetCallbacks(
- TfLiteBenchmarkListener* listener, void (*on_benchmark_start_fn)(),
- void (*on_single_run_start_fn)(TfLiteBenchmarkRunType runType),
- void (*on_single_run_end_fn)(),
- void (*on_benchmark_end_fn)(TfLiteBenchmarkResults* results));
+ TfLiteBenchmarkListener* listener, void* user_data,
+ void (*on_benchmark_start_fn)(void* user_data),
+ void (*on_single_run_start_fn)(void* user_data,
+ TfLiteBenchmarkRunType runType),
+ void (*on_single_run_end_fn)(void* user_data),
+ void (*on_benchmark_end_fn)(void* user_data,
+ TfLiteBenchmarkResults* results));
// -----------------------------------------------------------------------------
// C APIs corresponding to tflite::benchmark::BenchmarkTfLiteModel type.
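Since the listener callbacks form a plain C API, the `user_data` change is easiest to see from a foreign-function caller. Below is a hedged `ctypes` sketch of the new signatures; the shared-library name and the `TfLiteBenchmarkListenerCreate` constructor are assumptions (only `...Delete` appears above), while the callback shapes mirror the declaration.

```python
import ctypes

lib = ctypes.CDLL("libbenchmark_c_api.so")  # assumed library name
lib.TfLiteBenchmarkListenerCreate.restype = ctypes.c_void_p  # assumed constructor

# Callback types mirroring the header: every callback now receives user_data.
OnStart = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
OnRunStart = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_int)  # run type enum
OnRunEnd = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
OnEnd = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)    # results*

state = ctypes.c_int(42)  # caller-owned state forwarded to every callback

# Keep the callback objects referenced for the listener's whole lifetime,
# otherwise ctypes may garbage-collect the trampolines.
callbacks = [
    OnStart(lambda user_data: print("benchmark start")),
    OnRunStart(lambda user_data, run_type: None),
    OnRunEnd(lambda user_data: None),
    OnEnd(lambda user_data, results: print("benchmark end")),
]

listener = lib.TfLiteBenchmarkListenerCreate()
lib.TfLiteBenchmarkListenerSetCallbacks(
    listener, ctypes.byref(state), *callbacks)
```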
diff --git a/tensorflow/lite/tools/benchmark/experimental/ios/BUILD.apple b/tensorflow/lite/tools/benchmark/experimental/ios/BUILD.apple
index 9b4f835..fc70292 100644
--- a/tensorflow/lite/tools/benchmark/experimental/ios/BUILD.apple
+++ b/tensorflow/lite/tools/benchmark/experimental/ios/BUILD.apple
@@ -1,3 +1,4 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
load("//tensorflow/lite/experimental/ios:ios.bzl", "TFL_MINIMUM_OS_VERSION")
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_static_framework")
@@ -20,3 +21,11 @@
"//tensorflow/lite/tools/benchmark/experimental/c:benchmark_c_api",
],
)
+
+# Used for building TensorFlowLiteBenchmarkC_framework framework on TAP.
+build_test(
+ name = "framework_build_test",
+ targets = [
+ ":TensorFlowLiteBenchmarkC_framework",
+ ],
+)
diff --git a/tensorflow/lite/tools/benchmark/ios/README.md b/tensorflow/lite/tools/benchmark/ios/README.md
index 5c772ac..90fd28b 100644
--- a/tensorflow/lite/tools/benchmark/ios/README.md
+++ b/tensorflow/lite/tools/benchmark/ios/README.md
@@ -4,52 +4,43 @@
An iOS app to benchmark TFLite models.
-The app reads benchmark parameters from a JSON file named `benchmark_params.json`
-in its `benchmark_data` directory. Any downloaded models for benchmarking should
-also be placed in `benchmark_data` directory.
+The app reads benchmark parameters from a JSON file named
+`benchmark_params.json` in its `benchmark_data` directory. Any downloaded models
+for benchmarking should also be placed in the `benchmark_data` directory.
The JSON file specifies the name of the model file and other benchmarking
parameters like inputs to the model, type of inputs, number of iterations,
number of threads. The default values in the JSON file are for the
-Mobilenet_1.0_224 model
-([paper](https://arxiv.org/pdf/1704.04861.pdf),
-[tflite&pb](https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz))
+Mobilenet_1.0_224 model ([paper][mobilenet-paper],
+[tflite&pb][mobilenet-model]).
-## To build/install/run
+## Building / running the app
-- Follow instructions at
- [iOS build for TFLite](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/g3doc/guide/build_ios.md)
- to build TFLite.
+* Follow the [iOS build instructions][build-ios] to configure the Bazel
+ workspace and `.bazelrc` file correctly.
-Running
+* Run the `build_benchmark_framework.sh` script to build the benchmark
+  framework. This script builds the framework for iOS and puts it under the
+  `TFLiteBenchmark/TFLiteBenchmark/Frameworks` directory.
-```bash
-tensorflow/lite/tools/make/build_ios_universal_lib.sh
-```
+* If you want more detailed profiling, run the build script with `-p` option:
+ `build_benchmark_framework.sh -p`.
-will also build `tensorflow/lite/tools/make/gen/lib/benchmark-lib.a` .
+* Modify `benchmark_params.json` to change the `input_layer`, `input_layer_shape`
+ and other benchmark parameters.
-- Now copy the downloaded model file to `benchmark_data` directory.
+* Change `Build Phases -> Copy Bundle Resources` and add the model file to the
+ resources that need to be copied.
-- Modify `benchmark_params.json` change the `input_layer`, `input_layer_shape`
-and other benchmark parameters.
+* Ensure that `Build Phases -> Link Binary With Library` contains the
+ `Accelerate framework` and `TensorFlowLiteBenchmarkC.framework`.
-- Change `Build Phases -> Copy Bundle Resources` and add the model file to the
-resources that need to be copied.
+* Now try running the app. The app has a single button that runs the benchmark
+  on the model and displays results in a text view below. You can also check
+  the console output section in Xcode for more detailed benchmark
+  information.
-- Ensure that `Build Phases -> Link Binary With Library` contains the
-`Accelerate framework` and `tensorflow/lite/tools/make/gen/lib/benchmark-lib.a`.
-- Now try running the app. The app has a single button that runs the benchmark
- on the model and displays results in a text view below.
-
-## Profiling
-
-If you want detailed profiling, use the following command:
-
-```bash
-tensorflow/lite/tools/make/build_ios_universal_lib.sh -p
-```
-
-Then following the same steps above and run the benchmark app. You will see the
-detailed profiling results in the outputs.
+[build-ios]: https://tensorflow.org/lite/guide/build_ios
+[mobilenet-model]: https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz
+[mobilenet-paper]: https://arxiv.org/pdf/1704.04861.pdf
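Since the README above only names `input_layer` and `input_layer_shape`, here is a hedged sketch for generating `benchmark_params.json`; every other key is an assumption for illustration, so check the bundled sample file for the authoritative key names.

```python
import json

params = {
    "benchmark_name": "mobilenet_v1_1.0_224",  # assumed key
    "graph": "mobilenet_v1_1.0_224.tflite",    # assumed key for the model file
    "input_layer": "input",
    "input_layer_shape": "1,224,224,3",
    "num_threads": 1,                          # assumed key
    "num_runs": 50,                            # assumed key
}

with open("benchmark_data/benchmark_params.json", "w") as f:
    json.dump(params, f, indent=2)
```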
diff --git a/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark.xcodeproj/project.pbxproj b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark.xcodeproj/project.pbxproj
index a5f5bfb..056ca69 100644
--- a/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark.xcodeproj/project.pbxproj
+++ b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark.xcodeproj/project.pbxproj
@@ -8,7 +8,6 @@
/* Begin PBXBuildFile section */
6FE7579A20D59CE500F01636 /* benchmark_params.json in Resources */ = {isa = PBXBuildFile; fileRef = 6FE7579920D59CE500F01636 /* benchmark_params.json */; };
- 6FE7579D20D5A5E000F01636 /* benchmark-lib.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 6FE7579C20D5A5E000F01636 /* benchmark-lib.a */; };
6FE7579F20D5A6A700F01636 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6FE7579E20D5A6A700F01636 /* Accelerate.framework */; };
6FE757A120D5AB8100F01636 /* mobilenet_v1_1.0_224.tflite in Resources */ = {isa = PBXBuildFile; fileRef = 6FE757A020D5AB8000F01636 /* mobilenet_v1_1.0_224.tflite */; };
6FE93FFD20D592D8008C9FE4 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 6FE93FFC20D592D8008C9FE4 /* AppDelegate.m */; };
@@ -16,8 +15,24 @@
6FE9400320D592D8008C9FE4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 6FE9400120D592D8008C9FE4 /* Main.storyboard */; };
6FE9400520D592DA008C9FE4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 6FE9400420D592DA008C9FE4 /* Assets.xcassets */; };
6FE9400B20D592DA008C9FE4 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 6FE9400A20D592DA008C9FE4 /* main.m */; };
+ DC4D465D2373ECF400397CBD /* TensorFlowLiteBenchmarkC.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = DC4D465C2373ECF300397CBD /* TensorFlowLiteBenchmarkC.framework */; };
+ DC4D465E2373ECF400397CBD /* TensorFlowLiteBenchmarkC.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = DC4D465C2373ECF300397CBD /* TensorFlowLiteBenchmarkC.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
/* End PBXBuildFile section */
+/* Begin PBXCopyFilesBuildPhase section */
+ DC4D465F2373ECF400397CBD /* Embed Frameworks */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 8;
+ dstPath = "";
+ dstSubfolderSpec = 10;
+ files = (
+ DC4D465E2373ECF400397CBD /* TensorFlowLiteBenchmarkC.framework in Embed Frameworks */,
+ );
+ name = "Embed Frameworks";
+ runOnlyForDeploymentPostprocessing = 1;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
/* Begin PBXFileReference section */
6FE7579920D59CE500F01636 /* benchmark_params.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = benchmark_params.json; sourceTree = "<group>"; };
6FE7579C20D5A5E000F01636 /* benchmark-lib.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "benchmark-lib.a"; path = "$SRCROOT/../../../../../../tensorflow/lite/tools/make/gen/lib/benchmark-lib.a"; sourceTree = "<group>"; };
@@ -32,6 +47,7 @@
6FE9400420D592DA008C9FE4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
6FE9400920D592DA008C9FE4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
6FE9400A20D592DA008C9FE4 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
+ DC4D465C2373ECF300397CBD /* TensorFlowLiteBenchmarkC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = TensorFlowLiteBenchmarkC.framework; path = TFLiteBenchmark/Frameworks/TensorFlowLiteBenchmarkC.framework; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -40,7 +56,7 @@
buildActionMask = 2147483647;
files = (
6FE7579F20D5A6A700F01636 /* Accelerate.framework in Frameworks */,
- 6FE7579D20D5A5E000F01636 /* benchmark-lib.a in Frameworks */,
+ DC4D465D2373ECF400397CBD /* TensorFlowLiteBenchmarkC.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -59,6 +75,7 @@
6FE7579B20D5A5E000F01636 /* Frameworks */ = {
isa = PBXGroup;
children = (
+ DC4D465C2373ECF300397CBD /* TensorFlowLiteBenchmarkC.framework */,
6FE7579E20D5A6A700F01636 /* Accelerate.framework */,
6FE7579C20D5A5E000F01636 /* benchmark-lib.a */,
);
@@ -108,6 +125,7 @@
6FE93FF420D592D8008C9FE4 /* Sources */,
6FE93FF520D592D8008C9FE4 /* Frameworks */,
6FE93FF620D592D8008C9FE4 /* Resources */,
+ DC4D465F2373ECF400397CBD /* Embed Frameworks */,
);
buildRules = (
);
@@ -308,6 +326,10 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/TFLiteBenchmark/Frameworks",
+ );
"HEADER_SEARCH_PATHS[arch=*]" = (
$SRCROOT/../../../../../../,
$SRCROOT/../../../../../../tensorflow/lite/tools/make/downloads/eigen,
@@ -334,6 +356,10 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/TFLiteBenchmark/Frameworks",
+ );
"HEADER_SEARCH_PATHS[arch=*]" = (
$SRCROOT/../../../../../../,
$SRCROOT/../../../../../../tensorflow/lite/tools/make/downloads/eigen,
diff --git a/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/BenchmarkViewController.mm b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/BenchmarkViewController.mm
index 590c215..bb24040 100644
--- a/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/BenchmarkViewController.mm
+++ b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/BenchmarkViewController.mm
@@ -18,8 +18,8 @@
#import <sstream>
#import <string>
#import <vector>
-#import "tensorflow/lite/tools/benchmark/benchmark_tflite_model.h"
-#import "tensorflow/lite/tools/benchmark/logging.h"
+
+#import <TensorFlowLiteBenchmarkC/TensorFlowLiteBenchmarkC.h>
namespace {
NSString* FilePathForResourceName(NSString* filename) {
@@ -64,42 +64,57 @@
return charptr_vec;
}
-class ResultsListener : public tflite::benchmark::BenchmarkListener {
+class ResultsListener {
public:
- void OnBenchmarkEnd(const tflite::benchmark::BenchmarkResults& results) override;
+ void OnBenchmarkEnd(TfLiteBenchmarkResults* results);
std::string Results() { return results_; }
private:
std::string results_;
};
-void OutputMicrosecondsStatToStream(const tensorflow::Stat<int64_t>& time_us,
+void OutputMicrosecondsStatToStream(const TfLiteBenchmarkInt64Stat& time_us,
const std::string& prefix, std::ostringstream* stream) {
- *stream << prefix << "Num runs: " << time_us.count() << "\n";
+ *stream << prefix << "Num runs: " << time_us.count << "\n";
- *stream << prefix << "Average: " << time_us.avg() / 1e3 << " ms\n";
- *stream << prefix << "Min: " << time_us.min() / 1e3 << " ms \n";
- *stream << prefix << "Max: " << time_us.max() / 1e3 << " ms \n";
- *stream << prefix << "Std deviation: " << time_us.std_deviation() / 1e3 << " ms\n";
+ *stream << prefix << "Average: " << time_us.avg / 1e3 << " ms\n";
+ *stream << prefix << "Min: " << time_us.min / 1e3 << " ms \n";
+ *stream << prefix << "Max: " << time_us.max / 1e3 << " ms \n";
+ *stream << prefix << "Std deviation: " << time_us.std_deviation / 1e3 << " ms\n";
}
-void ResultsListener::OnBenchmarkEnd(const tflite::benchmark::BenchmarkResults& results) {
+void ResultsListener::OnBenchmarkEnd(TfLiteBenchmarkResults* results) {
std::ostringstream stream;
const std::string prefix = " - ";
+
+ TfLiteBenchmarkInt64Stat inference = TfLiteBenchmarkResultsGetInferenceTimeMicroseconds(results);
+ TfLiteBenchmarkInt64Stat warmup = TfLiteBenchmarkResultsGetWarmupTimeMicroseconds(results);
+
stream << "Startup latency: ";
- stream << results.startup_latency_us() / 1e3 << " ms\n";
+ stream << TfLiteBenchmarkResultsGetStartupLatencyMicroseconds(results) / 1e3 << " ms\n";
stream << "\nInference:\n";
- OutputMicrosecondsStatToStream(results.inference_time_us(), prefix, &stream);
+ OutputMicrosecondsStatToStream(inference, prefix, &stream);
stream << "\nWarmup:\n";
- OutputMicrosecondsStatToStream(results.warmup_time_us(), prefix, &stream);
+ OutputMicrosecondsStatToStream(warmup, prefix, &stream);
results_ = stream.str();
}
+void OnBenchmarkEnd(void* user_data, TfLiteBenchmarkResults* results) {
+ if (user_data != nullptr) {
+ reinterpret_cast<ResultsListener*>(user_data)->OnBenchmarkEnd(results);
+ }
+}
+
std::string RunBenchmark() {
- ResultsListener listener;
- tflite::benchmark::BenchmarkTfLiteModel benchmark;
- benchmark.AddListener(&listener);
+ ResultsListener results_listener;
+ TfLiteBenchmarkTfLiteModel* benchmark = TfLiteBenchmarkTfLiteModelCreate();
+
+ TfLiteBenchmarkListener* listener = TfLiteBenchmarkListenerCreate();
+ TfLiteBenchmarkListenerSetCallbacks(listener, &results_listener, nullptr, nullptr, nullptr,
+ OnBenchmarkEnd);
+
+ TfLiteBenchmarkTfLiteModelAddListener(benchmark, listener);
// TODO(shashishekhar): Passing arguments like this is brittle; refactor BenchmarkParams
// so that it contains the arguments for BenchmarkTfLiteModel, and set parameters via BenchmarkParams.
std::vector<std::string> command_line_params;
@@ -109,8 +124,15 @@
ReadCommandLineParameters(&command_line_params);
std::vector<char*> argv = StringVecToCharPtrVec(command_line_params);
int argc = static_cast<int>(argv.size());
- benchmark.Run(argc, argv.data());
- return listener.Results();
+
+ TfLiteBenchmarkTfLiteModelRunWithArgs(benchmark, argc, argv.data());
+
+ std::string results = results_listener.Results();
+
+ TfLiteBenchmarkListenerDelete(listener);
+ TfLiteBenchmarkTfLiteModelDelete(benchmark);
+
+ return results;
}
} // namespace
diff --git a/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/Frameworks/.gitignore b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/Frameworks/.gitignore
new file mode 100644
index 0000000..63d8bd7
--- /dev/null
+++ b/tensorflow/lite/tools/benchmark/ios/TFLiteBenchmark/TFLiteBenchmark/Frameworks/.gitignore
@@ -0,0 +1 @@
+*.framework/
diff --git a/tensorflow/lite/tools/benchmark/ios/build_benchmark_framework.sh b/tensorflow/lite/tools/benchmark/ios/build_benchmark_framework.sh
new file mode 100755
index 0000000..5c74158
--- /dev/null
+++ b/tensorflow/lite/tools/benchmark/ios/build_benchmark_framework.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+set -e
+set -x
+
+WORKSPACE_ROOT=$(bazel info workspace)
+BENCHMARK_DIR=tensorflow/lite/tools/benchmark
+DEST_DIR="${BENCHMARK_DIR}/ios/TFLiteBenchmark/TFLiteBenchmark/Frameworks"
+FRAMEWORK_TARGET=TensorFlowLiteBenchmarkC_framework
+
+usage() {
+ echo "Usage: $(basename "$0") [-p]"
+ echo "-p enable profiling"
+ exit 1
+}
+
+PROFILING_ARGS=""
+while getopts "p" opt_name; do
+ case "$opt_name" in
+ p) PROFILING_ARGS='--copt=-DGEMMLOWP_PROFILING';;
+ *) usage;;
+ esac
+done
+shift $(($OPTIND - 1))
+
+pushd "${WORKSPACE_ROOT}"
+
+# Build the framework.
+bazel build --config=ios_fat -c opt ${PROFILING_ARGS} \
+ "//${BENCHMARK_DIR}/experimental/ios:${FRAMEWORK_TARGET}"
+
+# Copy the framework into the destination and unzip.
+mkdir -p "${DEST_DIR}"
+cp -f "bazel-bin/${BENCHMARK_DIR}/experimental/ios/${FRAMEWORK_TARGET}.zip" \
+ "${DEST_DIR}"
+pushd "${DEST_DIR}"
+unzip -o "${FRAMEWORK_TARGET}.zip"
+rm -f "${FRAMEWORK_TARGET}.zip"
+
+popd
+popd
diff --git a/tensorflow/lite/tools/evaluation/proto/BUILD b/tensorflow/lite/tools/evaluation/proto/BUILD
index 3e0478d..f3f7b7b 100644
--- a/tensorflow/lite/tools/evaluation/proto/BUILD
+++ b/tensorflow/lite/tools/evaluation/proto/BUILD
@@ -13,7 +13,6 @@
# limitations under the License.
# ==============================================================================
-# Placeholder for Google-internal load statements.
load(
"//tensorflow/core/platform:default/build_config.bzl",
"tf_proto_library_py",
diff --git a/tensorflow/lite/tools/optimize/operator_property.cc b/tensorflow/lite/tools/optimize/operator_property.cc
index eec8bac..b284e02 100644
--- a/tensorflow/lite/tools/optimize/operator_property.cc
+++ b/tensorflow/lite/tools/optimize/operator_property.cc
@@ -93,6 +93,16 @@
property.version = 3;
break;
}
+ case BuiltinOperator_TRANSPOSE_CONV: {
+ TensorProperty tensor_property;
+ tensor_property.per_axis = true;
+ tensor_property.per_axis_index = 0;
+ tensor_property.symmetric = true;
+ property.inputs = {{1, tensor_property}, {2, {}}};
+ property.outputs = {{0, {}}};
+ property.version = 2;
+ break;
+ }
case BuiltinOperator_DEPTHWISE_CONV_2D: {
TensorProperty tensor_property;
tensor_property.per_axis = true;
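The TRANSPOSE_CONV property above marks the weight tensor (input 1) for symmetric per-axis quantization along axis 0, i.e. one int8 scale per output channel with the zero point fixed at 0. As a minimal numeric sketch of that scheme (the tensor layout and the max-abs/127 scale rule here are illustrative assumptions, not the exact TFLite quantizer code):

```python
import numpy as np

def quantize_per_axis_symmetric(w):
    # One scale per slice along axis 0 (the output-channel axis here);
    # symmetric quantization keeps the zero point at 0.
    scales = np.max(np.abs(w), axis=(1, 2, 3)) / 127.0
    q = np.round(w / scales[:, None, None, None]).astype(np.int8)
    return q, scales

w = np.random.randn(4, 3, 3, 2).astype(np.float32)  # 4 output channels
q, scales = quantize_per_axis_symmetric(w)
assert q.dtype == np.int8 and scales.shape == (4,)
```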
diff --git a/tensorflow/lite/tools/optimize/quantization_utils.cc b/tensorflow/lite/tools/optimize/quantization_utils.cc
index 52c3d77..8588f2f 100644
--- a/tensorflow/lite/tools/optimize/quantization_utils.cc
+++ b/tensorflow/lite/tools/optimize/quantization_utils.cc
@@ -41,10 +41,6 @@
} // namespace
TfLiteStatus NumElements(const TensorT& tensor, uint64_t* num_elements) {
- if (tensor.shape.empty()) {
- *num_elements = 0;
- return kTfLiteOk;
- }
*num_elements = 1;
for (const int64_t dim : tensor.shape) {
if (dim <= 0 || *num_elements > UINT64_MAX / static_cast<uint64_t>(dim)) {
diff --git a/tensorflow/lite/tools/optimize/quantization_utils_test.cc b/tensorflow/lite/tools/optimize/quantization_utils_test.cc
index 9487939..ece0123 100644
--- a/tensorflow/lite/tools/optimize/quantization_utils_test.cc
+++ b/tensorflow/lite/tools/optimize/quantization_utils_test.cc
@@ -65,7 +65,8 @@
tensor.shape = {};
EXPECT_EQ(kTfLiteOk, NumElements(tensor, &num_elements));
- EXPECT_EQ(num_elements, 0);
+ // Scalars (empty shape) have exactly one element.
+ EXPECT_EQ(num_elements, 1);
tensor.shape = {1, 2, 3, -1};
EXPECT_EQ(kTfLiteError, NumElements(tensor, &num_elements));
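The NumElements change above drops the special case for rank-0 tensors: a scalar has one element, by the usual empty-product convention, rather than zero. The same convention in numpy, for illustration:

```python
import numpy as np

# The product over an empty shape is 1, so a rank-0 (scalar) tensor
# holds exactly one element.
assert np.zeros(()).size == 1
assert int(np.prod((), dtype=np.uint64)) == 1
```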
diff --git a/tensorflow/lite/tools/versioning/op_version.cc b/tensorflow/lite/tools/versioning/op_version.cc
index fdb361a..e638840 100644
--- a/tensorflow/lite/tools/versioning/op_version.cc
+++ b/tensorflow/lite/tools/versioning/op_version.cc
@@ -149,6 +149,13 @@
}
return 1;
+ case BuiltinOperator_TRANSPOSE_CONV:
+ // If the op takes int8 input, it is version 2.
+ if (op_sig.input_types.at(0) == TensorType_INT8) {
+ return 2;
+ }
+ return 1;
+
case BuiltinOperator_LSTM:
// If the input tensor is float and a weight is int8, this is a version
// 3 hybrid operation.
diff --git a/tensorflow/lite/tools/versioning/op_version_test.cc b/tensorflow/lite/tools/versioning/op_version_test.cc
index 025b4a2..adb1e89 100644
--- a/tensorflow/lite/tools/versioning/op_version_test.cc
+++ b/tensorflow/lite/tools/versioning/op_version_test.cc
@@ -351,4 +351,19 @@
EXPECT_EQ(GetBuiltinOperatorVersion(fake_op_sig), 2);
}
+TEST(OpVersionTest, VersioningTransposeConvOperatorTest) {
+ OpSignature fake_op_sig = {
+ .op = BuiltinOperator_TRANSPOSE_CONV,
+ .input_types =
+ std::vector<TensorType>{TensorType_FLOAT32, TensorType_UINT8},
+ };
+ EXPECT_EQ(GetBuiltinOperatorVersion(fake_op_sig), 1);
+
+ fake_op_sig = {
+ .op = BuiltinOperator_TRANSPOSE_CONV,
+ .input_types = std::vector<TensorType>{TensorType_INT8},
+ };
+ EXPECT_EQ(GetBuiltinOperatorVersion(fake_op_sig), 2);
+}
+
} // namespace tflite
diff --git a/tensorflow/lite/tutorials/dataset.py b/tensorflow/lite/tutorials/dataset.py
index ba49dfc..fdaf84c 100644
--- a/tensorflow/lite/tutorials/dataset.py
+++ b/tensorflow/lite/tutorials/dataset.py
@@ -15,7 +15,7 @@
"""tf.data.Dataset interface to the MNIST dataset.
This is cloned from
- https://github.com/tensorflow/models/blob/master/official/mnist/dataset.py
+ https://github.com/tensorflow/models/blob/master/official/r1/mnist/dataset.py
"""
from __future__ import absolute_import
diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
index a18a978..de0adce 100644
--- a/tensorflow/python/BUILD
+++ b/tensorflow/python/BUILD
@@ -273,6 +273,7 @@
":platform",
":platform_test",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -283,6 +284,7 @@
":client_testlib",
":platform",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -293,6 +295,7 @@
":client_testlib",
":platform",
],
+ python_version = "PY3",
tags = [
"no_windows",
"nomac",
@@ -305,6 +308,7 @@
size = "small",
srcs = ["platform/app_test.py"],
additional_deps = [":platform"],
+ python_version = "PY3",
tags = ["notap"],
)
@@ -400,6 +404,7 @@
deps = [
"//tensorflow/c:tf_status",
"//tensorflow/core:lib",
+ "//tensorflow/core:protos_all_cc",
"//third_party/python_runtime:headers",
],
)
@@ -431,7 +436,9 @@
],
features = ["-parse_headers"],
deps = [
- "//tensorflow/core:lib_internal",
+ ":py_exception_registry",
+ "//tensorflow/c:tf_status",
+ "//tensorflow/core:lib",
"//tensorflow/core:protos_all_cc",
"//third_party/python_runtime:headers",
"@pybind11",
@@ -841,6 +848,7 @@
],
data = [":framework/test_file_system.so"],
main = "framework/file_system_test.py",
+ python_version = "PY3",
tags = [
"no_pip", # Path issues due to test environment
"no_windows",
@@ -856,6 +864,7 @@
":platform",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -866,6 +875,7 @@
":platform",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -876,6 +886,7 @@
":platform",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -886,6 +897,7 @@
":platform",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -895,6 +907,7 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
cc_library(
@@ -1202,6 +1215,7 @@
":op_def_library",
":test_ops",
],
+ python_version = "PY3",
tags = ["no_pip"],
)
@@ -1341,6 +1355,7 @@
"//tensorflow/python/eager:test",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
)
py_library(
@@ -1430,6 +1445,7 @@
":client_testlib",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
)
py_library(
@@ -1455,6 +1471,7 @@
":test_ops",
":util",
],
+ python_version = "PY3",
tags = ["no_pip"], # test_ops are not available in pip.
)
@@ -1499,6 +1516,7 @@
":session",
":smart_cond",
],
+ python_version = "PY3",
)
py_library(
@@ -1545,7 +1563,7 @@
name = "framework_composite_tensor_test",
srcs = ["framework/composite_tensor_test.py"],
main = "framework/composite_tensor_test.py",
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":composite_tensor",
@@ -1572,6 +1590,7 @@
"//tensorflow/python/ops/ragged:ragged_tensor_value",
],
main = "framework/composite_tensor_utils_test.py",
+ python_version = "PY3",
)
# This target is maintained separately from :util to provide separate visibility
@@ -1776,6 +1795,7 @@
"//tensorflow/python:client_testlib",
],
main = "framework/registry_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -1788,6 +1808,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/errors_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -1801,6 +1822,7 @@
":traceable_stack",
],
main = "framework/error_interpolation_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -1817,6 +1839,7 @@
":subscribe",
],
main = "framework/subscribe_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -1831,6 +1854,7 @@
":platform",
],
main = "platform/build_info_test.py",
+ python_version = "PY3",
tags = [
"no_pip",
"notap",
@@ -1849,6 +1873,7 @@
":platform",
],
main = "platform/benchmark_test.py",
+ python_version = "PY3",
tags = [
"no_pip",
],
@@ -1864,6 +1889,7 @@
"//third_party/py/numpy",
],
main = "framework/proto_test.py",
+ python_version = "PY3",
)
tf_gen_op_wrapper_private_py(
@@ -1919,6 +1945,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
shard_count = 10,
tags = [
"noasan",
@@ -1935,6 +1962,7 @@
":framework_for_generated_wrappers",
],
main = "framework/versions_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -1957,6 +1985,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/importer_test.py",
+ python_version = "PY3",
)
filegroup(
@@ -1988,6 +2017,7 @@
],
data = [":meta_graph_testdata"],
main = "framework/meta_graph_test.py",
+ python_version = "PY3",
tags = [
"no_pip",
"no_windows",
@@ -2006,6 +2036,7 @@
":util",
],
main = "framework/traceable_stack_test.py",
+ python_version = "PY3",
)
tf_gen_op_wrapper_py(
@@ -2059,6 +2090,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/common_shapes_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2087,6 +2119,7 @@
"//tensorflow/python/eager:function",
],
main = "framework/ops_test.py",
+ python_version = "PY3",
tags = ["no_pip"], # test_ops_2 is not available in pip.
)
@@ -2100,6 +2133,7 @@
"//tensorflow/python/eager:context",
],
main = "framework/ops_enable_eager_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2114,6 +2148,7 @@
"@absl_py//absl/testing:parameterized",
],
main = "framework/tensor_shape_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2128,6 +2163,7 @@
"//third_party/py/numpy",
],
main = "framework/type_spec_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2142,6 +2178,7 @@
"//third_party/py/numpy",
],
main = "framework/tensor_spec_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2156,6 +2193,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/sparse_tensor_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2169,6 +2207,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/device_spec_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2182,6 +2221,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/device_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2193,6 +2233,7 @@
":framework",
],
main = "framework/random_seed_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2207,6 +2248,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/tensor_shape_div_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2224,6 +2266,7 @@
"//third_party/py/numpy",
],
main = "framework/tensor_util_test.py",
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -2248,6 +2291,7 @@
"//tensorflow/python/eager:context",
],
main = "framework/test_util_test.py",
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -2264,6 +2308,7 @@
"//tensorflow/core:protos_all_py",
],
main = "framework/dtypes_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -2275,6 +2320,7 @@
":framework_test_lib",
":platform_test",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -2288,6 +2334,7 @@
":test_ops",
],
main = "framework/kernels_test.py",
+ python_version = "PY3",
)
tf_gen_op_wrapper_private_py(
@@ -2480,7 +2527,7 @@
name = "batch_ops_test",
size = "small",
srcs = ["ops/batch_ops_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"manual",
@@ -2807,6 +2854,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -2831,6 +2879,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -2854,6 +2903,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -2866,6 +2916,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = [
"no_cuda_on_cpu_tap",
"no_rocm",
@@ -2973,6 +3024,7 @@
":client_testlib",
":platform_test",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -2985,6 +3037,7 @@
":client_testlib",
":platform_test",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -2997,6 +3050,7 @@
":client_testlib",
":platform_test",
],
+ python_version = "PY3",
# This tests that it is possible to disable cfv2 using env vars.
# This does not apply to TF 2.0 nightly builds which enable
# v2 behavior using `tf.compat.v1.enable_v2_behavior()` in which case
@@ -3484,7 +3538,7 @@
name = "loss_scale_optimizer_test",
size = "small",
srcs = ["training/experimental/loss_scale_optimizer_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [
":client_testlib",
":loss_scale_optimizer",
@@ -3499,7 +3553,7 @@
name = "loss_scale_test",
size = "medium",
srcs = ["training/experimental/loss_scale_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [
":client_testlib",
":loss_scale",
@@ -3537,6 +3591,7 @@
"@absl_py//absl/testing:parameterized",
"//tensorflow/python:client_testlib",
],
+ python_version = "PY3",
tags = [
"no_rocm",
],
@@ -3843,6 +3898,7 @@
"//tensorflow/python/kernel_tests/random:util",
"//tensorflow/python/distribute:mirrored_strategy",
],
+ python_version = "PY3",
tags = ["no_rocm"],
xla_enable_strict_auto_jit = False,
)
@@ -3982,6 +4038,7 @@
":gradient_checker_v2",
"@absl_py//absl/testing:parameterized",
],
+ python_version = "PY3",
)
py_library(
@@ -4008,6 +4065,7 @@
":sort_ops",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -4370,6 +4428,7 @@
":dtypes",
":framework_test_lib",
],
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -4399,6 +4458,7 @@
":while_v2",
"//tensorflow/python/eager:def_function",
],
+ python_version = "PY3",
shard_count = 2,
)
@@ -4416,12 +4476,13 @@
":platform",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_test(
name = "op_selector_test",
srcs = ["ops/op_selector_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":client_testlib",
@@ -4446,6 +4507,7 @@
":platform",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4481,6 +4543,7 @@
"//third_party/py/numpy",
"//tensorflow/python/keras:engine",
],
+ python_version = "PY3",
tags = ["no_oss"], # b/118709825
)
@@ -4497,6 +4560,7 @@
":variables",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4510,6 +4574,7 @@
":image_ops",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4534,6 +4599,7 @@
"//tensorflow/core:protos_all_py",
],
data = ["//tensorflow/core:image_testdata"],
+ python_version = "PY3",
shard_count = 5,
)
@@ -4549,6 +4615,7 @@
"//third_party/py/numpy",
"//tensorflow/python/eager:context",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4564,6 +4631,7 @@
"//third_party/py/numpy",
"//tensorflow/python/eager:context",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4580,6 +4648,7 @@
"//tensorflow/python/eager:context",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows_gpu"],
)
@@ -4598,6 +4667,7 @@
":variables",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows_gpu"],
)
@@ -4616,6 +4686,7 @@
":nn_ops_gen",
"//third_party/py/numpy",
],
+ python_version = "PY3",
shard_count = 4,
tags = ["no_windows"],
)
@@ -4633,6 +4704,7 @@
":nn_grad",
"//third_party/py/numpy",
],
+ python_version = "PY3",
shard_count = 16,
tags = ["no_rocm"],
)
@@ -4654,14 +4726,17 @@
"@absl_py//absl/testing:parameterized",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows"],
+ # TODO(b/130689556): Numerical differences due to fast math on CPU.
+ xla_enable_strict_auto_jit = False,
)
py_test(
name = "nn_loss_scaling_utilities_test",
size = "small",
srcs = ["ops/nn_loss_scaling_utilities_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [
":client_testlib",
"//tensorflow/python/distribute:combinations",
@@ -4682,6 +4757,7 @@
":nn_grad",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -4697,6 +4773,7 @@
":special_math_ops",
"//third_party/py/numpy",
],
+ python_version = "PY3",
shard_count = 10,
tags = ["no_windows_gpu"],
)
@@ -4711,6 +4788,7 @@
":platform_test",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -4929,6 +5007,7 @@
"//tensorflow/core:protos_all_py",
"//tensorflow/python/ops/losses",
],
+ python_version = "PY3",
shard_count = 3,
tags = [
"manual",
@@ -5003,12 +5082,14 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
name = "object_identity_test",
size = "small",
srcs = ["util/object_identity_test.py"],
+ python_version = "PY3",
)
# Placeholder for internal nest_test comments.
@@ -5018,6 +5099,7 @@
srcs = ["util/nest_test.py"],
additional_deps = [":util_nest_test_main_lib"],
main = "util/nest_test.py",
+ python_version = "PY3",
)
py_library(
@@ -5045,6 +5127,7 @@
":util",
],
main = "util/serialization_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -5054,6 +5137,7 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5064,6 +5148,7 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5074,6 +5159,7 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
py_library(
@@ -5096,6 +5182,7 @@
":client_testlib",
":tf_should_use",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5106,6 +5193,7 @@
":client_testlib",
":util",
],
+ python_version = "PY3",
)
py_library(
@@ -5130,6 +5218,7 @@
"@absl_py//absl/testing:parameterized",
],
main = "util/lock_util_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -5142,6 +5231,7 @@
"@six_archive//:six",
"//tensorflow/tools/compatibility:all_renames_v2",
],
+ python_version = "PY3",
)
tf_proto_library(
@@ -5181,6 +5271,7 @@
"@six_archive//:six",
],
main = "util/protobuf/compare_test.py",
+ python_version = "PY3",
tags = ["no_pip"], # compare_test_pb2 proto is not available in pip.
)
@@ -5197,6 +5288,7 @@
":util_example_parser_configuration",
],
main = "util/example_parser_configuration_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -5210,6 +5302,7 @@
":platform_test",
":util",
],
+ python_version = "PY3",
)
py_library(
@@ -5250,6 +5343,7 @@
":platform_test",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
)
cc_library(
@@ -5551,6 +5645,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
tf_py_test(
@@ -5571,6 +5666,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
tf_py_test(
@@ -5591,6 +5687,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
tf_py_test(
@@ -5611,6 +5708,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
tf_py_test(
@@ -5631,6 +5729,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
tf_py_test(
@@ -5651,6 +5750,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
)
cuda_py_test(
@@ -5671,6 +5771,7 @@
"//third_party/py/numpy",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_oss", # Test flaky due to port collisions.
"oss_serial",
@@ -5690,6 +5791,7 @@
":variables",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_oss", # Test flaky due to port collisions.
"notsan", # data race due to b/62910646
@@ -5744,6 +5846,7 @@
"@six_archive//:six",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_gpu", # b/127001953
"no_pip_gpu", # testInteractivePlacePrunedGraph fails on invalid assumption about GPU ops.
@@ -5771,6 +5874,7 @@
"//third_party/py/numpy",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_gpu",
"no_oss",
@@ -5792,6 +5896,7 @@
":training",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_gpu",
"no_pip_gpu",
@@ -5817,6 +5922,7 @@
"@six_archive//:six",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_gpu",
"no_windows",
@@ -5835,6 +5941,7 @@
"//tensorflow/core:protos_all_py",
],
args = tf_additional_cupti_test_flags(),
+ python_version = "PY3",
xla_enable_strict_auto_jit = False, # Graph structure is different with autojit
)
@@ -5849,6 +5956,7 @@
":math_ops",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"no_gpu", # b/127386241
"no_windows_gpu",
@@ -5864,6 +5972,7 @@
":framework_test_lib",
":platform_test",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5881,6 +5990,7 @@
":variables",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5892,6 +6002,7 @@
"client_testlib",
"framework_test_lib",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5903,6 +6014,7 @@
":lib",
":pywrap_tensorflow",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5914,6 +6026,7 @@
":errors",
":lib",
],
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -5927,6 +6040,7 @@
":lib",
":util",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -5939,6 +6053,7 @@
":lib",
":util",
],
+ python_version = "PY3",
tags = [
# multiprocessing can be flaky in the internal Google
# environment, so we disable it there.
@@ -5963,6 +6078,7 @@
":client_testlib",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -5982,6 +6098,7 @@
":variable_scope",
":variables",
],
+ python_version = "PY3",
tags = [
"no_windows", # b/139083295: bfloat16 tests fail on Windows
"notsan",
@@ -6048,6 +6165,7 @@
"@six_archive//:six",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
)
py_library(
@@ -6094,6 +6212,7 @@
"//tensorflow/core:protos_all_py",
"//tensorflow/python/data/ops:dataset_ops",
],
+ python_version = "PY3",
tags = ["multi_gpu"],
)
@@ -6131,6 +6250,7 @@
"//tensorflow/core:protos_all_py",
"//tensorflow/python/data/ops:dataset_ops",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -6146,6 +6266,7 @@
":variables",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"manual",
"noasan", # http://b/30379628
@@ -6165,6 +6286,7 @@
":training",
":variables",
],
+ python_version = "PY3",
tags = [
"noasan", # http://b/30782289
"notsan", # http://b/30782289
@@ -6188,6 +6310,7 @@
],
grpc_enabled = True,
main = "training/session_manager_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -6211,6 +6334,7 @@
"//tensorflow/core:protos_all_py",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -6234,6 +6358,7 @@
":variables",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"no_pip", # Relies on contrib
"no_windows",
@@ -6258,6 +6383,7 @@
":variable_scope",
":variables",
],
+ python_version = "PY3",
tags = [
"manual",
"no_cuda_on_cpu_tap",
@@ -6285,6 +6411,7 @@
":variable_scope",
":variables",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -6302,6 +6429,7 @@
":variables",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -6363,6 +6491,7 @@
":training_util",
":variables",
],
+ python_version = "PY3",
)
tf_py_test(
@@ -6382,6 +6511,7 @@
":training",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -6461,6 +6591,7 @@
":summary_ops_v2",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
)
py_library(
@@ -6556,6 +6687,7 @@
"//tensorflow/python/eager:context",
],
main = "layers/base_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -6576,6 +6708,7 @@
"//third_party/py/numpy",
],
main = "layers/core_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -6592,6 +6725,7 @@
":random_ops",
],
main = "layers/convolutional_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -6603,6 +6737,7 @@
":layers",
],
main = "layers/utils_test.py",
+ python_version = "PY3",
)
tf_py_test(
@@ -6616,6 +6751,7 @@
":random_ops",
],
main = "layers/pooling_test.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6634,6 +6770,7 @@
"//third_party/py/numpy",
],
main = "layers/normalization_test.py",
+ python_version = "PY3",
shard_count = 10,
)
@@ -6650,6 +6787,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -6663,6 +6801,7 @@
":framework_for_generated_wrappers",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -6676,6 +6815,7 @@
":nn_ops",
"//third_party/py/numpy",
],
+ python_version = "PY3",
tags = ["no_windows"],
)
@@ -6714,6 +6854,7 @@
":state_ops_gen",
],
main = "ops/accumulate_n_benchmark.py",
+ python_version = "PY3",
shard_count = 6,
)
@@ -6735,6 +6876,7 @@
":variables",
],
main = "ops/batch_norm_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6751,6 +6893,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/collective_ops_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6768,6 +6911,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/concat_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6781,6 +6925,7 @@
"//tensorflow/python/eager:function",
],
main = "ops/control_flow_ops_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6801,6 +6946,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/conv2d_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6819,6 +6965,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/split_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6838,6 +6985,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/transpose_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6846,6 +6994,7 @@
srcs = ["ops/matmul_benchmark.py"],
additional_deps = [":matmul_benchmark_main_lib"],
main = "ops/matmul_benchmark.py",
+ python_version = "PY3",
)
py_library(
@@ -6887,6 +7036,7 @@
"//tensorflow/core:protos_all_py",
],
main = "ops/matmul_benchmark_test.py",
+ python_version = "PY3",
tags = ["no_pip"],
)
@@ -6905,6 +7055,7 @@
],
grpc_enabled = True,
main = "client/session_benchmark.py",
+ python_version = "PY3",
)
cuda_py_test(
@@ -6918,6 +7069,7 @@
":nn_ops",
"//third_party/py/numpy",
],
+ python_version = "PY3",
)
py_library(
@@ -6946,6 +7098,7 @@
":tf_item",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_pip", # tf_optimizer is not available in pip.
@@ -6967,6 +7120,7 @@
"//tensorflow/core:protos_all_py",
"//tensorflow/python/data",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_pip", # tf_optimizer is not available in pip.
@@ -6999,6 +7153,7 @@
":tf_item",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
shard_count = 10,
tags = [
"grappler",
@@ -7037,6 +7192,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_pip", # tf_optimizer is not available in pip.
@@ -7065,6 +7221,7 @@
":graph_placer",
"//tensorflow/python:math_ops",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_pip", # graph_placer is not available in pip.
@@ -7091,6 +7248,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
],
@@ -7114,6 +7272,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
],
@@ -7171,7 +7330,7 @@
srcs = [
"grappler/cost_analyzer_tool.py",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cost_analyzer",
@@ -7200,6 +7359,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_cuda_on_cpu_tap",
@@ -7258,6 +7418,7 @@
"//third_party/py/numpy",
"//tensorflow/core:protos_all_py",
],
+ python_version = "PY3",
tags = [
"grappler",
"no_rocm",
@@ -7301,6 +7462,7 @@
":framework_test_lib",
":platform_test",
],
+ python_version = "PY3",
# Disabled on Jenkins until the errors finding nvmlShutdown are resolved.
tags = [
"manual",
@@ -7350,7 +7512,7 @@
srcs = [
"grappler/graph_analyzer.py",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":_pywrap_graph_analyzer",
@@ -7389,4 +7551,5 @@
additional_deps = [
"//tensorflow/python:client_testlib",
],
+ python_version = "PY3",
)
diff --git a/tensorflow/python/autograph/converters/BUILD b/tensorflow/python/autograph/converters/BUILD
index d438dc6..7fe43cf 100644
--- a/tensorflow/python/autograph/converters/BUILD
+++ b/tensorflow/python/autograph/converters/BUILD
@@ -50,6 +50,7 @@
py_test(
name = "arg_defaults_test",
srcs = ["arg_defaults_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -61,6 +62,7 @@
py_test(
name = "asserts_test",
srcs = ["asserts_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -72,6 +74,7 @@
py_test(
name = "break_statements_test",
srcs = ["break_statements_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -101,6 +104,7 @@
py_test(
name = "conditional_expressions_test",
srcs = ["conditional_expressions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -112,6 +116,7 @@
py_test(
name = "continue_statements_test",
srcs = ["continue_statements_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -123,6 +128,7 @@
py_test(
name = "control_flow_test",
srcs = ["control_flow_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -134,6 +140,7 @@
py_test(
name = "directives_test",
srcs = ["directives_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -146,6 +153,7 @@
py_test(
name = "function_scopes_test",
srcs = ["function_scopes_test.py"],
+ python_version = "PY3",
deps = [
":converters",
"//tensorflow/python:client_testlib",
@@ -157,6 +165,7 @@
py_test(
name = "list_comprehensions_test",
srcs = ["list_comprehensions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -168,6 +177,7 @@
py_test(
name = "lists_test",
srcs = ["lists_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -179,6 +189,7 @@
py_test(
name = "logical_expressions_test",
srcs = ["logical_expressions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -190,6 +201,7 @@
py_test(
name = "return_statements_test",
srcs = ["return_statements_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
@@ -202,6 +214,7 @@
py_test(
name = "slices_test",
srcs = ["slices_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":converters",
diff --git a/tensorflow/python/autograph/core/BUILD b/tensorflow/python/autograph/core/BUILD
index 8d7fc1d..1b44121 100644
--- a/tensorflow/python/autograph/core/BUILD
+++ b/tensorflow/python/autograph/core/BUILD
@@ -59,6 +59,7 @@
py_test(
name = "converter_test",
srcs = ["converter_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":core",
@@ -70,6 +71,7 @@
py_test(
name = "function_wrappers_test",
srcs = ["function_wrappers_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":core",
@@ -80,6 +82,7 @@
py_test(
name = "naming_test",
srcs = ["naming_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":core",
diff --git a/tensorflow/python/autograph/core/unsupported_features_checker.py b/tensorflow/python/autograph/core/unsupported_features_checker.py
index ee5b71a..b9694d6 100644
--- a/tensorflow/python/autograph/core/unsupported_features_checker.py
+++ b/tensorflow/python/autograph/core/unsupported_features_checker.py
@@ -20,6 +20,8 @@
import gast
+from tensorflow.python.autograph.pyct import errors
+
class UnsupportedFeaturesChecker(gast.NodeVisitor):
"""Quick check for Python features we know we don't support.
@@ -30,16 +32,18 @@
def visit_Attribute(self, node):
if (node.attr is not None
and node.attr.startswith('__') and not node.attr.endswith('__')):
- raise NotImplementedError(
- 'Mangled names are not yet supported by AutoGraph')
+ raise errors.UnsupportedLanguageElementError(
+ 'mangled names are not yet supported by AutoGraph')
# These checks could potentially be replaced with inspect.isgeneratorfunction
# to avoid a getsource/parse/ast-walk round trip.
def visit_Yield(self, node):
- raise NotImplementedError('Generators are not supported by AutoGraph')
+ raise errors.UnsupportedLanguageElementError(
+ 'generators are not supported by AutoGraph')
def visit_YieldFrom(self, node):
- raise NotImplementedError('Generators are not supported by AutoGraph')
+ raise errors.UnsupportedLanguageElementError(
+ 'generators are not supported by AutoGraph')
def verify(node):
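The checker walks the function's AST and rejects constructs AutoGraph cannot transform; raising UnsupportedLanguageElementError instead of NotImplementedError is what lets converted_call (below) fall back to running the original function. A minimal sketch of the same visitor pattern, using the stdlib ast module rather than gast:

```python
import ast

class GeneratorChecker(ast.NodeVisitor):
    # Visiting any Yield node means the function is a generator.
    def visit_Yield(self, node):
        raise NotImplementedError("generators are not supported")

try:
    GeneratorChecker().visit(ast.parse("def f():\n    yield 1\n"))
except NotImplementedError as e:
    print(e)  # generators are not supported
```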
diff --git a/tensorflow/python/autograph/impl/api.py b/tensorflow/python/autograph/impl/api.py
index 9a5c631..a8a64fb 100644
--- a/tensorflow/python/autograph/impl/api.py
+++ b/tensorflow/python/autograph/impl/api.py
@@ -37,6 +37,7 @@
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.operators import py_builtins
+from tensorflow.python.autograph.pyct import error_utils
from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import origin_info
@@ -53,7 +54,7 @@
# TODO(mdan): Export this symbol.
-class AutoGraphError(Exception):
+class AutoGraphError(errors.PyCTError):
"""Base class for all AutoGraph exceptions."""
pass
@@ -68,7 +69,7 @@
pass
-class _ErrorMetadata(errors.ErrorMetadataBase):
+class _ErrorMetadata(error_utils.ErrorMetadataBase):
"""AutoGraph-specific error metadata. See base class."""
def create_exception(self, source_error):
@@ -100,8 +101,8 @@
op=source_error.op,
message=message)
- elif preferred_type in (AutoGraphError, ConversionError, StagingError,
- errors_impl.InaccessibleTensorError,
+ elif preferred_type in (errors.PyCTError, AutoGraphError, ConversionError,
+ StagingError, errors_impl.InaccessibleTensorError,
errors_impl.OperatorNotAllowedInGraphError):
return preferred_type(self.get_message())
@@ -379,12 +380,6 @@
return False
-def _errors_are_normally_possible(entity, error):
- if inspect_utils.islambda(entity) and isinstance(error, ValueError):
- return True
- return False
-
-
def converted_call(f,
args,
kwargs,
@@ -571,10 +566,14 @@
logging.log(1, 'Error transforming entity %s', target_entity, exc_info=True)
if is_autograph_strict_conversion_mode():
raise
- if _errors_are_normally_possible(target_entity, e):
- logging.warn(
- 'AutoGraph could not transform %s and will run it as-is.\n'
- 'Cause: %s', target_entity, e)
+
+ if isinstance(e, errors.UnsupportedLanguageElementError):
+ # Repeating the check made upon function entry because the state might
+ # have been updated in the meantime.
+ if not conversion.check_cached_unconverted(f, options):
+ logging.warn(
+ 'AutoGraph could not transform %s and will run it as-is.\n'
+ 'Cause: %s', target_entity, e)
else:
logging.warn(
'AutoGraph could not transform %s and will run it as-is.\n'
@@ -582,6 +581,7 @@
' the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and'
' attach the full output.\n'
'Cause: %s', target_entity, e)
+
return _call_unconverted(f, args, kwargs, options)
with StackTraceMapper(converted_f), tf_stack.CurrentModuleFilter():
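Net effect in converted_call: a known-unsupported language element produces the short "will run it as-is" warning (suppressed when the function is already cached as unconverted), while any other failure keeps the longer bug-report message. A runnable sketch of that fallback shape; `convert`, the `_warned` cache, and the error class here are stand-ins for illustration, not the real AutoGraph API:

```python
class UnsupportedLanguageElementError(Exception):
    """Stand-in for the AutoGraph pyct error class."""

_warned = set()  # stand-in for conversion.check_cached_unconverted

def convert(f):
    # Stand-in conversion step: pretend every function is unsupported.
    raise UnsupportedLanguageElementError("mangled names are not yet supported")

def converted_call_sketch(f, args):
    try:
        converted = convert(f)
    except UnsupportedLanguageElementError as e:
        if f not in _warned:
            _warned.add(f)
            print("AutoGraph could not transform %r and will run it as-is. "
                  "Cause: %s" % (f, e))
        return f(*args)  # graceful fallback to the unconverted function
    return converted(*args)

inc = lambda x: x + 1
print(converted_call_sketch(inc, (41,)))  # warns once, then prints 42
print(converted_call_sketch(inc, (1,)))   # no second warning
```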
diff --git a/tensorflow/python/autograph/impl/api_test.py b/tensorflow/python/autograph/impl/api_test.py
index 0a81ec2..4b35b16 100644
--- a/tensorflow/python/autograph/impl/api_test.py
+++ b/tensorflow/python/autograph/impl/api_test.py
@@ -19,22 +19,27 @@
from __future__ import print_function
import collections
+import contextlib
import functools
import gc
import imp
import os
import re
+import sys
import textwrap
import types
import numpy as np
+import six
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
+from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import parser
+from tensorflow.python.autograph.utils import ag_logging
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
@@ -64,6 +69,26 @@
class ApiTest(test.TestCase):
+ @contextlib.contextmanager
+ def assertPrints(self, expected, not_expected):
+ try:
+ out_capturer = six.StringIO()
+ sys.stdout = out_capturer
+ yield
+ self.assertIn(expected, out_capturer.getvalue())
+ self.assertNotIn(not_expected, out_capturer.getvalue())
+ finally:
+ sys.stdout = sys.__stdout__
+
+ def assertNoMemoryLeaks(self, f):
+ object_ids_before = {id(o) for o in gc.get_objects()}
+ f()
+ gc.collect()
+ objects_after = tuple(
+ o for o in gc.get_objects() if id(o) not in object_ids_before)
+ self.assertEmpty(
+ tuple(o for o in objects_after if isinstance(o, TestResource)))
+
@test_util.run_deprecated_v1
def test_decorator_recursive(self):
@@ -431,19 +456,24 @@
class TestClass(object):
- def __init__(self, x):
- self.__private = x
+ def __init__(self):
+ self.__private = constant_op.constant(-1)
def test_method(self):
- if self.__private < 0:
- return self.__private
return self.__private
- tc = TestClass(constant_op.constant(-1))
- # The error below is specific to the `if` statement not being converted.
- with self.assertRaisesRegex(NotImplementedError, 'Mangled names'):
+ tc = TestClass()
+ with self.assertRaisesRegex(
+ errors.UnsupportedLanguageElementError, 'mangled names'):
api.converted_call(tc.test_method, (), None, options=DEFAULT_RECURSIVE)
- tc.test_method()
+
+ # TODO(mdan): Refactor to avoid this use of global state.
+ ag_logging.set_verbosity(0, True)
+ os.environ['AUTOGRAPH_STRICT_CONVERSION'] = '0'
+ with self.assertPrints('could not transform', 'bug'):
+ api.converted_call(tc.test_method, (), None, options=DEFAULT_RECURSIVE)
+ ag_logging.set_verbosity(0, False)
+ os.environ['AUTOGRAPH_STRICT_CONVERSION'] = '1'
def test_converted_call_already_converted(self):
@@ -685,15 +715,6 @@
self.assertAllEqual(self.evaluate(graph_fn()), (3, 2, 1))
- def assertNoMemoryLeaks(self, f):
- object_ids_before = {id(o) for o in gc.get_objects()}
- f()
- gc.collect()
- objects_after = tuple(
- o for o in gc.get_objects() if id(o) not in object_ids_before)
- self.assertEmpty(
- tuple(o for o in objects_after if isinstance(o, TestResource)))
-
def test_converted_call_no_leaks_via_closure(self):
def test_fn():
diff --git a/tensorflow/python/autograph/lang/BUILD b/tensorflow/python/autograph/lang/BUILD
index 5ec729c..dca39bb 100644
--- a/tensorflow/python/autograph/lang/BUILD
+++ b/tensorflow/python/autograph/lang/BUILD
@@ -34,6 +34,7 @@
py_test(
name = "special_functions_test",
srcs = ["special_functions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":lang",
diff --git a/tensorflow/python/autograph/operators/BUILD b/tensorflow/python/autograph/operators/BUILD
index 25fefbd..fd92a32 100644
--- a/tensorflow/python/autograph/operators/BUILD
+++ b/tensorflow/python/autograph/operators/BUILD
@@ -54,6 +54,7 @@
py_test(
name = "data_structures_test",
srcs = ["data_structures_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
@@ -64,6 +65,7 @@
py_test(
name = "control_flow_test",
srcs = ["control_flow_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_gpu", # b/127001953
@@ -77,6 +79,7 @@
py_test(
name = "exceptions_test",
srcs = ["exceptions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
@@ -87,6 +90,7 @@
py_test(
name = "logical_test",
srcs = ["logical_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
@@ -97,6 +101,7 @@
py_test(
name = "py_builtins_test",
srcs = ["py_builtins_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_windows"],
deps = [
@@ -126,6 +131,7 @@
py_test(
name = "slices_test",
srcs = ["slices_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
@@ -136,6 +142,7 @@
py_test(
name = "special_values_test",
srcs = ["special_values_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
@@ -146,6 +153,7 @@
py_test(
name = "symbols_test",
srcs = ["symbols_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":operators",
diff --git a/tensorflow/python/autograph/operators/control_flow.py b/tensorflow/python/autograph/operators/control_flow.py
index 70d265a..bbfee42 100644
--- a/tensorflow/python/autograph/operators/control_flow.py
+++ b/tensorflow/python/autograph/operators/control_flow.py
@@ -76,18 +76,22 @@
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
+from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
+from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.util import nest
+
LIMIT_PYTHON_ITERATIONS = True
PYTHON_MAX_ITERATIONS = 100000000 # Fails in about one minute for empty loops.
WARN_INEFFICIENT_UNROLL = True
INEFFICIENT_UNROLL_MIN_ITERATIONS = 3000
INEFFICIENT_UNROLL_MIN_OPS = 1
+
def _disallow_undefs_into_loop(*values):
"""Ensures that all values in the state are defined when entering a loop."""
undefined = tuple(filter(special_values.is_undefined, values))
@@ -328,6 +332,11 @@
init_vars, basic_symbol_names,
composite_symbol_names)
+ if isinstance(iter_, ragged_tensor.RaggedTensor):
+ return _tf_ragged_for_stmt(iter_, extra_test, body, get_state, set_state,
+ init_vars, basic_symbol_names,
+ composite_symbol_names)
+
# Note: This experimental interface is subject to change.
custom_handler = getattr(iter_, '_autograph_for_loop', None)
if custom_handler is not None:
@@ -413,6 +422,60 @@
return results
+def _tf_ragged_for_stmt(iter_, extra_test, body, get_state, set_state,
+ init_vars, basic_symbol_names,
+ composite_symbol_names):
+ """Overload of for_stmt that iterates over TF ragged tensors."""
+ _disallow_undefs_into_loop(*init_vars)
+
+ # TODO(mdan): Move this into len()? Requires eager support.
+ if iter_.shape and iter_.shape[0] is not None:
+ n = iter_.shape[0]
+ else:
+ n = iter_.row_lengths()[0]
+
+ def while_body(iterate_index, *loop_vars):
+ """Main loop body."""
+ iterate = iter_[iterate_index]
+ new_vars = body(iterate, *loop_vars)
+ _verify_tf_loop_vars(loop_vars, new_vars, basic_symbol_names,
+ composite_symbol_names)
+
+ loop_vars = (iterate_index + 1,)
+ if new_vars:
+ loop_vars += new_vars
+
+ return loop_vars
+
+ def while_cond(iterate_index, *loop_vars):
+ if extra_test is not None:
+ return control_flow_ops.cond(
+ iterate_index < n, lambda: extra_test(*loop_vars), lambda: False)
+ return iterate_index < n
+
+ opts = {'maximum_iterations': n}
+
+ results = _tf_while_stmt(
+ while_cond,
+ while_body,
+ get_state,
+ set_state,
+ (array_ops.zeros_like(n),) + init_vars,
+ None,
+ None,
+ opts=opts,
+ )
+
+ if isinstance(results, (tuple, list)):
+ assert len(results) >= 1 # Has at least the iterate.
+ if len(results) > 1:
+ results = results[1:]
+ else:
+ results = ()
+
+ return results
+
+
def _tf_range_for_stmt(iter_, extra_test, body, get_state, set_state, init_vars,
basic_symbol_names, composite_symbol_names):
"""Overload of for_stmt that iterates over a TF range (and elides it)."""
diff --git a/tensorflow/python/autograph/operators/control_flow_test.py b/tensorflow/python/autograph/operators/control_flow_test.py
index 7b6217c..2290d61 100644
--- a/tensorflow/python/autograph/operators/control_flow_test.py
+++ b/tensorflow/python/autograph/operators/control_flow_test.py
@@ -36,82 +36,74 @@
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
+from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import test
+@test_util.run_all_in_graph_and_eager_modes
class ForLoopTest(test.TestCase):
def test_tensor(self):
- with ops.Graph().as_default():
- s = control_flow.for_stmt(
- constant_op.constant([1, 2, 3, 4]),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 10 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (1234,))
+ s = control_flow.for_stmt(
+ constant_op.constant([1, 2, 3, 4]),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 10 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (1234,))
def test_range_tensor(self):
- with ops.Graph().as_default():
- s = control_flow.for_stmt(
- math_ops.range(5),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 10 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (1234,))
+ s = control_flow.for_stmt(
+ math_ops.range(5),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 10 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (1234,))
def test_range_tensor_random_delta(self):
-
- with ops.Graph().as_default():
- random_one = random_ops.random_uniform((), 1, 2, dtype=dtypes.int32)
- s = control_flow.for_stmt(
- math_ops.range(0, 5, random_one),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 10 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (1234,))
+ random_one = random_ops.random_uniform((), 1, 2, dtype=dtypes.int32)
+ s = control_flow.for_stmt(
+ math_ops.range(0, 5, random_one),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 10 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (1234,))
def test_range_tensor_explicit_limit_delta(self):
- with ops.Graph().as_default():
- s = control_flow.for_stmt(
- math_ops.range(-17, -3, 5),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 100 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (-171207,))
+ s = control_flow.for_stmt(
+ math_ops.range(-17, -3, 5),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 100 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (-171207,))
def test_range_tensor_random_negative_delta(self):
- with ops.Graph().as_default():
- random_neg_five = random_ops.random_uniform((),
- -5,
- -4,
- dtype=dtypes.int32)
- s = control_flow.for_stmt(
- math_ops.range(17, 3, random_neg_five),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 100 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (171207,))
+ random_neg_five = random_ops.random_uniform((), -5, -4, dtype=dtypes.int32)
+ s = control_flow.for_stmt(
+ math_ops.range(17, 3, random_neg_five),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 100 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (171207,))
def test_range_tensor_negative_delta(self):
- with ops.Graph().as_default():
- s = control_flow.for_stmt(
- math_ops.range(17, 3, -5),
- extra_test=lambda s: True,
- body=lambda i, s: (s * 100 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(0,))
- self.assertEqual(self.evaluate(s), (171207,))
+ s = control_flow.for_stmt(
+ math_ops.range(17, 3, -5),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 100 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (171207,))
def test_tensor_with_extra_test_only_python_state(self):
class MutableObject(object):
@@ -151,15 +143,14 @@
self.assertEqual(s, (1234,))
def test_tf_dataset(self):
- with ops.Graph().as_default():
- s = control_flow.for_stmt(
- dataset_ops.Dataset.range(5),
- extra_test=None,
- body=lambda i, s: (s * 10 + i,),
- get_state=lambda: (),
- set_state=lambda _: None,
- init_vars=(constant_op.constant(0, dtype=dtypes.int64),))
- self.assertEqual(self.evaluate(s), (1234,))
+ s = control_flow.for_stmt(
+ dataset_ops.Dataset.range(5),
+ extra_test=None,
+ body=lambda i, s: (s * 10 + i,),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(constant_op.constant(0, dtype=dtypes.int64),))
+ self.assertEqual(self.evaluate(s), (1234,))
def test_dataset_with_extra_test(self):
s = control_flow.for_stmt(
@@ -209,7 +200,6 @@
init_vars=(constant_op.constant(0, dtype=dtypes.int64),))
self.assertEqual(self.evaluate(s), (3,))
- @test_util.run_v2_only
def test_tf_dataset_no_loop_vars(self):
v = variables.Variable(0, dtype=dtypes.int64)
self.evaluate(v.initializer)
@@ -217,7 +207,8 @@
def stateless_with_side_effects(i):
v.assign(v.read_value() * 10 + i)
- # function is important here, because ops test for its presence.
+ # tf.function is required both for the automatic control dependencies and
+ # because ops test for its presence.
@def_function.function(autograph=False)
def test_fn():
control_flow.for_stmt(
@@ -228,7 +219,7 @@
set_state=lambda _: None,
init_vars=())
- test_fn()
+ self.evaluate(test_fn())
self.assertEqual(self.evaluate(v.read_value()), 1234)
def test_tf_iterator(self):
@@ -246,14 +237,14 @@
s, = test_fn()
self.assertAllEqual(s, 1234)
- @test_util.run_v2_only
def test_tf_iterator_no_loop_vars(self):
v = variables.Variable(0, dtype=dtypes.int64)
+ self.evaluate(v.initializer)
def stateless_with_side_effects(i):
v.assign(v.read_value() * 10 + i)
- # graph-mode iterators are only supported inside tf.function.
+ # tf.function required for the automatic control dependencies.
@def_function.function(autograph=False)
def test_fn():
control_flow.for_stmt(
@@ -264,13 +255,59 @@
set_state=lambda _: None,
init_vars=())
- test_fn()
+ self.evaluate(test_fn())
self.assertEqual(self.evaluate(v.read_value()), 1234)
+ def test_tf_ragged_tensor(self):
+ s = control_flow.for_stmt(
+ ragged_factory_ops.constant([[1], [2, 4], [3]]),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 10 + i[0],),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (123,))
+ def test_tf_ragged_tensor_higher_dimensional(self):
+ ragged_3d = [
+ [[1], [1, 1], [1]],
+ [[2], [2]],
+ ]
+ s = control_flow.for_stmt(
+ ragged_factory_ops.constant(ragged_3d),
+ extra_test=lambda s: True,
+ body=lambda i, s: (s * 10 + i[0][0],),
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=(0,))
+ self.assertEqual(self.evaluate(s), (12,))
+
+ def test_tf_ragged_tensor_no_loop_vars(self):
+ v = variables.Variable(0, dtype=dtypes.int32)
+ self.evaluate(v.initializer)
+
+ def stateless_with_side_effects(i):
+ v.assign(v.read_value() * 10 + i[0])
+
+ # tf.function required for the automatic control dependencies.
+ @def_function.function(autograph=False)
+ def test_fn():
+ control_flow.for_stmt(
+ ragged_factory_ops.constant([[1], [2, 4], [3]]),
+ extra_test=None,
+ body=stateless_with_side_effects,
+ get_state=lambda: (),
+ set_state=lambda _: None,
+ init_vars=())
+
+ self.evaluate(test_fn())
+ # Note: 123 = ((0*10 + 1)*10+2)*10+3 (first element of each row).
+ self.assertEqual(self.evaluate(v.read_value()), 123)
+
+
+@test_util.run_all_in_graph_and_eager_modes
class WhileLoopTest(test.TestCase):
- @test_util.run_deprecated_v1
def test_tensor(self):
n = constant_op.constant(5)
results = control_flow.while_stmt(
@@ -282,7 +319,6 @@
self.assertEqual((5, 10), self.evaluate(results))
def test_tensor_with_tf_side_effects_in_cond(self):
-
n = constant_op.constant(5, dtype=dtypes.int64)
v = variables.Variable(0, dtype=dtypes.int64)
@@ -290,7 +326,7 @@
v.assign(v.read_value() + 1)
return v.read_value()
- # function is important here, because ops test for its presence.
+ # tf.function required for the automatic control dependencies.
@def_function.function(autograph=False)
def test_fn():
return control_flow.while_stmt(
@@ -332,7 +368,6 @@
self.assertEqual(self.evaluate(s), (5, 10))
self.assertEqual(self.evaluate(state.field), 10)
- @test_util.run_deprecated_v1
def test_python_with_tensor_state(self):
n = 5
results = control_flow.while_stmt(
@@ -386,47 +421,61 @@
out_capturer.getvalue()))
+@test_util.run_all_in_graph_and_eager_modes
class IfStmtTest(test.TestCase):
- def single_return_if_stmt(self, cond):
- return control_flow.if_stmt(
- cond=cond,
- body=lambda: 1,
- orelse=lambda: -1,
- get_state=lambda: (),
- set_state=lambda _: None)
-
- def multi_return_if_stmt(self, cond):
- return control_flow.if_stmt(
- cond=cond,
- body=lambda: (1, 2),
- orelse=lambda: (-1, -2),
- get_state=lambda: (),
- set_state=lambda _: None)
-
- @test_util.run_deprecated_v1
def test_tensor(self):
- with self.cached_session():
- t = self.single_return_if_stmt(constant_op.constant(True))
- self.assertEqual(1, self.evaluate(t))
- t = self.single_return_if_stmt(constant_op.constant(False))
- self.assertEqual(-1, self.evaluate(t))
+
+ def test_fn(cond):
+ return control_flow.if_stmt(
+ cond=cond,
+ body=lambda: constant_op.constant(1),
+ orelse=lambda: constant_op.constant(-1),
+ get_state=lambda: (),
+ set_state=lambda _: None)
+
+ self.assertEqual(1, self.evaluate(test_fn(constant_op.constant(True))))
+ self.assertEqual(-1, self.evaluate(test_fn(constant_op.constant(False))))
+
+ def test_tensor_multiple_returns(self):
+
+ def test_fn(cond):
+ return control_flow.if_stmt(
+ cond=cond,
+ body=lambda: (constant_op.constant(1), constant_op.constant(2)),
+ orelse=lambda: (constant_op.constant(-1), constant_op.constant(-2)),
+ get_state=lambda: (),
+ set_state=lambda _: None)
+
+ self.assertEqual((1, 2), self.evaluate(test_fn(constant_op.constant(True))))
+ self.assertEqual((-1, -2),
+ self.evaluate(test_fn(constant_op.constant(False))))
def test_python(self):
- self.assertEqual(1, self.single_return_if_stmt(True))
- self.assertEqual(-1, self.single_return_if_stmt(False))
- @test_util.run_deprecated_v1
- def test_tensor_multiple_returns(self):
- with self.cached_session():
- t = self.multi_return_if_stmt(constant_op.constant(True))
- self.assertAllEqual([1, 2], self.evaluate(t))
- t = self.multi_return_if_stmt(constant_op.constant(False))
- self.assertAllEqual([-1, -2], self.evaluate(t))
+ def test_fn(cond):
+ return control_flow.if_stmt(
+ cond=cond,
+ body=lambda: 1,
+ orelse=lambda: -1,
+ get_state=lambda: (),
+ set_state=lambda _: None)
+
+ self.assertEqual(1, test_fn(True))
+ self.assertEqual(-1, test_fn(False))
def test_python_multiple_returns(self):
- self.assertEqual((1, 2), self.multi_return_if_stmt(True))
- self.assertEqual((-1, -2), self.multi_return_if_stmt(False))
+
+ def test_fn(cond):
+ return control_flow.if_stmt(
+ cond=cond,
+ body=lambda: (1, 2),
+ orelse=lambda: (-1, -2),
+ get_state=lambda: (),
+ set_state=lambda _: None)
+
+ self.assertEqual((1, 2), test_fn(True))
+ self.assertEqual((-1, -2), test_fn(False))
if __name__ == '__main__':
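
For readers skimming the test changes: a minimal standalone sketch of the
if_stmt API these tests exercise (import paths assumed from the package under
test; not part of the diff):

    from tensorflow.python.autograph.operators import control_flow
    from tensorflow.python.framework import constant_op

    # Tensor condition: both branches are staged; selection happens at runtime.
    t = control_flow.if_stmt(
        cond=constant_op.constant(True),
        body=lambda: constant_op.constant(1),
        orelse=lambda: constant_op.constant(-1),
        get_state=lambda: (),
        set_state=lambda _: None)

    # Python condition: behaves like a plain `if`; only one branch executes.
    p = control_flow.if_stmt(
        cond=True,
        body=lambda: 1,
        orelse=lambda: -1,
        get_state=lambda: (),
        set_state=lambda _: None)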
diff --git a/tensorflow/python/autograph/pyct/BUILD b/tensorflow/python/autograph/pyct/BUILD
index 6c45073..f7ae813 100644
--- a/tensorflow/python/autograph/pyct/BUILD
+++ b/tensorflow/python/autograph/pyct/BUILD
@@ -26,6 +26,7 @@
"ast_util.py",
"cfg.py",
"compiler.py",
+ "error_utils.py",
"errors.py",
"inspect_utils.py",
"origin_info.py",
@@ -94,8 +95,8 @@
)
py_test(
- name = "errors_test",
- srcs = ["errors_test.py"],
+ name = "error_utils_test",
+ srcs = ["error_utils_test.py"],
python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
diff --git a/tensorflow/python/autograph/pyct/common_transformers/BUILD b/tensorflow/python/autograph/pyct/common_transformers/BUILD
index 37742e0..61856a5 100644
--- a/tensorflow/python/autograph/pyct/common_transformers/BUILD
+++ b/tensorflow/python/autograph/pyct/common_transformers/BUILD
@@ -32,6 +32,7 @@
py_test(
name = "anf_test",
srcs = ["anf_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_oss"],
deps = [
diff --git a/tensorflow/python/autograph/pyct/error_utils.py b/tensorflow/python/autograph/pyct/error_utils.py
new file mode 100644
index 0000000..34c3a39
--- /dev/null
+++ b/tensorflow/python/autograph/pyct/error_utils.py
@@ -0,0 +1,196 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Code transformation exceptions."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+
+from tensorflow.python.autograph.pyct import origin_info
+
+
+class FrameInfo(
+ collections.namedtuple(
+ 'FrameInfo',
+ ('filename', 'lineno', 'function_name', 'code', 'converted'))):
+ pass
+
+
+def _stack_trace_inside_mapped_code(tb, source_map):
+ """Summarizes inner traceback frames up to the call to a given function.
+
+ This function locates the innermost (i.e. most recent) frame that corresponds
+ to code that can be mapped by source_map, and returns a
+ translated stack trace ending at that frame. If no such frame is found, the
+ entire stack trace is summarized.
+
+ For example, the following code:
+
+ def f():
+ for i in tf.range(1):
+ z = y + i # z only defined here
+
+ Would generate this traceback:
+
+ <converted code>
+ ag__.for_stmt(...)
+ <for_stmt>
+ return _known_len_tf_for_stmt(iter_, extra_test, body, init_state)
+ <_known_len_tf_for_stmt>
+ _disallow_undefs_into_loop(*init_state)
+ <_disallow_undefs_into_loop>
+ raise ...
+
+ Which is then processed into:
+
+ <f>
+ for i in tf.range(1):
+ <for_stmt>
+ return _known_len_tf_for_stmt(iter_, extra_test, body, init_state)
+ <_known_len_tf_for_stmt>
+ _disallow_undefs_into_loop(*init_state)
+ <_disallow_undefs_into_loop>
+ raise ...
+
+ Args:
+ tb: List[Tuple], the traceback corresponding to an error; typically,
+ the output of traceback.extract_tb.
+ source_map: Dict[LineLocation, OriginInfo], a source map as created by
+ origin_info.create_source_map.
+
+ Returns:
+ Tuple[FrameInfo, ...]
+ """
+ result_frames = []
+ for filename, line_number, function_name, text in reversed(tb):
+
+ loc = origin_info.LineLocation(filename=filename, lineno=line_number)
+ if loc in source_map:
+ origin = source_map[loc]
+ origin_frame_info = FrameInfo(
+ filename=origin.loc.filename,
+ lineno=origin.loc.lineno,
+ function_name=origin.function_name,
+ code=origin.source_code_line,
+ converted=True)
+ result_frames.append(origin_frame_info)
+ break
+
+ fi = FrameInfo(
+ filename=filename,
+ lineno=line_number,
+ function_name=function_name,
+ code=text,
+ converted=False)
+ result_frames.append(fi)
+
+ return tuple(result_frames)
+
+
+KNOWN_STRING_CONSTRUCTOR_ERRORS = (
+ AssertionError,
+ AttributeError,
+ NameError,
+ NotImplementedError,
+ RuntimeError,
+ StopIteration,
+ TypeError,
+ ValueError,
+)
+
+
+# KeyError escapes newlines in strings. We create a special subclass
+# that doesn't do that. Overriding the name for display purposes; hopefully
+# that won't create too many surprises.
+class MultilineMessageKeyError(KeyError):
+
+ def __init__(self, message, original_key):
+ super(MultilineMessageKeyError, self).__init__(original_key)
+ self.__message = message
+
+ def __str__(self):
+ return self.__message
+
+MultilineMessageKeyError.__name__ = KeyError.__name__
+
+
+class ErrorMetadataBase(object):
+ """Container objects attached to exceptions in converted code.
+
+ This metadata allows re-raising exceptions that occur in generated code, with
+ a custom error message that includes a stack trace relative to user-readable
+ code from which the generated code originated.
+ """
+
+ def __init__(self, callsite_tb, cause_metadata, cause_message, source_map):
+ translated_stack = _stack_trace_inside_mapped_code(callsite_tb, source_map)
+
+ if cause_metadata is None:
+ self.translated_stack = translated_stack
+ self.cause_message = cause_message
+ else:
+ # Daisy chain the translated stacks.
+ self.translated_stack = (
+ cause_metadata.translated_stack + (translated_stack[-1],))
+ self.cause_message = cause_metadata.cause_message
+
+ def get_message(self):
+ """Returns the message for the underlying exception."""
+ lines = []
+
+ lines.append('in converted code:')
+ lines.append('')
+
+ for frame_info in reversed(self.translated_stack):
+ lines.append(' {}:{} {}{}'.format(
+ frame_info.filename,
+ frame_info.lineno,
+ frame_info.function_name,
+ ' *' if frame_info.converted else '',
+ ))
+ if frame_info.code is None:
+ code_snippet = '<source unavailable>'
+ else:
+ code_snippet = frame_info.code.strip()
+ lines.append(' {}'.format(code_snippet))
+
+ lines.append('')
+
+ message_lines = self.cause_message.split('\n')
+ for i in range(len(message_lines)):
+ message_lines[i] = ' ' + message_lines[i]
+ lines.extend(message_lines)
+
+ lines.append('')
+
+ return '\n'.join(lines)
+
+ def create_exception(self, source_error):
+ preferred_type = type(source_error)
+ if preferred_type.__init__ is Exception.__init__:
+ return preferred_type(self.get_message())
+ if preferred_type in KNOWN_STRING_CONSTRUCTOR_ERRORS:
+ return preferred_type(self.get_message())
+ elif preferred_type is KeyError:
+ return MultilineMessageKeyError(self.get_message(), self.cause_message)
+ return None
+
+ def to_exception(self, source_error):
+ exc = self.create_exception(source_error)
+ exc.__suppress_context__ = True
+ exc.ag_error_metadata = self
+ return exc
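
A short usage sketch for the new error_utils module (mirrors the renamed test
below; the empty source_map is an assumption for illustration):

    import traceback

    from tensorflow.python.autograph.pyct import error_utils

    try:
      raise ValueError('test message')
    except ValueError as e:
      em = error_utils.ErrorMetadataBase(
          callsite_tb=traceback.extract_tb(e.__traceback__),
          cause_metadata=None,
          cause_message=str(e),
          source_map={})
      # ValueError accepts a plain message string, so to_exception returns a
      # fresh ValueError whose message embeds the translated stack trace.
      new_exc = em.to_exception(e)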
diff --git a/tensorflow/python/autograph/pyct/errors_test.py b/tensorflow/python/autograph/pyct/error_utils_test.py
similarity index 90%
rename from tensorflow/python/autograph/pyct/errors_test.py
rename to tensorflow/python/autograph/pyct/error_utils_test.py
index 9640af1..9fdbc55 100644
--- a/tensorflow/python/autograph/pyct/errors_test.py
+++ b/tensorflow/python/autograph/pyct/error_utils_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
-"""Tests for errors module."""
+"""Tests for error_utils module."""
from __future__ import absolute_import
from __future__ import division
@@ -20,7 +20,7 @@
import re
-from tensorflow.python.autograph.pyct import errors
+from tensorflow.python.autograph.pyct import error_utils
from tensorflow.python.platform import test
@@ -31,7 +31,7 @@
class CustomError(Exception):
pass
- em = errors.ErrorMetadataBase(
+ em = error_utils.ErrorMetadataBase(
callsite_tb=(),
cause_metadata=None,
cause_message='test message',
@@ -47,7 +47,7 @@
def __init__(self):
super(CustomError, self).__init__('test_message')
- em = errors.ErrorMetadataBase(
+ em = error_utils.ErrorMetadataBase(
callsite_tb=(),
cause_metadata=None,
cause_message='test message',
@@ -61,7 +61,7 @@
('/path/two.py', 171, 'test_fn_2', 'test code'),
]
cause_message = 'Test message'
- em = errors.ErrorMetadataBase(
+ em = error_utils.ErrorMetadataBase(
callsite_tb=callsite_tb,
cause_metadata=None,
cause_message=cause_message,
diff --git a/tensorflow/python/autograph/pyct/errors.py b/tensorflow/python/autograph/pyct/errors.py
index 34c3a39..b8b2057 100644
--- a/tensorflow/python/autograph/pyct/errors.py
+++ b/tensorflow/python/autograph/pyct/errors.py
@@ -18,179 +18,12 @@
from __future__ import division
from __future__ import print_function
-import collections
-from tensorflow.python.autograph.pyct import origin_info
-
-
-class FrameInfo(
- collections.namedtuple(
- 'FrameInfo',
- ('filename', 'lineno', 'function_name', 'code', 'converted'))):
+class PyCTError(Exception):
+ """Base class for all exceptions."""
pass
-def _stack_trace_inside_mapped_code(tb, source_map):
- """Summarizes inner traceback frames up to the call to a given function.
-
- This functions locates the innermost (i.e. most recent) frame that corresponds
- to code that can be mapped by source_map originated from, and returns a
- translated stack trace ending at that frame. If no such frame is found, the
- entire stack trace is summarized.
-
- For example, the following code:
-
- def f():
- for i in tf.range(1):
- z = y + i # z only defined here
-
- Would generate this traceback:
-
- <converted code>
- ag__.for_stmt(...)
- <for_stmt>
- return _known_len_tf_for_stmt(iter_, extra_test, body, init_state)
- <_known_len_tf_for_stmt>
- _disallow_undefs_into_loop(*init_state)
- <_disallow_undefs_into_loop>
- raise ...
-
- Which is then processed into:
-
- <f>
- for i in tf.range(1):
- <for_stmt>
- return _known_len_tf_for_stmt(iter_, extra_test, body, init_state)
- <_known_len_tf_for_stmt>
- _disallow_undefs_into_loop(*init_state)
- <_disallow_undefs_into_loop>
- raise ...
-
- Args:
- tb: List[Tuple], the traceback corresponding to an error; typically,
- the output of traceback.extract_tb.
- source_map: Dict[LineLocation, OriginInfo], a source map as created by
- origin_info.create_source_map.
-
- Returns:
- List[FrameInfo]
- """
- result_frames = []
- for filename, line_number, function_name, text in reversed(tb):
-
- loc = origin_info.LineLocation(filename=filename, lineno=line_number)
- if loc in source_map:
- origin = source_map[loc]
- origin_frame_info = FrameInfo(
- filename=origin.loc.filename,
- lineno=origin.loc.lineno,
- function_name=origin.function_name,
- code=origin.source_code_line,
- converted=True)
- result_frames.append(origin_frame_info)
- break
-
- fi = FrameInfo(
- filename=filename,
- lineno=line_number,
- function_name=function_name,
- code=text,
- converted=False)
- result_frames.append(fi)
-
- return tuple(result_frames)
-
-
-KNOWN_STRING_CONSTRUCTOR_ERRORS = (
- AssertionError,
- AttributeError,
- NameError,
- NotImplementedError,
- RuntimeError,
- StopIteration,
- TypeError,
- ValueError,
-)
-
-
-# KeyError escapes newlines in strings. We create a special subclass
-# that doesn't do that. Overriding the name for display purposes; hopefully
-# that won't create too many surprises.
-class MultilineMessageKeyError(KeyError):
-
- def __init__(self, message, original_key):
- super(MultilineMessageKeyError, self).__init__(original_key)
- self.__message = message
-
- def __str__(self):
- return self.__message
-
-MultilineMessageKeyError.__name__ = KeyError.__name__
-
-
-class ErrorMetadataBase(object):
- """Container objects attached to exceptions in converted code.
-
- This metadata allows re-raising exceptions that occur in generated code, with
- a custom error message that includes a stack trace relative to user-readable
- code from which the generated code originated.
- """
-
- def __init__(self, callsite_tb, cause_metadata, cause_message, source_map):
- translated_stack = _stack_trace_inside_mapped_code(callsite_tb, source_map)
-
- if cause_metadata is None:
- self.translated_stack = translated_stack
- self.cause_message = cause_message
- else:
- # Daisy chain the translated stacks.
- self.translated_stack = (
- cause_metadata.translated_stack + (translated_stack[-1],))
- self.cause_message = cause_metadata.cause_message
-
- def get_message(self):
- """Returns the message for the underlying exception."""
- lines = []
-
- lines.append('in converted code:')
- lines.append('')
-
- for frame_info in reversed(self.translated_stack):
- lines.append(' {}:{} {}{}'.format(
- frame_info.filename,
- frame_info.lineno,
- frame_info.function_name,
- ' *' if frame_info.converted else '',
- ))
- if frame_info.code is None:
- code_snippet = '<source unavailable>'
- else:
- code_snippet = frame_info.code.strip()
- lines.append(' {}'.format(code_snippet))
-
- lines.append('')
-
- message_lines = self.cause_message.split('\n')
- for i in range(len(message_lines)):
- message_lines[i] = ' ' + message_lines[i]
- lines.extend(message_lines)
-
- lines.append('')
-
- return '\n'.join(lines)
-
- def create_exception(self, source_error):
- preferred_type = type(source_error)
- if preferred_type.__init__ is Exception.__init__:
- return preferred_type(self.get_message())
- if preferred_type in KNOWN_STRING_CONSTRUCTOR_ERRORS:
- return preferred_type(self.get_message())
- elif preferred_type is KeyError:
- return MultilineMessageKeyError(self.get_message(), self.cause_message)
- return None
-
- def to_exception(self, source_error):
- exc = self.create_exception(source_error)
- exc.__suppress_context__ = True
- exc.ag_error_metadata = self
- return exc
+class UnsupportedLanguageElementError(PyCTError, NotImplementedError):
+ """Raised for code patterns that AutoGraph does not support."""
+ pass
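
Because UnsupportedLanguageElementError derives from both PyCTError and
NotImplementedError, handlers written against either base continue to match;
a brief sketch:

    from tensorflow.python.autograph.pyct import errors

    try:
      raise errors.UnsupportedLanguageElementError('unsupported construct')
    except NotImplementedError:
      # Pre-existing NotImplementedError handlers still catch the new type;
      # `except errors.PyCTError` would match it as well.
      pass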
diff --git a/tensorflow/python/autograph/pyct/parser.py b/tensorflow/python/autograph/pyct/parser.py
index 1ec21e5..c5b2fe5 100644
--- a/tensorflow/python/autograph/pyct/parser.py
+++ b/tensorflow/python/autograph/pyct/parser.py
@@ -28,6 +28,7 @@
import gast
import six
+from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.pyct import inspect_utils
@@ -81,7 +82,7 @@
# TODO(mdan): We could attempt to convert tabs to spaces by unix rule.
# See:
# https://docs.python.org/3/reference/lexical_analysis.html#indentation
- raise ValueError(
+ raise errors.UnsupportedLanguageElementError(
          'code mixing tabs and spaces for indentation is not allowed')
if len(tok_string) >= block_level:
tok_string = tok_string[block_level:]
@@ -143,7 +144,7 @@
# Note: the ValueError may be raised by parse_str.
except (SyntaxError, ValueError) as e:
def fail():
- raise ValueError(
+ raise errors.UnsupportedLanguageElementError(
'could not parse the source code:'
'\n\n{}\n'
'This error may be avoided by creating the lambda in a standalone'
diff --git a/tensorflow/python/autograph/pyct/static_analysis/BUILD b/tensorflow/python/autograph/pyct/static_analysis/BUILD
index 1194165..3620cff 100644
--- a/tensorflow/python/autograph/pyct/static_analysis/BUILD
+++ b/tensorflow/python/autograph/pyct/static_analysis/BUILD
@@ -37,6 +37,7 @@
py_test(
name = "activity_test",
srcs = ["activity_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":static_analysis",
@@ -85,6 +86,7 @@
name = "liveness_test",
testonly = True,
srcs = ["liveness_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":static_analysis",
@@ -126,6 +128,7 @@
py_test(
name = "reaching_definitions_test",
srcs = ["reaching_definitions_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":static_analysis",
diff --git a/tensorflow/python/autograph/pyct/testing/BUILD b/tensorflow/python/autograph/pyct/testing/BUILD
index d861cbb..59b15ce 100644
--- a/tensorflow/python/autograph/pyct/testing/BUILD
+++ b/tensorflow/python/autograph/pyct/testing/BUILD
@@ -44,7 +44,7 @@
name = "codegen_test",
size = "large",
srcs = ["codegen_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"manual",
diff --git a/tensorflow/python/autograph/utils/BUILD b/tensorflow/python/autograph/utils/BUILD
index 5b87cf0..60e1a0a 100644
--- a/tensorflow/python/autograph/utils/BUILD
+++ b/tensorflow/python/autograph/utils/BUILD
@@ -48,6 +48,7 @@
py_test(
name = "context_managers_test",
srcs = ["context_managers_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":utils",
@@ -58,6 +59,7 @@
py_test(
name = "misc_test",
srcs = ["misc_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":utils",
@@ -68,6 +70,7 @@
py_test(
name = "py_func_test",
srcs = ["py_func_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_windows"],
deps = [
@@ -79,6 +82,7 @@
py_test(
name = "type_check_test",
srcs = ["type_check_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":utils",
@@ -89,6 +93,7 @@
py_test(
name = "tensor_list_test",
srcs = ["tensor_list_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":utils",
@@ -100,6 +105,7 @@
py_test(
name = "tensors_test",
srcs = ["tensors_test.py"],
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":utils",
diff --git a/tensorflow/python/compat/compat.py b/tensorflow/python/compat/compat.py
index 56b1497..ca4ea8a 100644
--- a/tensorflow/python/compat/compat.py
+++ b/tensorflow/python/compat/compat.py
@@ -31,7 +31,7 @@
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
-_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2019, 11, 20)
+_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2019, 11, 22)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
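
The horizon date gates staged graph-level changes; a typical call site
consults it via forward_compatible (a hypothetical usage sketch):

    from tensorflow.python.compat import compat

    def choose_kernel():
      # True once the ambient date passes the horizon (or the env var
      # TF_FORWARD_COMPATIBILITY_DELTA_DAYS advances it).
      if compat.forward_compatible(2019, 11, 22):
        return 'new kernel path'
      return 'legacy kernel path'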
diff --git a/tensorflow/python/data/util/BUILD b/tensorflow/python/data/util/BUILD
index 3bf25c5..b5dc355 100644
--- a/tensorflow/python/data/util/BUILD
+++ b/tensorflow/python/data/util/BUILD
@@ -21,7 +21,7 @@
name = "nest_test",
size = "small",
srcs = ["nest_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":nest",
@@ -54,7 +54,7 @@
name = "sparse_test",
size = "small",
srcs = ["sparse_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":nest",
@@ -90,7 +90,7 @@
name = "structure_test",
size = "small",
srcs = ["structure_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":nest",
@@ -127,7 +127,7 @@
name = "options_test",
size = "small",
srcs = ["options_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":options",
@@ -151,7 +151,7 @@
name = "convert_test",
size = "small",
srcs = ["convert_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":convert",
@@ -176,7 +176,7 @@
name = "random_seed_test",
size = "small",
srcs = ["random_seed_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":random_seed",
@@ -198,7 +198,7 @@
name = "traverse_test",
size = "small",
srcs = ["traverse_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":traverse",
diff --git a/tensorflow/python/debug/BUILD b/tensorflow/python/debug/BUILD
index a6a41bc..1c30328 100644
--- a/tensorflow/python/debug/BUILD
+++ b/tensorflow/python/debug/BUILD
@@ -190,7 +190,7 @@
py_binary(
name = "grpc_tensorflow_server",
srcs = ["lib/grpc_tensorflow_server.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":grpc_tensorflow_server_lib"],
)
@@ -464,7 +464,7 @@
py_binary(
name = "offline_analyzer",
srcs = ["cli/offline_analyzer.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":offline_analyzer_lib"],
)
@@ -504,7 +504,7 @@
py_binary(
name = "debug_fibonacci",
srcs = ["examples/v1/debug_fibonacci.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_fibonacci_lib"],
)
@@ -512,7 +512,7 @@
py_binary(
name = "debug_fibonacci_v2",
srcs = ["examples/v2/debug_fibonacci_v2.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_fibonacci_lib"],
)
@@ -535,7 +535,7 @@
py_binary(
name = "debug_errors",
srcs = ["examples/v1/debug_errors.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_errors_lib"],
)
@@ -554,7 +554,7 @@
py_binary(
name = "debug_mnist",
srcs = ["examples/debug_mnist.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_mnist_lib"],
)
@@ -562,7 +562,7 @@
py_binary(
name = "debug_mnist_v1",
srcs = ["examples/v1/debug_mnist_v1.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_mnist_lib"],
)
@@ -570,7 +570,7 @@
py_binary(
name = "debug_mnist_v2",
srcs = ["examples/v2/debug_mnist_v2.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_mnist_lib"],
)
@@ -592,7 +592,7 @@
py_binary(
name = "debug_tflearn_iris",
srcs = ["examples/v1/debug_tflearn_iris.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_tflearn_iris_lib"],
)
@@ -611,7 +611,7 @@
py_binary(
name = "debug_keras",
srcs = ["examples/v1/debug_keras.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [":debug_keras_lib"],
)
@@ -631,7 +631,7 @@
name = "common_test",
size = "small",
srcs = ["lib/common_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":common",
@@ -664,7 +664,7 @@
name = "debug_graphs_test",
size = "small",
srcs = ["lib/debug_graphs_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_graphs",
@@ -677,7 +677,7 @@
name = "debug_data_test",
size = "small",
srcs = ["lib/debug_data_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_data",
@@ -702,6 +702,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -723,6 +724,7 @@
"//tensorflow/python/distribute:strategy_combinations",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
tags = [
"guitar",
"multi_and_single_gpu",
@@ -749,6 +751,7 @@
"//tensorflow/python:variables",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
shard_count = 8,
tags = [
"no_windows", # TODO(b/142475891): Enable this test on Windows.
@@ -774,6 +777,7 @@
"//tensorflow/python/data/ops:dataset_ops",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
tags = [
"no_windows", # b/142475891
],
@@ -795,6 +799,7 @@
"//tensorflow/python:training",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -802,7 +807,7 @@
name = "debug_utils_test",
size = "small",
srcs = ["lib/debug_utils_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_utils",
@@ -822,7 +827,7 @@
name = "source_utils_test",
size = "small",
srcs = ["lib/source_utils_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_data",
@@ -849,7 +854,7 @@
name = "source_remote_test",
size = "small",
srcs = ["lib/source_remote_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_windows",
@@ -876,7 +881,7 @@
name = "framework_test",
size = "medium",
srcs = ["wrappers/framework_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_data",
@@ -901,7 +906,7 @@
name = "profiling_test",
size = "small",
srcs = ["lib/profiling_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":profiling",
@@ -915,7 +920,7 @@
name = "curses_ui_test",
size = "small",
srcs = ["cli/curses_ui_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_windows",
@@ -936,7 +941,7 @@
name = "readline_ui_test",
size = "small",
srcs = ["cli/readline_ui_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cli_config",
@@ -1031,6 +1036,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
xla_enable_strict_auto_jit = False, # Tests TF:Classic implementation.
)
@@ -1048,6 +1054,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = ["notsan"],
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -1067,6 +1074,7 @@
"//tensorflow/python:training",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -1084,6 +1092,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = ["no_windows_gpu"],
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -1092,7 +1101,7 @@
name = "debugger_cli_common_test",
size = "small",
srcs = ["cli/debugger_cli_common_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debugger_cli_common",
@@ -1107,7 +1116,7 @@
name = "cli_config_test",
size = "small",
srcs = ["cli/cli_config_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cli_config",
@@ -1121,7 +1130,7 @@
name = "command_parser_test",
size = "small",
srcs = ["cli/command_parser_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":command_parser",
@@ -1134,7 +1143,7 @@
name = "tensor_format_test",
size = "small",
srcs = ["cli/tensor_format_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cli_test_utils",
@@ -1152,7 +1161,7 @@
name = "cli_shared_test",
size = "small",
srcs = ["cli/cli_shared_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cli_shared",
@@ -1172,7 +1181,7 @@
srcs = [
"cli/evaluator_test.py",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_data",
@@ -1216,6 +1225,7 @@
"//tensorflow/python:util",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = ["no_windows"], # TODO: needs investigation on Windows
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -1224,7 +1234,7 @@
name = "profile_analyzer_cli_test",
size = "small",
srcs = ["cli/profile_analyzer_cli_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debugger_cli_common",
@@ -1260,6 +1270,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"no_oss", # Test flaky due to port collisions.
"no_windows",
@@ -1285,6 +1296,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"no_oss", # Test flaky due to port collisions.
"no_windows",
@@ -1315,6 +1327,7 @@
"//tensorflow/python:variables",
],
grpc_enabled = True,
+ python_version = "PY3",
tags = [
"no_oss", # Incompatible with bazel_pip.
"no_windows",
@@ -1327,7 +1340,7 @@
name = "dumping_wrapper_test",
size = "small",
srcs = ["wrappers/dumping_wrapper_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":debug_data",
@@ -1350,7 +1363,7 @@
name = "local_cli_wrapper_test",
size = "small",
srcs = ["wrappers/local_cli_wrapper_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":cli_shared",
@@ -1379,7 +1392,7 @@
name = "disk_usage_test",
size = "small",
srcs = ["wrappers/disk_usage_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":dumping_wrapper",
diff --git a/tensorflow/python/debug/lib/op_callbacks_common.py b/tensorflow/python/debug/lib/op_callbacks_common.py
index 3fb2ab0..76d44dd 100644
--- a/tensorflow/python/debug/lib/op_callbacks_common.py
+++ b/tensorflow/python/debug/lib/op_callbacks_common.py
@@ -28,10 +28,17 @@
b"Exit",
b"Identity",
b"If",
+ b"LoopCond",
b"Merge",
b"NextIteration",
b"StatelessIf",
b"StatefulPartitionedCall",
b"Switch",
b"While",
+ # TPU-specific ops begin.
+ b"TPUReplicatedInput",
+ b"TPUReplicateMetadata",
+ b"TPUCompilationResult",
+ b"TPUReplicatedOutput",
+ b"ConfigureDistributedTPU",
)
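
These byte-string op type names are consulted so that internal control-flow
and TPU plumbing nodes are not surfaced to user op callbacks; a sketch of the
check, assuming the enclosing tuple is named OP_CALLBACK_SKIP_OPS:

    from tensorflow.python.debug.lib import op_callbacks_common

    def should_report(op_type):
      # op_type arrives as bytes, matching the entries in the skip list.
      return op_type not in op_callbacks_common.OP_CALLBACK_SKIP_OPS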
diff --git a/tensorflow/python/distribute/BUILD b/tensorflow/python/distribute/BUILD
index ee820d0..16a49b0 100644
--- a/tensorflow/python/distribute/BUILD
+++ b/tensorflow/python/distribute/BUILD
@@ -56,6 +56,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python:state_ops",
],
+ python_version = "PY3",
)
py_library(
@@ -118,6 +119,7 @@
"//tensorflow/python:client_testlib",
"//tensorflow/python:framework_ops",
],
+ python_version = "PY3",
)
py_library(
@@ -170,7 +172,7 @@
name = "distribute_lib_test",
size = "small",
srcs = ["distribute_lib_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_rocm",
@@ -220,7 +222,7 @@
name = "distribute_coordinator_test",
size = "medium",
srcs = ["distribute_coordinator_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_oss_py2"], # b/138443278
deps = [
@@ -389,6 +391,7 @@
"//tensorflow/python/eager:def_function",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
tags = ["no_pip"],
)
@@ -426,6 +429,7 @@
"//tensorflow/python/eager:test",
"//tensorflow/python:framework_ops",
],
+ python_version = "PY3",
)
py_library(
@@ -447,7 +451,7 @@
name = "numpy_dataset_test",
size = "small",
srcs = ["numpy_dataset_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":numpy_dataset",
@@ -498,12 +502,13 @@
"//tensorflow/python:io_ops",
"//tensorflow/python:util",
],
+ python_version = "PY3",
)
py_test(
name = "multi_worker_util_test",
srcs = ["multi_worker_util_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":multi_worker_util",
@@ -584,7 +589,7 @@
py_test(
name = "shared_variable_creator_test",
srcs = ["shared_variable_creator_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":shared_variable_creator",
@@ -658,7 +663,7 @@
py_test(
name = "strategy_combinations_test",
srcs = ["strategy_combinations_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [
":combinations",
":reduce_util",
@@ -712,6 +717,7 @@
"//tensorflow/python:variable_scope",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -721,6 +727,7 @@
name = "checkpointing_test",
srcs = ["checkpointing_test.py"],
main = "checkpointing_test.py",
+ python_version = "PY3",
deps = [
":tpu_strategy",
"//tensorflow/compiler/tests:xla_test",
@@ -774,6 +781,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -795,6 +803,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
tags = [
# TODO(b/138143527): Re-enable after fixing Guitar failure.
# "multi_and_single_gpu",
@@ -811,6 +820,7 @@
"//tensorflow/python/eager:test",
],
grpc_enabled = True,
+ python_version = "PY3",
)
py_library(
@@ -837,6 +847,7 @@
"//tensorflow/python/compat:v2_compat",
"//tensorflow/python/training/tracking:util",
],
+ python_version = "PY3",
)
py_library(
@@ -872,6 +883,7 @@
name = "values_test",
srcs = ["values_test.py"],
main = "values_test.py",
+ python_version = "PY3",
tags = [
"no_oss", # http://b/119349471
],
@@ -902,6 +914,7 @@
name = "moving_averages_test",
srcs = ["moving_averages_test.py"],
main = "moving_averages_test.py",
+ python_version = "PY3",
deps = [
"//tensorflow/python:array_ops",
"//tensorflow/python:constant_op",
@@ -919,6 +932,7 @@
name = "custom_training_loop_test",
srcs = ["custom_training_loop_test.py"],
main = "custom_training_loop_test.py",
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -937,6 +951,7 @@
name = "minimize_loss_test",
srcs = ["minimize_loss_test.py"],
main = "minimize_loss_test.py",
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
"no_oss", # TODO(b/139815303): enable after this is fixed.
@@ -992,6 +1007,7 @@
name = "step_fn_test",
srcs = ["step_fn_test.py"],
main = "step_fn_test.py",
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
"no_rocm",
@@ -1022,6 +1038,7 @@
"//tensorflow/python:variable_scope",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -1048,6 +1065,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
)
cuda_py_test(
@@ -1073,6 +1091,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
shard_count = 5,
tags = [
"guitar",
@@ -1101,6 +1120,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/eager:test",
],
+ python_version = "PY3",
tags = [
"guitar",
"multi_and_single_gpu",
@@ -1111,6 +1131,7 @@
name = "metrics_v1_test",
srcs = ["metrics_v1_test.py"],
main = "metrics_v1_test.py",
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -1131,6 +1152,7 @@
name = "keras_metrics_test",
srcs = ["keras_metrics_test.py"],
main = "keras_metrics_test.py",
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -1151,6 +1173,7 @@
name = "zero_batch_test",
srcs = ["zero_batch_test.py"],
main = "zero_batch_test.py",
+ python_version = "PY3",
deps = [
":combinations",
":multi_worker_test_base",
@@ -1195,6 +1218,7 @@
srcs = ["saved_model_save_load_test.py"],
full_precision = True,
main = "saved_model_save_load_test.py",
+ python_version = "PY3",
shard_count = 5,
deps = [
":saved_model_test_base",
@@ -1208,6 +1232,7 @@
srcs = ["keras_save_load_test.py"],
full_precision = True,
main = "keras_save_load_test.py",
+ python_version = "PY3",
shard_count = 5,
deps = [
":saved_model_test_base",
@@ -1221,6 +1246,7 @@
srcs = ["saved_model_mixed_api_test.py"],
full_precision = True,
main = "saved_model_mixed_api_test.py",
+ python_version = "PY3",
shard_count = 5,
deps = [
":saved_model_test_base",
@@ -1233,6 +1259,7 @@
name = "ctl_correctness_test",
srcs = ["ctl_correctness_test.py"],
main = "ctl_correctness_test.py",
+ python_version = "PY3",
shard_count = 10,
tags = [
"multi_and_single_gpu",
@@ -1277,6 +1304,7 @@
"//tensorflow/python/estimator:estimator_py",
"//tensorflow/python/keras/mixed_precision/experimental:test_util",
],
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
],
@@ -1311,6 +1339,7 @@
"//tensorflow/python/eager:context",
"//tensorflow/python/estimator:estimator_py",
],
+ python_version = "PY3",
tags = [
"multi_and_single_gpu",
"no_oss", # TODO(b/133330625)
diff --git a/tensorflow/python/distribute/combinations.py b/tensorflow/python/distribute/combinations.py
index 0c1fdbe..c4915eb 100644
--- a/tensorflow/python/distribute/combinations.py
+++ b/tensorflow/python/distribute/combinations.py
@@ -79,7 +79,8 @@
if not number_of_required_gpus and GPUCombination.GPU_TEST:
return (False, "Test that doesn't require GPUs.")
- elif context.num_gpus() < number_of_required_gpus:
+ elif (number_of_required_gpus > 0
+ and context.num_gpus() < number_of_required_gpus):
return (False, ("Only {} of {} required GPUs are available.".format(
context.num_gpus(), number_of_required_gpus)))
else:
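
The added `number_of_required_gpus > 0` clause likely guards against the
parameter being None, since `int < None` raises TypeError under Python 3; a
standalone sketch of the repaired predicate (names hypothetical):

    def gpu_skip_reason(num_available_gpus, number_of_required_gpus):
      # number_of_required_gpus may be None; compare only when positive.
      if (number_of_required_gpus and
          num_available_gpus < number_of_required_gpus):
        return 'Only {} of {} required GPUs are available.'.format(
            num_available_gpus, number_of_required_gpus)
      return None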
diff --git a/tensorflow/python/distribute/input_lib.py b/tensorflow/python/distribute/input_lib.py
index 98c915f..f1f9a0e 100644
--- a/tensorflow/python/distribute/input_lib.py
+++ b/tensorflow/python/distribute/input_lib.py
@@ -22,7 +22,6 @@
import six
-from tensorflow.python import tf2
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.experimental.ops import distribute
from tensorflow.python.data.ops import dataset_ops
@@ -33,7 +32,6 @@
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.eager import context
-from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import dtypes
@@ -42,7 +40,6 @@
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
-from tensorflow.python.framework import type_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
@@ -76,21 +73,15 @@
Returns:
A wrapped tf.data.DatasetV1 or tf.data.DatasetV2 instance.
"""
- # We create a DistributedDataset if TF 2.x is enabled. This is to allow us to
- # expose a subset of APIs on the dataset and create a DistributedIterator vs
- # a DistributedIteratorV1.
- # In TF 2 we condition on being in eager/tf.function since the distributed
- # dataset and iterator we create is only supported in eager/tf.function.
- # TODO(b/143568310): Condition only on TF 2 vs TF 1 consistent with tf.data.
- if tf2.enabled() and ops.executing_eagerly_outside_functions():
- return DistributedDataset(
+ if isinstance(dataset, dataset_ops.DatasetV1):
+ return DistributedDatasetV1(
dataset,
input_workers,
strategy,
split_batch_by=split_batch_by,
input_context=input_context)
else:
- return DistributedDatasetV1(
+ return DistributedDataset(
dataset,
input_workers,
strategy,
@@ -122,13 +113,7 @@
Returns:
A wrapped tf.data.DatasetV1 or tf.data.DatasetV2 instance.
"""
- # We create a DistributedDataset if TF 2.x is enabled. This is to allow us to
- # expose a subset of APIs on the dataset and create a DistributedIterator vs
- # a DistributedIteratorV1.
- # In TF 2 we condition on being in eager/tf.function since the distributed
- # dataset and iterator we create is only supported in eager/tf.function.
- # TODO(b/143568310): Condition only on TF 2 vs TF 1 consistent with tf.data.
- if tf2.enabled() and ops.executing_eagerly_outside_functions():
+ if ops.executing_eagerly_outside_functions():
return DistributedDatasetsFromFunction(
dataset_fn,
input_workers,
@@ -157,15 +142,14 @@
"""
self._device_map = device_map
self._logical_device = logical_device
- self._worker_device_pairs = worker_device_pairs
if worker_device_pairs is None:
devices = device_map.logical_to_actual_devices(logical_device)
- self._worker_device_pairs = ((
+ worker_device_pairs = ((
device_util.canonicalize("/device:CPU:0", devices[0]),
devices),)
- self._input_worker_devices = tuple(d for d, _ in self._worker_device_pairs)
+ self._input_worker_devices = tuple(d for d, _ in worker_device_pairs)
self._fed_devices = tuple(tuple(device_util.canonicalize(d) for d in f)
- for _, f in self._worker_device_pairs)
+ for _, f in worker_device_pairs)
flattened = tuple(d for l in self._fed_devices for d in l)
assert (flattened ==
device_map.logical_to_actual_devices(logical_device)), (
@@ -173,21 +157,6 @@
(flattened, logical_device,
device_map.logical_to_actual_devices(logical_device)))
- def serialize(self):
- config = {}
- config["_device_map"] = values._serialize_device_map(self._device_map) # pylint: disable=protected-access
- config["_logical_device"] = self._logical_device
- if self._worker_device_pairs is not None:
- config["_worker_device_pairs"] = tuple((d, tuple(l)) for d, l in
- self._worker_device_pairs)
- return config
-
- @staticmethod
- def deserialize(config):
- return InputWorkers(values._deserialize_device_map(config["_device_map"]), # pylint: disable=protected-access
- config["_worker_device_pairs"],
- config["_logical_device"])
-
@property
def device_map(self):
return self._device_map
@@ -260,26 +229,19 @@
return global_has_value, replicas
-def _get_static_shape(iterators):
- """Returns a boolean indicating if the input is fully defined."""
- static_shape = True
- for iterator in iterators:
- if not isinstance(iterator, (_SingleWorkerOwnedDatasetIterator,
- _SingleWorkerDatasetIterator)):
- continue
- flattened_shapes = nest.flatten(iterator.output_shapes)
- for output_shape in flattened_shapes:
- if not output_shape.is_fully_defined():
- static_shape = False
- break
- return static_shape
-
-
-class DistributedIteratorBase(object):
+class DistributedIterator(object):
"""Common implementation for all input iterators."""
def __init__(self, input_workers, iterators, strategy):
- static_shape = _get_static_shape(iterators)
+ static_shape = True
+ for iterator in iterators:
+ if not isinstance(iterator, _SingleWorkerDatasetIterator):
+ continue
+ flattened_shapes = nest.flatten(iterator.output_shapes)
+ for output_shape in flattened_shapes:
+ if not output_shape.is_fully_defined():
+ static_shape = False
+ break
# TODO(b/133073708): we currently need a flag to control the usage because
# there is a performance difference between get_next() and
@@ -392,10 +354,6 @@
return values.regroup(self._input_workers.device_map, replicas)
-
-class DistributedIteratorV1(DistributedIteratorBase):
- """Input Iterator for tf.data.DatasetV1."""
-
# We need a private initializer method for re-initializing multidevice
# iterators when used with Keras training loops. If we don't reinitialize the
# iterator we run into memory leak issues (b/123315763).
@@ -406,15 +364,19 @@
init_ops.extend(it.initialize())
return control_flow_ops.group(init_ops)
+
+class DistributedIteratorV1(DistributedIterator):
+ """Input Iterator for tf.data.DatasetV1."""
+
# TODO(anjalisridhar): Move to using `initializer` instead to be consistent
# with tf.data iterator APIs.
def initialize(self):
- """Initialize underlying iterators.
+ """Initialze underlying iterators.
Returns:
A list of any initializer ops that should be run.
"""
- return self._initializer
+ return super(DistributedIteratorV1, self)._initializer
@property
def initializer(self):
@@ -443,156 +405,6 @@
return None
-class DistributedIteratorSpec(type_spec.TypeSpec):
- """Type specification for `DistributedIterator`."""
-
- __slots__ = ["_input_workers", "_element_spec", "_strategy"]
-
- def __init__(self, input_workers, element_spec, strategy):
- # We don't want to allow deserialization of this class because we don't
- # serialize the strategy object. Currently the only places where
- # _deserialize is called is when we save/restore using SavedModels.
- if isinstance(input_workers, tuple):
- raise NotImplementedError("DistributedIteratorSpec does not have support "
- "for deserialization.")
- else:
- self._input_workers = input_workers
- self._element_spec = element_spec
- self._strategy = strategy
-
- @property
- def value_type(self):
- return DistributedIterator
-
- def _serialize(self):
- # We cannot serialize the strategy object so we convert it to an id that we
- # can use for comparison.
- return (self._input_workers.serialize(),
- self._element_spec, id(self._strategy))
-
- def _deserialize(self):
- raise ValueError("Deserialization is currently unsupported for "
- "DistributedIteratorSpec.")
-
- @staticmethod
- def _is_compatible(a, b):
- """Returns true if the given type serializations compatible."""
- if type(a) is not type(b):
- return False
- if isinstance(a, tuple):
- return (len(a) == len(b) and
- all(DistributedIteratorSpec._is_compatible(x, y) for (x, y) in
- zip(a, b)))
- if isinstance(a, dict):
- return (len(a) == len(b) and sorted(a.keys()) == sorted(b.keys()) and all(
- DistributedIteratorSpec._is_compatible(a[k], b[k]) for k in a.keys()))
- if isinstance(a, (type_spec.TypeSpec, tensor_shape.TensorShape,
- dtypes.DType)):
- return a.is_compatible_with(b)
- return a == b
-
- # Overriding this method so that we can merge and reconstruct the spec object
- def most_specific_compatible_type(self, other):
- """Returns the most specific TypeSpec compatible with `self` and `other`.
-
- Args:
- other: A `TypeSpec`.
-
- Raises:
- ValueError: If there is no TypeSpec that is compatible with both `self`
- and `other`.
- """
- # pylint: disable=protected-access
- if type(self) is not type(other):
- raise ValueError("No TypeSpec is compatible with both %s and %s" %
- (self, other))
- if not self._is_compatible(self._input_workers.serialize(),
- other._input_workers.serialize()):
- raise ValueError("_input_workers is not compatible with both %s "
- "and %s" % (self, other))
- if self._element_spec != other._element_spec:
- raise ValueError("_element_spec is not compatible with both %s "
- "and %s" % (self, other))
- if id(self._strategy) != id(other._strategy):
- raise ValueError("tf.distribute strategy is not compatible with both %s "
- "and %s" % (self, other))
- return DistributedIteratorSpec(self._input_workers, self._element_spec,
- self._strategy)
-
- @property
- def _component_specs(self):
- specs = []
- worker_device_pairs = self._input_workers._worker_device_pairs # pylint: disable=protected-access
- for i in range(len(worker_device_pairs)):
- input_device, compute_devices = worker_device_pairs[i]
- specs.append(_SingleWorkerDatasetIteratorSpec(input_device,
- compute_devices,
- element_spec=
- self._element_spec))
- return specs
-
- def _to_components(self, value):
- return value._iterators # pylint: disable=protected-access
-
- def _from_components(self, components):
- return DistributedIterator(input_workers=self._input_workers,
- iterators=None,
- components=components,
- element_spec=self._element_spec,
- strategy=self._strategy)
-
- @staticmethod
- def from_value(value):
- # pylint: disable=protected-access
- return DistributedIteratorSpec(value._input_workers, value._element_spec,
- value._strategy)
-
-
-class DistributedIterator(DistributedIteratorBase,
- composite_tensor.CompositeTensor):
- """Input Iterator for tf.data.DatasetV2."""
-
- def __init__(self, input_workers=None, iterators=None, strategy=None,
- components=None, element_spec=None):
- if input_workers is None:
- raise ValueError("`input_workers` should be "
- "provided.")
-
- error_message = ("Either `input_workers` or "
- "both `components` and `element_spec` need to be "
- "provided.")
-
- if iterators is None:
- if (components is None or element_spec is None):
- raise ValueError(error_message)
- self._element_spec = element_spec
- self._input_workers = input_workers
- self._iterators = components
- static_shape = _get_static_shape(self._iterators)
- self._strategy = strategy
- if getattr(
- strategy.extended, "experimental_enable_get_next_as_optional", False):
- self._enable_get_next_as_optional = not static_shape
- else:
- self._enable_get_next_as_optional = False
- else:
- if (components is not None and element_spec is not None):
- raise ValueError(error_message)
-
- super(DistributedIterator, self).__init__(input_workers, iterators,
- strategy)
-
- @property
- def element_spec(self):
- return self._element_spec
-
- @property
- def _type_spec(self):
- return DistributedIteratorSpec(self._input_workers,
- self.element_spec,
- self._strategy)
-
-
class _IterableInput(object):
"""Base class for iterable inputs for distribution strategies."""
@@ -684,6 +496,7 @@
`num_input_pipelines` in the `InputContext`.
"""
super(DistributedDataset, self).__init__(input_workers=input_workers)
+
# We clone and shard the dataset on each worker. The current setup tries to
# shard the dataset by files if possible so that each worker sees a
# different subset of files. If that is not possible, will attempt to shard
@@ -743,17 +556,17 @@
self._strategy = strategy
def __iter__(self):
- if (ops.executing_eagerly_outside_functions() or
- ops.get_default_graph().building_function):
- worker_iterators = _create_iterators_per_worker(self._cloned_datasets,
- self._input_workers)
- iterator = DistributedIterator(self._input_workers, worker_iterators,
- self._strategy)
- iterator._element_spec = self.element_spec # pylint: disable=protected-access
- return iterator
+ if not (context.executing_eagerly() or
+ ops.get_default_graph().building_function):
+ raise RuntimeError("__iter__() is only supported inside of tf.function "
+ "or when eager execution is enabled.")
- raise RuntimeError("__iter__() is only supported inside of tf.function "
- "or when eager execution is enabled.")
+ worker_iterators = _create_iterators_per_worker(self._cloned_datasets,
+ self._input_workers)
+ iterator = DistributedIterator(self._input_workers, worker_iterators,
+ self._strategy)
+ iterator.element_spec = self.element_spec # pylint: disable=protected-access
+ return iterator
class DistributedDatasetV1(DistributedDataset):
@@ -778,7 +591,6 @@
Note: This API is deprecated. Please use `for ... in dataset:` to iterate
over the dataset or `iter` to create an iterator.
- over the dataset or `iter` to create an iterator.
Returns:
A DistributedIteratorV1 instance.
@@ -818,21 +630,12 @@
def _get_iterator(self):
worker_iterators = _create_iterators_per_worker(self._cloned_datasets,
- self._input_workers,
- graph_and_eager=True)
+ self._input_workers)
iterator = DistributedIteratorV1(self._input_workers, worker_iterators,
self._strategy)
iterator.element_spec = self.element_spec # pylint: disable=protected-access
return iterator
- def __iter__(self):
- if (ops.executing_eagerly_outside_functions() or
- ops.get_default_graph().building_function):
- return self._get_iterator()
-
- raise RuntimeError("__iter__() is only supported inside of tf.function "
- "or when eager execution is enabled.")
-
# TODO(priyag): Add other replication modes.
class DistributedDatasetsFromFunction(_IterableInput):
@@ -864,21 +667,15 @@
self._input_contexts = input_contexts
self._strategy = strategy
- super(DistributedDatasetsFromFunction, self).__init__(
- input_workers=input_workers)
-
def __iter__(self):
- if (ops.executing_eagerly_outside_functions() or
- ops.get_default_graph().building_function):
- iterators, element_spec = _create_iterators_per_worker_with_input_context(
- self._input_contexts, self._input_workers, self._dataset_fn)
- iterator = DistributedIterator(self._input_workers, iterators,
- self._strategy)
- iterator._element_spec = element_spec # pylint: disable=protected-access
- return iterator
+ if not (context.executing_eagerly() or
+ ops.get_default_graph().building_function):
+ raise RuntimeError("__iter__() is only supported inside of tf.function "
+ "or when eager execution is enabled.")
- raise RuntimeError("__iter__() is only supported inside of tf.function "
- "or when eager execution is enabled.")
+ iterators = _create_iterators_per_worker_with_input_context(
+ self._input_contexts, self._input_workers, self._dataset_fn)
+ return DistributedIterator(self._input_workers, iterators, self._strategy)
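
The datasets-from-function path above is reached through `strategy.experimental_distribute_datasets_from_function` (the name appears in the test diff below). A minimal sketch, assuming the TF 2.x public API of this revision:

```python
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()

def dataset_fn(input_context):
  # Each input pipeline builds its own dataset from the InputContext.
  batch_size = input_context.get_per_replica_batch_size(global_batch_size=8)
  ds = tf.data.Dataset.range(64)
  ds = ds.shard(input_context.num_input_pipelines,
                input_context.input_pipeline_id)
  return ds.batch(batch_size)

dist_dataset = strategy.experimental_distribute_datasets_from_function(
    dataset_fn)
for replica_batch in dist_dataset:  # exercises the __iter__ path above
  pass
```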
class DistributedDatasetsFromFunctionV1(DistributedDatasetsFromFunction):
@@ -905,21 +702,9 @@
return self._get_iterator()
def _get_iterator(self):
- iterators, element_spec = _create_iterators_per_worker_with_input_context(
- self._input_contexts, self._input_workers, self._dataset_fn,
- graph_and_eager=True)
- iterator = DistributedIteratorV1(self._input_workers, iterators,
- self._strategy)
- iterator._element_spec = element_spec # pylint: disable=protected-access
- return iterator
-
- def __iter__(self):
- if (ops.executing_eagerly_outside_functions() or
- ops.get_default_graph().building_function):
- return self._get_iterator()
-
- raise RuntimeError("__iter__() is only supported inside of tf.function "
- "or when eager execution is enabled.")
+ iterators = _create_iterators_per_worker_with_input_context(
+ self._input_contexts, self._input_workers, self._dataset_fn)
+ return DistributedIteratorV1(self._input_workers, iterators, self._strategy)
# TODO(anjalisridhar): This class will be soon be removed in favor of newer
@@ -1005,7 +790,7 @@
split_batch_by=split_batch_by,
input_context=input_context)
worker_iterators = _create_iterators_per_worker(
- dist_dataset._cloned_datasets, input_workers, graph_and_eager=True) # pylint: disable=protected-access
+ dist_dataset._cloned_datasets, input_workers) # pylint: disable=protected-access
super(DatasetIterator, self).__init__(
input_workers,
worker_iterators, # pylint: disable=protected-access
@@ -1059,14 +844,14 @@
return nest.map_structure(create_dummy_tensor, value_structure)
-class _SingleWorkerDatasetIteratorBase(object):
+class _SingleWorkerDatasetIterator(object):
"""Iterator for a single `tf.data.Dataset`."""
def __init__(self, dataset, worker, devices):
"""Create iterator for the `dataset` to fetch data to worker's `devices` .
- A `MultiDeviceIterator` or `OwnedMultiDeviceIterator` is used to prefetch
- input to the devices on the given worker.
+ `MultiDeviceIterator` is used to prefetch input to the devices on the
+ given worker.
Args:
dataset: A `tf.data.Dataset` instance.
@@ -1076,11 +861,13 @@
self._dataset = dataset
self._worker = worker
self._devices = devices
- self._element_spec = dataset.element_spec
self._make_iterator()
def _make_iterator(self):
- raise NotImplementedError("must be implemented in descendants")
+ """Make appropriate iterator on the dataset."""
+ with ops.device(self._worker):
+ self._iterator = multi_device_iterator_ops.MultiDeviceIterator(
+ self._dataset, self._devices)
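
As a rough illustration of what `_make_iterator` sets up (internal `tf.data` API as it exists at this revision; the one-entry device list is only a stand-in for a worker's real compute devices):

```python
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import multi_device_iterator_ops

dataset = dataset_ops.Dataset.range(8).batch(2)
# Prefetches elements to the listed devices; in a real strategy this list
# holds one entry per compute device on the worker.
mdi = multi_device_iterator_ops.MultiDeviceIterator(
    dataset, devices=["/device:CPU:0"])
per_device_elements = mdi.get_next()  # one element per device, in order
```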
def get_next(self, device, name=None):
"""Get next element for the given device."""
@@ -1129,9 +916,9 @@
# Place the condition op in the same device as the data so the data
# doesn't need to be sent back to the worker.
with ops.device(self._devices[i]):
- # Data will be fetched in order, so we only need to check if the first
- # replica has value to see whether there is data left for this single
- # worker.
+ # MultiDeviceIterator fetches data in order, so we only need to check
+ # whether the first replica has a value to know whether any data is
+ # left for this single worker.
if i == 0:
worker_has_value = data.has_value()
@@ -1149,155 +936,8 @@
return worker_has_value, result
-
-class _SingleWorkerDatasetIteratorSpec(type_spec.TypeSpec):
- """Type specification for `_SingleWorkerOwnedDatasetIterator`."""
-
- __slots__ = ["_worker", "_devices", "_element_spec"]
-
- def __init__(self, worker, devices, element_spec):
- self._worker = worker
- self._devices = devices
- self._element_spec = element_spec
-
- @property
- def value_type(self):
- return _SingleWorkerOwnedDatasetIterator
-
- def _serialize(self):
- return (self._worker, tuple(self._devices), self._element_spec)
-
- @property
- def _component_specs(self):
- specs = []
- specs.append(multi_device_iterator_ops.MultiDeviceIteratorSpec(
- self._devices, self._worker, element_spec=self._element_spec))
- return specs
-
- def _to_components(self, value):
- return [value._iterator] # pylint: disable=protected-access
-
- def _from_components(self, components):
- return _SingleWorkerOwnedDatasetIterator(
- dataset=None,
- worker=self._worker,
- devices=self._devices,
- components=components,
- element_spec=self._element_spec)
-
- @staticmethod
- def from_value(value):
- # pylint: disable=protected-access
- return _SingleWorkerDatasetIteratorSpec(value._worker, value._devices,
- value._element_spec)
-
-
-class _SingleWorkerOwnedDatasetIterator(_SingleWorkerDatasetIteratorBase,
- composite_tensor.CompositeTensor):
- """Iterator for a DistributedDataset instance."""
-
- def __init__(self, dataset=None, worker=None, devices=None, components=None,
- element_spec=None):
- """Create iterator for the `dataset` to fetch data to worker's `devices` .
-
- `OwnedMultiDeviceIterator` is used to prefetch input to the devices on the
- given worker. The lifetime of this iterator is tied to the encompassing
- python object. Once we go out of scope of the python object or return from
- a tf.function the underlying iterator resource is deleted.
-
- Args:
- dataset: A `tf.data.Dataset` instance.
- worker: Worker on which ops should be created.
- devices: Distribute data from `dataset` to these devices.
- components: Tensor components to construct the
- _SingleWorkerOwnedDatasetIterator from.
- element_spec: A nested structure of `TypeSpec` objects that represents the
- type specification of elements of the iterator.
- """
- if worker is None or devices is None:
- raise ValueError("Both `worker` and `devices` should be provided")
-
- error_message = ("Either `dataset` or both `components` and `element_spec` "
- "need to be provided.")
-
- if dataset is None:
- if (components is None or element_spec is None):
- raise ValueError(error_message)
- self._element_spec = element_spec
- self._worker = worker
- self._devices = devices
- self._iterator = components[0]
- else:
- if (components is not None or element_spec is not None):
- raise ValueError(error_message)
- super(_SingleWorkerOwnedDatasetIterator, self).__init__(dataset, worker,
- devices)
-
- def _make_iterator(self):
- """Make appropriate iterator on the dataset."""
- with ops.device(self._worker):
- self._iterator = multi_device_iterator_ops.OwnedMultiDeviceIterator(
- self._dataset, self._devices)
-
- @property
- def element_spec(self):
- return self._element_spec
-
- @property
- def _type_spec(self):
- return _SingleWorkerDatasetIteratorSpec(self._worker, self._devices,
- self._element_spec)
-
- @property
- def output_classes(self):
- """Returns the class of each component of an element of this iterator.
-
- The expected values are `tf.Tensor` and `tf.SparseTensor`.
-
- Returns:
- A nested structure of Python `type` objects corresponding to each
- component of an element of this dataset.
- """
- return nest.map_structure(
- lambda component_spec: component_spec._to_legacy_output_classes(), # pylint: disable=protected-access
- self._element_spec)
-
- @property
- def output_shapes(self):
- """Returns the shape of each component of an element of this iterator.
-
- Returns:
- A nested structure of `tf.TensorShape` objects corresponding to each
- component of an element of this dataset.
- """
- return nest.map_structure(
- lambda component_spec: component_spec._to_legacy_output_shapes(), # pylint: disable=protected-access
- self._element_spec)
-
- @property
- def output_types(self):
- """Returns the type of each component of an element of this iterator.
-
- Returns:
- A nested structure of `tf.DType` objects corresponding to each component
- of an element of this dataset.
- """
- return nest.map_structure(
- lambda component_spec: component_spec._to_legacy_output_types(), # pylint: disable=protected-access
- self._element_spec)
-
-
-class _SingleWorkerDatasetIterator(_SingleWorkerDatasetIteratorBase):
- """Iterator for a single DistributedDatasetV1 instance."""
-
- def _make_iterator(self):
- """Make appropriate iterator on the dataset."""
- with ops.device(self._worker):
- self._iterator = multi_device_iterator_ops.MultiDeviceIterator(
- self._dataset, self._devices)
-
def initialize(self):
- """Initialize underlying iterator.
+ """Initialze underlying iterator.
In eager execution, this simply recreates the underlying iterator.
In graph execution, it returns the initializer ops for the underlying
@@ -1358,8 +998,7 @@
return []
-def _create_iterators_per_worker(worker_datasets, input_workers,
- graph_and_eager=False):
+def _create_iterators_per_worker(worker_datasets, input_workers):
"""Create a multidevice iterator on each of the workers."""
assert isinstance(input_workers, InputWorkers)
@@ -1368,59 +1007,29 @@
for i, worker in enumerate(input_workers.worker_devices):
with ops.device(worker):
worker_devices = input_workers.compute_devices_for_worker(i)
- # We need an additional graph_and_eager condition to test for when we
- # create a DistributedDatasetV1 in TF 2.x and graph mode.
- # TODO(b/143568310): Condition only on graph vs eager consistent with
- # tf.data.
- if (tf2.enabled() and ops.executing_eagerly_outside_functions() and
- not graph_and_eager):
- iterator = _SingleWorkerOwnedDatasetIterator(worker_datasets[i], worker,
- worker_devices)
- else:
- iterator = _SingleWorkerDatasetIterator(worker_datasets[i], worker,
- worker_devices)
+ iterator = _SingleWorkerDatasetIterator(worker_datasets[i], worker,
+ worker_devices)
iterators.append(iterator)
return iterators
def _create_iterators_per_worker_with_input_context(input_contexts,
input_workers,
- dataset_fn,
- graph_and_eager=False):
+ dataset_fn):
"""Create a multidevice iterator per workers given a dataset function."""
iterators = []
- element_specs = []
for i, ctx in enumerate(input_contexts):
worker = input_workers.worker_devices[i]
with ops.device(worker):
dataset = dataset_fn(ctx)
- element_specs.append(dataset.element_spec)
# TODO(b/138745411): Remove once stateful transformations are supported.
options = dataset_ops.Options()
options.experimental_distribute._make_stateless = True # pylint: disable=protected-access
dataset = dataset.with_options(options)
devices = input_workers.compute_devices_for_worker(i)
- # We need an additional graph_and_eager condition to test for when we
- # create a DistributedDatasetV1 in TF 2.x and graph mode.
- # TODO(b/143568310): Condition only on graph vs eager consistent with
- # tf.data.
- if (tf2.enabled() and ops.executing_eagerly_outside_functions() and
- not graph_and_eager):
- iterator = _SingleWorkerOwnedDatasetIterator(dataset, worker,
- devices)
- else:
- iterator = _SingleWorkerDatasetIterator(dataset, worker,
- devices)
+ iterator = _SingleWorkerDatasetIterator(dataset, worker, devices)
iterators.append(iterator)
-
- if not element_specs:
- raise ValueError("You should have at least 1 element_spec from the dataset "
- "on a given input worker. Instead found an empty list "
- "of element_specs.")
- for spec in element_specs:
- nest.assert_same_structure(element_specs[0], spec)
-
- return iterators, element_specs[0]
+ return iterators
# TODO(sourabhbajaj): Remove this in favor of distributed datasets
diff --git a/tensorflow/python/distribute/input_lib_test.py b/tensorflow/python/distribute/input_lib_test.py
index c00251f..9636305 100644
--- a/tensorflow/python/distribute/input_lib_test.py
+++ b/tensorflow/python/distribute/input_lib_test.py
@@ -42,11 +42,8 @@
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
-from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
-from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
-from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
@@ -106,21 +103,20 @@
split_batch_by,
strategy,
input_context=None):
- if input_type == "dataset":
- if tf2.enabled():
- return input_lib.DistributedDataset(
- dataset,
- input_workers,
- strategy,
- split_batch_by=split_batch_by,
- input_context=input_context)
- else:
- return input_lib.DistributedDatasetV1(
- dataset,
- input_workers,
- strategy,
- split_batch_by=split_batch_by,
- input_context=input_context)
+ if isinstance(dataset, (dataset_ops.Dataset, dataset_ops.DatasetV1Adapter)):
+ return input_lib.DistributedDatasetV1(
+ dataset,
+ input_workers,
+ strategy,
+ split_batch_by=split_batch_by,
+ input_context=input_context)
+ elif input_type == "dataset":
+ return input_lib.DistributedDataset(
+ dataset,
+ input_workers,
+ strategy,
+ split_batch_by=split_batch_by,
+ input_context=input_context)
else:
return strategy.experimental_distribute_datasets_from_function(dataset)
@@ -141,9 +137,6 @@
if api_type == "wrap_into_iterator" and iteration_type == "for_loop":
self.skipTest("unsupported test combination.")
- if api_type == "wrap_into_iterator" and input_type == "input_fn":
- self.skipTest("unsupported test combination.")
-
devices = nest.flatten([ds for _, ds in worker_device_pairs])
device_map = values.ReplicaDeviceMap(devices)
input_workers = input_lib.InputWorkers(device_map, worker_device_pairs)
@@ -167,7 +160,7 @@
strategy,
input_context=input_context)
- if ops.executing_eagerly_outside_functions():
+ if context.executing_eagerly():
iterator = iter(dataset)
else:
if isinstance(dataset, input_lib.DistributedDatasetV1):
@@ -177,8 +170,10 @@
if iteration_type == "get_next":
evaluate = lambda x: sess.run(x) if sess else self.evaluate(x)
- if not ops.executing_eagerly_outside_functions():
+ if isinstance(iterator, input_lib.DistributedIteratorV1):
evaluate(control_flow_ops.group(iterator.initialize()))
+ else:
+ evaluate(control_flow_ops.group(iterator._initializer))
for expected_value in expected_values:
next_element = iterator.get_next()
@@ -196,13 +191,10 @@
next_element) for r in range(len(devices))])
# After re-initializing the iterator, should be able to iterate again.
- if not ops.executing_eagerly_outside_functions():
+ if isinstance(iterator, input_lib.DistributedIteratorV1):
evaluate(control_flow_ops.group(iterator.initialize()))
else:
- if api_type == "wrap_into_iterator":
- self.skipTest("unsupported test combination")
- else:
- iterator = iter(dataset)
+ evaluate(control_flow_ops.group(iterator._initializer))
for expected_value in expected_values:
next_element = iterator.get_next()
@@ -241,9 +233,6 @@
strategy_combinations.mirrored_strategy_with_gpu_and_cpu
]))
def testMultiDeviceIterInitialize(self, distribution):
- if tf2.enabled():
- self.skipTest("unsupported test combination")
-
worker_device_pairs = [("", ["/device:GPU:0", "/device:CPU:0"])]
dataset_fn = lambda _: dataset_ops.DatasetV1.range(10)
@@ -264,6 +253,27 @@
@combinations.generate(
combinations.combine(
+ mode=["graph"],
+ distribution=[
+ strategy_combinations.one_device_strategy,
+ strategy_combinations.mirrored_strategy_with_one_cpu
+ ]))
+ def testDatasetV2IterError(self, distribution):
+ worker_device_pairs = [("", ["/device:CPU:0"])]
+ devices = nest.flatten([ds for _, ds in worker_device_pairs])
+ device_map = values.ReplicaDeviceMap(devices)
+ input_workers = input_lib.InputWorkers(device_map, worker_device_pairs)
+ dataset_fn = lambda _: dataset_ops.DatasetV2.range(10).batch(2)
+
+ dist_dataset = input_lib.get_distributed_dataset(
+ dataset_fn(distribute_lib.InputContext()), input_workers, distribution)
+
+ with self.assertRaisesRegexp(RuntimeError,
+ "or when eager execution is enabled"):
+ iter(dist_dataset)
+
+ @combinations.generate(
+ combinations.combine(
mode=["graph", "eager"],
input_type=["input_fn", "dataset"],
api_type=["wrap_into_iterator", "wrap_into_dataset"],
@@ -279,7 +289,7 @@
if tf2.enabled():
dataset_fn = lambda _: dataset_ops.DatasetV2.range(10)
else:
- dataset_fn = lambda _: dataset_ops.DatasetV1.range(10)
+ dataset_fn = lambda _: dataset_ops.Dataset.range(10)
dataset_or_input_fn = self._create_dataset_or_input_fn(
input_type, dataset_fn)
@@ -931,256 +941,5 @@
sess=sess)
-class DistributedIteratorTest(DistributedIteratorTestBase,
- parameterized.TestCase):
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testTypeSpec(self, input_type, distribution,
- enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator has CompositeTensor support in "
- "TF 2.0 only.")
- dataset = dataset_ops.DatasetV2.range(10).batch(2)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- iterator = iter(dist_dataset)
-
- spec = iterator._type_spec
- self.assertEqual(spec._input_workers, iterator._input_workers)
- self.assertEqual(spec._element_spec,
- tensor_spec.TensorSpec(shape=(None,), dtype=dtypes.int64,
- name=None))
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testTypeSpecRoundTrip(self, input_type,
- distribution, enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator CompositeTensor support is only "
- "present in TF 2.0 only.")
-
- dataset = dataset_ops.DatasetV2.range(10).batch(2)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- iterator = iter(dist_dataset)
-
- spec = iterator._type_spec
-
- tensor_list = spec._to_components(iterator)
- re_iterator = spec._from_components(tensor_list)
-
- self.assertEqual(iterator._input_workers, re_iterator._input_workers)
- self.assertAllEqual(iterator._iterators, re_iterator._iterators)
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testDoesNotTriggerFunctionTracing(self, input_type, distribution,
- enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator CompositeTensor support is only "
- "present in TF 2.0 only.")
-
- trace_count = [0]
-
- @def_function.function
- def f(iterator):
- trace_count[0] += 1
- counter = np.int64(0)
- for _ in range(5):
- next(iterator)
- counter += 1
- return counter
-
- dataset = dataset_ops.DatasetV2.range(10).batch(2)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- for _ in range(3):
- iterator = iter(dist_dataset)
- counter = f(iterator)
-
- self.assertEqual(trace_count[0], 1)
- self.assertEqual(counter, 5)
-
-
-class RaggedTensorDistributedIteratorTest(DistributedIteratorTestBase,
- parameterized.TestCase):
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testTypeSpec(self, input_type, distribution,
- enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator has CompositeTensor support in "
- "TF 2.0 only.")
- ctx = distribute_lib.InputContext()
- batch_size = ctx.get_per_replica_batch_size(8)
- # Use 20 which isn't divisible by 8 to test partial batch behavior.
- row_lengths = np.mod(np.arange(20), 4).astype(np.int64)
- ragged_tensor = ragged_tensor_lib.RaggedTensor.from_row_lengths(
- np.repeat(np.arange(20, dtype=np.float32), row_lengths), row_lengths)
- dataset = dataset_ops.DatasetV2.from_tensor_slices({
- "dense": ragged_tensor.to_tensor(),
- "ragged": ragged_tensor,
- "sparse": ragged_tensor.to_sparse(),
- })
- dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
- dataset = dataset.batch(batch_size)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- iterator = iter(dist_dataset)
-
- spec = iterator._type_spec
- self.assertEqual(spec._input_workers, iterator._input_workers)
- self.assertEqual(spec._element_spec,
- {"sparse": sparse_tensor.SparseTensorSpec(
- ([None, 3]), dtypes.float32),
- "dense": tensor_spec.TensorSpec(
- shape=(None, 3), dtype=dtypes.float32, name=None),
- "ragged": ragged_tensor_lib.RaggedTensorSpec(
- ([None, None]), dtypes.float32, 1,
- dtypes.int64)})
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testTypeSpecRoundTrip(self, input_type,
- distribution, enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator CompositeTensor support is only "
- "present in TF 2.0 only.")
-
- ctx = distribute_lib.InputContext()
- batch_size = ctx.get_per_replica_batch_size(8)
- # Use 20 which isn't divisible by 8 to test partial batch behavior.
- row_lengths = np.mod(np.arange(20), 4).astype(np.int64)
- ragged_tensor = ragged_tensor_lib.RaggedTensor.from_row_lengths(
- np.repeat(np.arange(20, dtype=np.float32), row_lengths), row_lengths)
- dataset = dataset_ops.DatasetV2.from_tensor_slices({
- "dense": ragged_tensor.to_tensor(),
- "ragged": ragged_tensor,
- "sparse": ragged_tensor.to_sparse(),
- })
- dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
- dataset = dataset.batch(batch_size)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- iterator = iter(dist_dataset)
-
- spec = iterator._type_spec
-
- tensor_list = spec._to_components(iterator)
- re_iterator = spec._from_components(tensor_list)
-
- self.assertEqual(iterator._input_workers, re_iterator._input_workers)
- self.assertAllEqual(iterator._iterators, re_iterator._iterators)
-
- @combinations.generate(
- combinations.combine(
- mode=["eager"],
- input_type=["dataset"],
- distribution=[
- strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
- strategy_combinations.tpu_strategy,
- ],
- enable_get_next_as_optional=[True, False]))
- def testDoesNotTriggerFunctionTracing(self, input_type, distribution,
- enable_get_next_as_optional):
- if not tf2.enabled():
- self.skipTest("DistributedIterator CompositeTensor support is only "
- "present in TF 2.0 only.")
-
- trace_count = [0]
-
- @def_function.function
- def f(iterator):
- trace_count[0] += 1
- counter = np.int64(0)
- for _ in range(5):
- next(iterator)
- counter += 1
- return counter
-
- ctx = distribute_lib.InputContext()
- batch_size = ctx.get_per_replica_batch_size(8)
- # Use 50, which isn't divisible by 8, to test partial batch behavior.
- row_lengths = np.mod(np.arange(50), 4).astype(np.int64)
- ragged_tensor = ragged_tensor_lib.RaggedTensor.from_row_lengths(
- np.repeat(np.arange(50, dtype=np.float32), row_lengths), row_lengths)
- dataset = dataset_ops.DatasetV2.from_tensor_slices({
- "dense": ragged_tensor.to_tensor(),
- "ragged": ragged_tensor,
- "sparse": ragged_tensor.to_sparse(),
- })
- dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
- dataset = dataset.batch(batch_size)
-
- distribution.extended.experimental_enable_get_next_as_optional = (
- enable_get_next_as_optional)
-
- dist_dataset = distribution.experimental_distribute_dataset(dataset)
- with distribution.scope():
- for _ in range(3):
- iterator = iter(dist_dataset)
- counter = f(iterator)
-
- self.assertEqual(trace_count[0], 1)
- self.assertEqual(counter, 5)
-
-
if __name__ == "__main__":
test.main()
diff --git a/tensorflow/python/distribute/multi_worker_util.py b/tensorflow/python/distribute/multi_worker_util.py
index c804ed9..4d89b2f 100644
--- a/tensorflow/python/distribute/multi_worker_util.py
+++ b/tensorflow/python/distribute/multi_worker_util.py
@@ -53,7 +53,10 @@
It checks:
0) None of `cluster_spec`, `task_type`, and `task_id` is `None`.
1) task type is one of "chief", "worker" or "evaluator".
- 2) whether there is such a task type as `task_type` in the `cluster_spec`.
+ 2) whether there is such a task type as `task_type` in the `cluster_spec`. The
+ only exception is `evaluator`: it is still a valid configuration when
+ `task_type` is `evaluator` even though it does not appear in `cluster_spec`.
+ This keeps compatibility with `TF_CONFIG` in Estimator.
3) whether there is at most one "chief" job.
4) whether there is at most one "evaluator" job.
5) whether the `task_id` is smaller than the number of tasks for that
@@ -76,7 +79,7 @@
"Unrecognized task_type: %r, valid task types are: \"chief\", "
"\"worker\", \"evaluator\" and \"ps\"." % task_type)
- if task_type and task_type not in cluster_spec:
+ if task_type and task_type not in cluster_spec and task_type != "evaluator":
raise ValueError("`task_type` %r not found in cluster_spec." % task_type)
if len(cluster_spec.get("chief", [])) > 1:
@@ -85,7 +88,8 @@
if len(cluster_spec.get("evaluator", [])) > 1:
raise ValueError("There must be at most one 'evaluator' job.")
- if task_id >= len(cluster_spec[task_type]):
+ # The `evaluator` job is allowed to be missing in `cluster_spec`.
+ if task_type in cluster_spec and task_id >= len(cluster_spec[task_type]):
raise ValueError(
"The `task_id` %d exceeds the maximum id of %s." % (task_id, task_type))
diff --git a/tensorflow/python/distribute/multi_worker_util_test.py b/tensorflow/python/distribute/multi_worker_util_test.py
index dbe57b2..6a51e71 100644
--- a/tensorflow/python/distribute/multi_worker_util_test.py
+++ b/tensorflow/python/distribute/multi_worker_util_test.py
@@ -237,5 +237,30 @@
multi_worker_util.collective_leader(cluster_spec, None, 0), "")
+# Most of the validation logic is covered by the tests above; this class
+# tests the remaining cases.
+class ClusterSpecValidationTest(test.TestCase):
+
+ def testEvaluatorNotInCluster(self):
+ cluster_spec = {
+ "chief": ["127.0.0.1:1234"],
+ "worker": ["127.0.0.1:8964", "127.0.0.1:2333"],
+ "ps": ["127.0.0.1:1926", "127.0.0.1:3141"]
+ }
+ multi_worker_util._validate_cluster_spec(cluster_spec, "chief", 0)
+ multi_worker_util._validate_cluster_spec(cluster_spec, "worker", 0)
+ multi_worker_util._validate_cluster_spec(cluster_spec, "ps", 0)
+ multi_worker_util._validate_cluster_spec(cluster_spec, "evaluator", 0)
+
+ def testWorkerNotInCluster(self):
+ cluster_spec = {
+ "chief": ["127.0.0.1:1234"],
+ "ps": ["127.0.0.1:1926", "127.0.0.1:3141"]
+ }
+ multi_worker_util._validate_cluster_spec(cluster_spec, "evaluator", 0)
+ with self.assertRaisesRegexp(
+ ValueError, "`task_type` 'worker' not found in cluster_spec."):
+ multi_worker_util._validate_cluster_spec(cluster_spec, "worker", 0)
+
+
if __name__ == "__main__":
test.main()
diff --git a/tensorflow/python/distribute/saved_model_test_base.py b/tensorflow/python/distribute/saved_model_test_base.py
index 1001dd4..0815629 100644
--- a/tensorflow/python/distribute/saved_model_test_base.py
+++ b/tensorflow/python/distribute/saved_model_test_base.py
@@ -51,7 +51,7 @@
strategies = [
- # TODO(b/132702156): include default strategy
+ strategy_combinations.default_strategy,
strategy_combinations.one_device_strategy,
strategy_combinations.one_device_strategy_gpu,
strategy_combinations.mirrored_strategy_with_one_cpu,
diff --git a/tensorflow/python/distribute/values.py b/tensorflow/python/distribute/values.py
index 20282f9..0c2a9cc 100644
--- a/tensorflow/python/distribute/values.py
+++ b/tensorflow/python/distribute/values.py
@@ -537,13 +537,13 @@
def __init__(self, value_specs, device_map, logical_device):
if isinstance(device_map, tuple):
- device_map = _deserialize_device_map(device_map)
+ device_map = self._deserialize_device_map(device_map)
self._value_specs = tuple(value_specs)
self._device_map = device_map
self._logical_device = logical_device
def _serialize(self):
- device_map = _serialize_device_map(self._device_map)
+ device_map = self._serialize_device_map(self._device_map)
return (self._value_specs, device_map, self._logical_device)
@property
@@ -610,33 +610,6 @@
return obj
-def _serialize_device_map(device_map):
- if isinstance(device_map, SingleDeviceMap):
- return ("single", device_map.all_devices[0])
- elif isinstance(device_map, ReplicaDeviceMap):
- return ("replica", device_map.all_devices)
- elif isinstance(device_map, WorkerDeviceMap):
- return ("worker", device_map.all_devices,
- device_map.num_replicas_per_worker)
- else:
- raise ValueError("device_map type %s is unsupported "
- % type(device_map).__name__)
-
-
-def _deserialize_device_map(device_map_info):
- """Deserialize a DeviceMap object from a tuple of values."""
- device_map_type = device_map_info[0]
- device_map_args = device_map_info[1:]
- if device_map_type == "single":
- return SingleDeviceMap(*device_map_args)
- elif device_map_type == "replica":
- return ReplicaDeviceMap(*device_map_args)
- elif device_map_type == "worker":
- return WorkerDeviceMap(*device_map_args)
- else:
- raise ValueError("Unexpected value in state tuple")
-
-
def _assign_on_device(device, variable, tensor):
with ops.device(device):
return variable.assign(tensor)
diff --git a/tensorflow/python/eager/function.py b/tensorflow/python/eager/function.py
index 93fd8e9..07619c88 100644
--- a/tensorflow/python/eager/function.py
+++ b/tensorflow/python/eager/function.py
@@ -2527,7 +2527,14 @@
# already.
executing_eagerly = ctx.executing_eagerly()
parent_graph = None
+ xla_context_id = 0
if not executing_eagerly:
+ # We want to force function retracing for each different
+ # XLAControlFlowContext, so add `xla_context_id` to the cache key.
+ tpu_context = _enclosing_xla_context()
+ if tpu_context is not None:
+ xla_context_id = id(tpu_context)
+
with ops.init_scope():
# The graph, or whether we're executing eagerly, should be a part of the
# cache key so we don't improperly capture tensors such as variables.
@@ -2550,10 +2557,6 @@
device_functions = (pydev.merge_device(ctx.device_name),)
else:
device_functions = ()
-
- # We should not be in XLA context in eager mode. So always set
- # `xla_context_id` to 0.
- xla_context_id = 0
else:
colocation_stack = tuple(default_graph._colocation_stack.peek_objs())
if (uses_distribution_strategy
@@ -2565,14 +2568,6 @@
else:
device_functions = ()
- # We want to force function retracing for each different
- # XLAControlFlowContext, so add `xla_context_id` to the cache key.
- tpu_context = _enclosing_xla_context()
- if tpu_context is not None:
- xla_context_id = id(tpu_context)
- else:
- xla_context_id = 0
-
in_cross_replica_context = False
try:
in_cross_replica_context = (strategy_stack[-1].replica_context is None) # pylint: disable=protected-access
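
Distilled to its core, the relocated logic keys the trace cache on the identity of the enclosing XLA control-flow context for every non-eager trace, not only the branch that previously computed it. A hedged sketch of the idea (names mirror the diff; the surrounding cache-key layout is assumed):

```python
def _xla_cache_key_component(ctx, enclosing_xla_context_fn):
  """Returns the XLA component folded into the function cache key."""
  xla_context_id = 0
  if not ctx.executing_eagerly():
    # Two tf.function traces made under different XLAControlFlowContexts
    # must not share a ConcreteFunction, so the context's id() is enough
    # to force a retrace.
    tpu_context = enclosing_xla_context_fn()
    if tpu_context is not None:
      xla_context_id = id(tpu_context)
  return xla_context_id
```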
diff --git a/tensorflow/python/eager/pywrap_tensor.cc b/tensorflow/python/eager/pywrap_tensor.cc
index 8ed9e04..5e463b2 100644
--- a/tensorflow/python/eager/pywrap_tensor.cc
+++ b/tensorflow/python/eager/pywrap_tensor.cc
@@ -632,13 +632,13 @@
return PyLong_FromLongLong(n);
}
-static PyObject* EagerTensor_tensor_handle(EagerTensor* self, void* unused) {
+static PyObject* EagerTensor_handle_data(EagerTensor* self, void* unused) {
Py_INCREF(self->handle_data);
return self->handle_data;
}
-static int EagerTensor_settensor_handle(EagerTensor* self, PyObject* value,
- void* unused) {
+static int EagerTensor_sethandle_data(EagerTensor* self, PyObject* value,
+ void* unused) {
Py_DECREF(self->handle_data);
Py_INCREF(value);
self->handle_data = value;
@@ -733,17 +733,19 @@
static PyGetSetDef EagerTensor_getseters[] = {
{const_cast<char*>("_id"), (getter)EagerTensor_getid, nullptr,
- const_cast<char*>("_id"), nullptr},
+ const_cast<char*>("Tensor ID."), nullptr},
{const_cast<char*>("device"), (getter)EagerTensor_device, nullptr,
- const_cast<char*>("device"), nullptr},
+ const_cast<char*>("Device of op that produced the tensor."), nullptr},
{const_cast<char*>("backing_device"), (getter)EagerTensor_backing_device,
- nullptr, const_cast<char*>("backing_device"), nullptr},
- {const_cast<char*>("_handle_data"), (getter)EagerTensor_tensor_handle,
- (setter)EagerTensor_settensor_handle, const_cast<char*>("_tensor_handle"),
+ nullptr, const_cast<char*>("Device on which tensor's memory is resident."),
+ nullptr},
+ {const_cast<char*>("_handle_data"), (getter)EagerTensor_handle_data,
+ (setter)EagerTensor_sethandle_data,
+ const_cast<char*>("Shape/DType data if the EagerTensor is a DT_RESOURCE"),
nullptr},
{const_cast<char*>("_tensor_shape"), (getter)EagerTensor_tensor_shape,
- (setter)EagerTensor_settensor_shape, const_cast<char*>("_tensor_shape"),
- nullptr},
+ (setter)EagerTensor_settensor_shape,
+ const_cast<char*>("Shape of the tensor."), nullptr},
{nullptr} /* Sentinel */
};
@@ -758,16 +760,18 @@
static PyMethodDef EagerTensor_methods[] = {
{"_numpy_internal", (PyCFunction)EagerTensor_numpy_internal, METH_NOARGS,
- PyDoc_STR("_numpy_internal")},
+ PyDoc_STR("Internal method to get a NumPy array for the tensor.")},
{"_datatype_enum", (PyCFunction)EagerTensor_datatype_enum, METH_NOARGS,
- PyDoc_STR("_datatype_enum")},
+ PyDoc_STR("The DType of the tensor as an enum.")},
{"_shape_tuple", (PyCFunction)EagerTensor_shape_tuple, METH_NOARGS,
- PyDoc_STR("_shape_tuple")},
- {"_rank", (PyCFunction)EagerTensor_rank, METH_NOARGS, PyDoc_STR("_rank")},
+ PyDoc_STR("The shape of the tensor as a python tuple.")},
+ {"_rank", (PyCFunction)EagerTensor_rank, METH_NOARGS,
+ PyDoc_STR("The rank of the tensor.")},
{"_copy_to_device", (PyCFunction)EagerTensor_copy_to_device,
- METH_VARARGS | METH_KEYWORDS, PyDoc_STR("_copy_to_device")},
+ METH_VARARGS | METH_KEYWORDS,
+ PyDoc_STR("Copies the tensor to the desired device.")},
{"_num_elements", (PyCFunction)EagerTensor_num_elements, METH_NOARGS,
- PyDoc_STR("_num_elements")},
+ PyDoc_STR("Number of elements in the tensor.")},
{nullptr, nullptr},
};
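
The renamed accessors and new docstrings above belong to private `EagerTensor` members; from Python they surface roughly as below (the method names are confirmed by the diff, while the printed values assume a concrete 2x2 int32 tensor):

```python
import tensorflow as tf

t = tf.constant([[1, 2], [3, 4]])
print(t._rank())           # 2, "The rank of the tensor."
print(t._shape_tuple())    # (2, 2), "The shape of the tensor as a python tuple."
print(t._num_elements())   # 4, "Number of elements in the tensor."
print(t._datatype_enum())  # numeric DType enum, e.g. 3 for int32
```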
diff --git a/tensorflow/python/eager/remote_cloud_tpu_test.py b/tensorflow/python/eager/remote_cloud_tpu_test.py
index c63832c..d63a892 100644
--- a/tensorflow/python/eager/remote_cloud_tpu_test.py
+++ b/tensorflow/python/eager/remote_cloud_tpu_test.py
@@ -32,12 +32,12 @@
flags.DEFINE_string('zone', None, 'Name of GCP zone with TPU.')
EXPECTED_DEVICES_PRE_CONNECT = [
- '/job:localhost/replica:0/task:0/device:CPU:0',
- '/job:localhost/replica:0/task:0/device:XLA_CPU:0'
+ '/device:CPU:0',
+ '/device:XLA_CPU:0',
]
EXPECTED_DEVICES_AFTER_CONNECT = [
- '/job:localhost/replica:0/task:0/device:CPU:0',
- '/job:localhost/replica:0/task:0/device:XLA_CPU:0',
+ '/device:CPU:0',
+ '/device:XLA_CPU:0',
'/job:worker/replica:0/task:0/device:CPU:0',
'/job:worker/replica:0/task:0/device:XLA_CPU:0',
'/job:worker/replica:0/task:0/device:TPU_SYSTEM:0',
diff --git a/tensorflow/python/feature_column/BUILD b/tensorflow/python/feature_column/BUILD
index 38c3657..ca58ad5 100644
--- a/tensorflow/python/feature_column/BUILD
+++ b/tensorflow/python/feature_column/BUILD
@@ -110,6 +110,7 @@
additional_deps = [
":feature_column_test_main_lib",
],
+ python_version = "PY3",
tags = [
"no_cuda_on_cpu_tap",
"no_pip",
@@ -123,6 +124,7 @@
additional_deps = [
":feature_column_test_main_lib",
],
+ python_version = "PY3",
tags = ["no_pip"],
)
@@ -159,6 +161,7 @@
name = "feature_column_v2_test",
srcs = ["feature_column_v2_test.py"],
additional_deps = [":feature_column_v2_test_main_lib"],
+ python_version = "PY3",
shard_count = 5,
tags = [
"no_cuda_on_cpu_tap",
@@ -173,6 +176,7 @@
additional_deps = [
":feature_column_v2_test_main_lib",
],
+ python_version = "PY3",
tags = ["no_pip"],
)
@@ -235,12 +239,13 @@
"//tensorflow/python:sparse_tensor",
"//tensorflow/python:training",
],
+ python_version = "PY3",
)
py_test(
name = "sequence_feature_column_integration_test",
srcs = ["sequence_feature_column_integration_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_pip"],
deps = [
@@ -263,4 +268,5 @@
"//tensorflow/python:client_testlib",
"//tensorflow/python:util",
],
+ python_version = "PY3",
)
diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
index 0584d1c..80fcf11 100755
--- a/tensorflow/python/keras/BUILD
+++ b/tensorflow/python/keras/BUILD
@@ -20,28 +20,6 @@
name = "keras",
srcs = [
"__init__.py",
- "applications/__init__.py",
- "applications/densenet.py",
- "applications/imagenet_utils.py",
- "applications/inception_resnet_v2.py",
- "applications/inception_v3.py",
- "applications/mobilenet.py",
- "applications/mobilenet_v2.py",
- "applications/nasnet.py",
- "applications/resnet.py",
- "applications/resnet_v2.py",
- "applications/vgg16.py",
- "applications/vgg19.py",
- "applications/xception.py",
- "datasets/__init__.py",
- "datasets/boston_housing.py",
- "datasets/cifar.py",
- "datasets/cifar10.py",
- "datasets/cifar100.py",
- "datasets/fashion_mnist.py",
- "datasets/imdb.py",
- "datasets/mnist.py",
- "datasets/reuters.py",
"estimator/__init__.py",
"keras_parameterized.py",
"ops.py",
@@ -68,11 +46,12 @@
":saving",
"//tensorflow/python:training",
"//tensorflow/python/eager:monitoring",
+ "//tensorflow/python/keras/applications",
+ "//tensorflow/python/keras/datasets",
"//tensorflow/python/keras/mixed_precision/experimental:mixed_precision_experimental",
"//tensorflow/python/keras/optimizer_v2",
"//tensorflow/python/keras/premade",
"//tensorflow/python/saved_model",
- "@keras_applications_archive//:keras_applications",
],
)
@@ -176,11 +155,13 @@
"engine/training_eager.py",
"engine/training_generator.py",
"engine/training_utils.py",
+ "engine/training_v1.py",
"engine/training_v2.py",
"engine/training_v2_utils.py",
"metrics.py", # Need base_layer
"models.py",
"utils/metrics_utils.py",
+ "utils/version_utils.py",
],
srcs_version = "PY2AND3",
deps = [
@@ -743,18 +724,6 @@
)
tf_py_test(
- name = "applications_test",
- size = "medium",
- srcs = ["applications/applications_test.py"],
- additional_deps = [
- ":keras",
- "@absl_py//absl/testing:parameterized",
- "//tensorflow/python:client_testlib",
- ],
- shard_count = 11,
-)
-
-tf_py_test(
name = "advanced_activations_test",
size = "medium",
srcs = ["layers/advanced_activations_test.py"],
@@ -1252,6 +1221,17 @@
)
tf_py_test(
+ name = "version_utils_test",
+ size = "small",
+ srcs = ["utils/version_utils_test.py"],
+ additional_deps = [
+ ":keras",
+ "@absl_py//absl/testing:parameterized",
+ "//tensorflow/python:client_testlib",
+ ],
+)
+
+tf_py_test(
name = "tf_utils_test",
size = "small",
srcs = ["utils/tf_utils_test.py"],
diff --git a/tensorflow/python/keras/applications/BUILD b/tensorflow/python/keras/applications/BUILD
new file mode 100644
index 0000000..69f2692
--- /dev/null
+++ b/tensorflow/python/keras/applications/BUILD
@@ -0,0 +1,51 @@
+# Description:
+# Contains the Keras Application package (internal TensorFlow version).
+
+load("//tensorflow:tensorflow.bzl", "tf_py_test")
+
+package(
+ default_visibility = ["//visibility:public"],
+ licenses = ["notice"], # Apache 2.0
+)
+
+exports_files(["LICENSE"])
+
+py_library(
+ name = "applications",
+ srcs = [
+ "__init__.py",
+ "densenet.py",
+ "imagenet_utils.py",
+ "inception_resnet_v2.py",
+ "inception_v3.py",
+ "mobilenet.py",
+ "mobilenet_v2.py",
+ "nasnet.py",
+ "resnet.py",
+ "resnet_v2.py",
+ "vgg16.py",
+ "vgg19.py",
+ "xception.py",
+ ],
+ srcs_version = "PY2AND3",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//tensorflow/python:util",
+ "//tensorflow/python/keras:backend",
+ "//tensorflow/python/keras:engine",
+ "//tensorflow/python/keras:layers_base",
+ "@keras_applications_archive//:keras_applications",
+ ],
+)
+
+tf_py_test(
+ name = "applications_test",
+ size = "medium",
+ srcs = ["applications_test.py"],
+ additional_deps = [
+ ":applications",
+ "@absl_py//absl/testing:parameterized",
+ "//tensorflow/python:client_testlib",
+ ],
+ shard_count = 11,
+)
diff --git a/tensorflow/python/keras/datasets/BUILD b/tensorflow/python/keras/datasets/BUILD
new file mode 100644
index 0000000..4675922
--- /dev/null
+++ b/tensorflow/python/keras/datasets/BUILD
@@ -0,0 +1,34 @@
+# Description:
+# Contains the Keras datasets package (internal TensorFlow version).
+
+package(
+ default_visibility = ["//visibility:public"],
+ licenses = ["notice"], # Apache 2.0
+)
+
+exports_files(["LICENSE"])
+
+py_library(
+ name = "datasets",
+ srcs = [
+ "__init__.py",
+ "boston_housing.py",
+ "cifar.py",
+ "cifar10.py",
+ "cifar100.py",
+ "fashion_mnist.py",
+ "imdb.py",
+ "mnist.py",
+ "reuters.py",
+ ],
+ srcs_version = "PY2AND3",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//tensorflow/python:platform",
+ "//tensorflow/python:util",
+ "//tensorflow/python/keras:backend",
+ "//tensorflow/python/keras:engine_utils",
+ "//third_party/py/numpy",
+ "@six_archive//:six",
+ ],
+)
diff --git a/tensorflow/python/keras/engine/network_test.py b/tensorflow/python/keras/engine/network_test.py
index ff47e46..efa151d 100644
--- a/tensorflow/python/keras/engine/network_test.py
+++ b/tensorflow/python/keras/engine/network_test.py
@@ -357,17 +357,17 @@
x = keras.layers.Dropout(0.5)(x, training=True)
model = keras.models.Model(inp, x)
# Would be `dropout/cond/Merge` by default
- self.assertTrue(model.output.op.name.endswith('dropout/mul_1'))
+ self.assertIn('dropout', model.output.op.name)
# Test that argument is kept when applying the model
inp2 = keras.layers.Input(shape=(2,))
out2 = model(inp2)
- self.assertTrue(out2.op.name.endswith('dropout/mul_1'))
+ self.assertIn('dropout', out2.op.name)
# Test that argument is kept after loading a model
config = model.get_config()
model = keras.models.Model.from_config(config)
- self.assertTrue(model.output.op.name.endswith('dropout/mul_1'))
+ self.assertIn('dropout', model.output.op.name)
def test_node_construction(self):
# test basics
diff --git a/tensorflow/python/keras/engine/training.py b/tensorflow/python/keras/engine/training.py
index eef8ad8..40fdb0c 100644
--- a/tensorflow/python/keras/engine/training.py
+++ b/tensorflow/python/keras/engine/training.py
@@ -55,6 +55,7 @@
from tensorflow.python.keras.saving.saved_model import model_serialization
from tensorflow.python.keras.utils import data_utils
from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.keras.utils import version_utils
from tensorflow.python.keras.utils.mode_keys import ModeKeys
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
@@ -78,7 +79,7 @@
@keras_export('keras.models.Model', 'keras.Model')
-class Model(network.Network):
+class Model(network.Network, version_utils.VersionSelector):
"""`Model` groups layers into an object with training and inference features.
There are two ways to instantiate a `Model`:
@@ -760,6 +761,8 @@
and what the model expects.
"""
_keras_api_gauge.get_cell('fit').set(True)
+ # Legacy graph support is contained in `training_v1.Model`.
+ version_utils.disallow_legacy_graph('Model', 'fit')
# Legacy support
if 'nb_epoch' in kwargs:
logging.warning(
@@ -880,6 +883,7 @@
ValueError: in case of invalid arguments.
"""
_keras_api_gauge.get_cell('evaluate').set(True)
+ version_utils.disallow_legacy_graph('Model', 'evaluate')
self._assert_compile_was_called()
self._check_call_args('evaluate')
@@ -959,6 +963,7 @@
that is not a multiple of the batch size.
"""
_keras_api_gauge.get_cell('predict').set(True)
+ version_utils.disallow_legacy_graph('Model', 'predict')
self._check_call_args('predict')
func = self._select_training_loop(x)
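
`version_utils` itself is not shown in this diff; below is a hedged sketch of what a guard like `disallow_legacy_graph` plausibly does, given how `fit`, `evaluate`, and `predict` call it above (illustrative only, not the real implementation):

```python
from tensorflow.python.framework import ops

def disallow_legacy_graph(cls_name, method_name):
  # Reject v1-style graph mode: the legacy loops now live in
  # `training_v1.Model`, so the v2 entry points refuse to run there.
  if not ops.executing_eagerly_outside_functions():
    raise ValueError(
        '%s.%s is only supported when eager execution is enabled or '
        'inside tf.function; use the legacy `training_v1.Model` loops '
        'for v1 graphs.' % (cls_name, method_name))
```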
diff --git a/tensorflow/python/keras/engine/training_v1.py b/tensorflow/python/keras/engine/training_v1.py
new file mode 100644
index 0000000..1afd525
--- /dev/null
+++ b/tensorflow/python/keras/engine/training_v1.py
@@ -0,0 +1,3190 @@
+# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""V1 Training-related part of the Keras engine."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import numpy as np
+
+from tensorflow.python import tf2
+from tensorflow.python.data.ops import dataset_ops
+from tensorflow.python.data.ops import iterator_ops
+from tensorflow.python.distribute import distribution_strategy_context
+from tensorflow.python.eager import context
+from tensorflow.python.eager import def_function
+from tensorflow.python.eager import monitoring
+from tensorflow.python.framework import composite_tensor
+from tensorflow.python.framework import composite_tensor_utils
+from tensorflow.python.framework import constant_op
+from tensorflow.python.framework import ops
+from tensorflow.python.framework import sparse_tensor
+from tensorflow.python.framework import tensor_shape
+from tensorflow.python.framework import tensor_spec
+from tensorflow.python.framework import tensor_util
+from tensorflow.python.framework import type_spec
+from tensorflow.python.keras import backend as K
+from tensorflow.python.keras import losses
+from tensorflow.python.keras import metrics as metrics_module
+from tensorflow.python.keras import optimizers
+from tensorflow.python.keras.distribute import distributed_training_utils
+from tensorflow.python.keras.engine import network
+from tensorflow.python.keras.engine import training as training_lib
+from tensorflow.python.keras.engine import training_arrays
+from tensorflow.python.keras.engine import training_distributed
+from tensorflow.python.keras.engine import training_eager
+from tensorflow.python.keras.engine import training_generator
+from tensorflow.python.keras.engine import training_utils
+from tensorflow.python.keras.engine import training_v2
+from tensorflow.python.keras.engine import training_v2_utils
+from tensorflow.python.keras.mixed_precision.experimental import loss_scale_optimizer
+from tensorflow.python.keras.optimizer_v2 import optimizer_v2
+from tensorflow.python.keras.saving.saved_model import model_serialization
+from tensorflow.python.keras.utils import data_utils
+from tensorflow.python.keras.utils import losses_utils
+from tensorflow.python.keras.utils.mode_keys import ModeKeys
+from tensorflow.python.ops import array_ops
+from tensorflow.python.ops import math_ops
+from tensorflow.python.ops.losses import util as tf_losses_utils
+from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.training.tracking import base as trackable
+from tensorflow.python.training.tracking import layer_utils as trackable_layer_utils
+from tensorflow.python.util import deprecation
+from tensorflow.python.util import nest
+from tensorflow.python.util import tf_inspect
+from tensorflow.python.util.compat import collections_abc
+
+try:
+ from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
+except ImportError:
+ issparse = None
+
+_keras_api_gauge = monitoring.BoolGauge('/tensorflow/api/keras/model_v1',
+ 'keras model v1 usage', 'method')
+
+
+class Model(training_lib.Model):
+ """`Model` groups layers into an object with training and inference features.
+
+ There are two ways to instantiate a `Model`:
+
+ 1 - With the "functional API", where you start from `Input`,
+ you chain layer calls to specify the model's forward pass,
+ and finally you create your model from inputs and outputs:
+
+ ```python
+ import tensorflow as tf
+
+ inputs = tf.keras.Input(shape=(3,))
+ x = tf.keras.layers.Dense(4, activation=tf.nn.relu)(inputs)
+ outputs = tf.keras.layers.Dense(5, activation=tf.nn.softmax)(x)
+ model = tf.keras.Model(inputs=inputs, outputs=outputs)
+ ```
+
+ 2 - By subclassing the `Model` class: in that case, you should define your
+ layers in `__init__` and you should implement the model's forward pass
+ in `call`.
+
+ ```python
+ import tensorflow as tf
+
+ class MyModel(tf.keras.Model):
+
+ def __init__(self):
+ super(MyModel, self).__init__()
+ self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
+ self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)
+
+ def call(self, inputs):
+ x = self.dense1(inputs)
+ return self.dense2(x)
+
+ model = MyModel()
+ ```
+
+ If you subclass `Model`, you can optionally have
+ a `training` argument (boolean) in `call`, which you can use to specify
+ a different behavior in training and inference:
+
+ ```python
+ import tensorflow as tf
+
+ class MyModel(tf.keras.Model):
+
+ def __init__(self):
+ super(MyModel, self).__init__()
+ self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
+ self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)
+ self.dropout = tf.keras.layers.Dropout(0.5)
+
+ def call(self, inputs, training=False):
+ x = self.dense1(inputs)
+ if training:
+ x = self.dropout(x, training=training)
+ return self.dense2(x)
+
+ model = MyModel()
+ ```
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(Model, self).__init__(*args, **kwargs)
+ _keras_api_gauge.get_cell('model_v1').set(True)
+ # initializing _distribution_strategy here since it is possible to call
+ # predict on a model without compiling it.
+ self._distribution_strategy = None
+ self._compile_time_distribution_strategy = None
+ if (ops.executing_eagerly_outside_functions() and
+ distribution_strategy_context.has_strategy()):
+ self._set_strategy(
+ distribution_strategy_context.get_strategy())
+
+ # This flag is used to track if the user is using the deprecated path of
+ # passing distribution strategy to compile rather than creating the model
+ # under distribution strategy scope.
+ self._compile_distribution = False
+
+ self._run_eagerly = None
+ self._experimental_run_tf_function = (
+ ops.executing_eagerly_outside_functions())
+
+ @trackable.no_automatic_dependency_tracking
+ def _set_strategy(self, strategy):
+ self._compile_time_distribution_strategy = strategy
+
+ def get_weights(self):
+ """Retrieves the weights of the model.
+
+ Returns:
+ A flat list of Numpy arrays.
+ """
+ strategy = (self._distribution_strategy or
+ self._compile_time_distribution_strategy)
+ if strategy:
+ with strategy.scope():
+ return super(Model, self).get_weights()
+ return super(Model, self).get_weights()
+
+ def load_weights(self, filepath, by_name=False, skip_mismatch=False):
+ """Loads all layer weights, either from a TensorFlow or an HDF5 weight file.
+
+ If `by_name` is False, weights are loaded based on the network's
+ topology. This means the architecture should be the same as when the weights
+ were saved. Note that layers that don't have weights are not taken into
+ account in the topological ordering, so adding or removing layers is fine as
+ long as they don't have weights.
+
+ If `by_name` is True, weights are loaded into layers only if they share the
+ same name. This is useful for fine-tuning or transfer-learning models where
+ some of the layers have changed.
+
+ Only topological loading (`by_name=False`) is supported when loading weights
+ from the TensorFlow format. Note that topological loading differs slightly
+ between TensorFlow and HDF5 formats for user-defined classes inheriting from
+ `tf.keras.Model`: HDF5 loads based on a flattened list of weights, while the
+ TensorFlow format loads based on the object-local names of attributes to
+ which layers are assigned in the `Model`'s constructor.
+
+ Arguments:
+ filepath: String, path to the weights file to load. For weight files in
+ TensorFlow format, this is the file prefix (the same as was passed
+ to `save_weights`).
+ by_name: Boolean, whether to load weights by name or by topological
+ order. Only topological loading is supported for weight files in
+ TensorFlow format.
+ skip_mismatch: Boolean, whether to skip loading of layers where there is
+ a mismatch in the number of weights, or a mismatch in the shape of
+ the weight (only valid when `by_name=True`).
+
+ Returns:
+ When loading a weight file in TensorFlow format, returns the same status
+ object as `tf.train.Checkpoint.restore`. When graph building, restore
+ ops are run automatically as soon as the network is built (on first call
+ for user-defined classes inheriting from `Model`, immediately if it is
+ already built).
+
+ When loading weights in HDF5 format, returns `None`.
+
+ Raises:
+ ImportError: If h5py is not available and the weight file is in HDF5
+ format.
+ ValueError: If `skip_mismatch` is set to `True` when `by_name` is
+ `False`.
+ """
+ if distributed_training_utils.is_tpu_strategy(self._distribution_strategy):
+ if (self._distribution_strategy.extended.steps_per_run > 1 and
+ (not network._is_hdf5_filepath(filepath))): # pylint: disable=protected-access
+ raise ValueError('Load weights is not yet supported with TPUStrategy '
+ 'with steps_per_run greater than 1.')
+ return super(Model, self).load_weights(filepath, by_name, skip_mismatch)
+
+ @trackable.no_automatic_dependency_tracking
+ def compile(self,
+ optimizer='rmsprop',
+ loss=None,
+ metrics=None,
+ loss_weights=None,
+ sample_weight_mode=None,
+ weighted_metrics=None,
+ target_tensors=None,
+ distribute=None,
+ **kwargs):
+ """Configures the model for training.
+
+ Arguments:
+ optimizer: String (name of optimizer) or optimizer instance.
+ See `tf.keras.optimizers`.
+ loss: String (name of objective function), objective function or
+ `tf.keras.losses.Loss` instance. See `tf.keras.losses`. An objective
+ function is any callable with the signature
+ `scalar_loss = fn(y_true, y_pred)`. If the model has multiple
+ outputs, you can use a different loss on each output by passing a
+ dictionary or a list of losses. The loss value that will be
+ minimized by the model will then be the sum of all individual
+ losses.
+ metrics: List of metrics to be evaluated by the model during training
+ and testing. Typically you will use `metrics=['accuracy']`.
+ To specify different metrics for different outputs of a
+ multi-output model, you could also pass a dictionary, such as
+ `metrics={'output_a': 'accuracy', 'output_b': ['accuracy', 'mse']}`.
+ You can also pass a list (len = len(outputs)) of lists of metrics
+ such as `metrics=[['accuracy'], ['accuracy', 'mse']]` or
+ `metrics=['accuracy', ['accuracy', 'mse']]`.
+ loss_weights: Optional list or dictionary specifying scalar
+ coefficients (Python floats) to weight the loss contributions
+ of different model outputs.
+ The loss value that will be minimized by the model
+ will then be the *weighted sum* of all individual losses,
+ weighted by the `loss_weights` coefficients.
+ If a list, it is expected to have a 1:1 mapping
+ to the model's outputs. If a dict, it is expected to map
+ output names (strings) to scalar coefficients.
+ sample_weight_mode: If you need to do timestep-wise
+ sample weighting (2D weights), set this to `"temporal"`.
+ `None` defaults to sample-wise weights (1D).
+ If the model has multiple outputs, you can use a different
+ `sample_weight_mode` on each output by passing a
+ dictionary or a list of modes.
+ weighted_metrics: List of metrics to be evaluated and weighted
+ by sample_weight or class_weight during training and testing.
+ target_tensors: By default, Keras will create placeholders for the
+ model's target, which will be fed with the target data during
+ training. If instead you would like to use your own
+ target tensors (in turn, Keras will not expect external
+ Numpy data for these targets at training time), you
+ can specify them via the `target_tensors` argument. It can be
+ a single tensor (for a single-output model), a list of tensors,
+ or a dict mapping output names to target tensors.
+ distribute: NOT SUPPORTED IN TF 2.0, please create and compile the
+ model under distribution strategy scope instead of passing it to
+ compile.
+ **kwargs: Any additional arguments.
+
+ Raises:
+ ValueError: In case of invalid arguments for
+ `optimizer`, `loss`, `metrics` or `sample_weight_mode`.
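+
+ Example (an illustrative sketch, assuming a small single-output model;
+ the layer sizes and argument choices are placeholders):
+
+ ```python
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([
+     tf.keras.layers.Dense(8, activation='relu', input_shape=(4,)),
+     tf.keras.layers.Dense(1)
+ ])
+ model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
+ ```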
+ """
+ self._run_eagerly = kwargs.pop('run_eagerly', None)
+ self._experimental_run_tf_function = kwargs.pop(
+ 'experimental_run_tf_function', True)
+
+ # Prepare Session arguments (legacy).
+ kwargs.pop('cloning', None) # Legacy DistStrat argument, never used.
+ allowed_kwargs = {'feed_dict', 'fetches', 'options', 'run_metadata'}
+ unknown_kwargs = set(kwargs.keys()) - allowed_kwargs
+ if unknown_kwargs:
+ raise TypeError(
+ 'Invalid keyword argument(s) in `compile`: %s' % (unknown_kwargs,))
+ self._function_kwargs = kwargs
+ if self._function_kwargs:
+ self._experimental_run_tf_function = False
+ if self.run_eagerly:
+ raise ValueError(
+ 'Session keyword arguments are not supported '
+ 'when `run_eagerly=True`. You passed the following '
+ 'Session arguments: %s' % (self._function_kwargs,))
+
+ self._set_optimizer(optimizer)
+ is_any_keras_optimizer_v1 = any(
+ (isinstance(opt, optimizers.Optimizer)
+ and not isinstance(opt, optimizers.TFOptimizer)
+ ) for opt in nest.flatten(self.optimizer))
+
+ if is_any_keras_optimizer_v1 and ops.executing_eagerly_outside_functions():
+ raise ValueError('`tf.compat.v1.keras` Optimizer (%s) is not '
+ 'supported when eager execution is enabled. Use a '
+ '`tf.keras` Optimizer instead, or disable eager '
+ 'execution.' % optimizer)
+
+ if ((target_tensors is not None)
+ or not ops.executing_eagerly_outside_functions()):
+ # Fallback out of things that aren't supported with v2 loops
+ self._experimental_run_tf_function = False
+
+ if distribute is not None:
+ if tf2.enabled() or self._experimental_run_tf_function:
+ raise ValueError(
+ 'Distribute argument in compile is not available in TF 2.0. Please '
+ 'create the model under the distribution strategy scope.')
+ logging.warning('Distribute argument in compile is deprecated. Please '
+ 'create the model under the distribution strategy scope.')
+ self._distribution_strategy = distribute
+ self._compile_distribution = True
+ else:
+ if distribution_strategy_context.has_strategy():
+ # When the user builds the model in the DS scope and cross replica
+ # context we want distribution strategy to be set but when building the
+ # replica copies of the models internally we should not be compiling
+ # with distribution strategy and use the default compilation path.
+ if distribution_strategy_context.in_cross_replica_context():
+ self._distribution_strategy = (
+ distribution_strategy_context.get_strategy())
+
+ if not self._experimental_run_tf_function:
+ self._validate_compile_param_for_distribution_strategy(self.run_eagerly,
+ sample_weight_mode,
+ target_tensors,
+ weighted_metrics)
+ # We've disabled automatic dependency tracking for this method, but do want
+ # to add a checkpoint dependency on the optimizer if it's trackable.
+ if isinstance(self.optimizer, trackable.Trackable):
+ self._track_trackable(
+ self.optimizer, name='optimizer', overwrite=True)
+ self.loss = loss or {}
+ self.loss_weights = loss_weights
+ self.sample_weight_mode = sample_weight_mode
+ self._compile_metrics = metrics or []
+ self._compile_weighted_metrics = weighted_metrics
+ if self.run_eagerly and target_tensors is not None:
+ raise ValueError(
+ 'target_tensors argument is not supported when '
+ 'running a model eagerly.')
+
+ # _training_endpoints contains a list of _TrainingEndpoint objects, each
+ # of which holds the model output/target/loss and related metadata.
+ self._training_endpoints = []
+
+ # Used to freeze the behavior of the Model once `compile` has been called.
+ self._compiled_trainable_state = self._get_trainable_state()
+
+ # Set tf.distribute.Strategy specific parameters.
+ self._distributed_model_cache = {}
+ self._distributed_function_cache = {}
+
+ # Clear any `_eager_losses` that was added.
+ self._clear_losses()
+
+ if (not context.executing_eagerly() and
+ self._distribution_strategy is not None):
+ # Ensures a Session is created and configured correctly for Distribution
+ # Strategy.
+ K.configure_and_create_distributed_session(self._distribution_strategy)
+ # Initialize model metric attributes.
+ self._init_metric_attributes()
+ if not self.built or not self.inputs or not self.outputs:
+ # Model is not compilable because it does not know its number of inputs
+ # and outputs, nor their shapes and names. We will compile after the first
+ # time the model gets called on training data.
+ return
+ self._is_compiled = True
+ _keras_api_gauge.get_cell('compile_v1').set(True)
+
+ # Prepare list of loss functions, same size as model outputs.
+ self.loss_functions = training_utils.prepare_loss_functions(
+ self.loss, self.output_names)
+
+ target_tensors = self._process_target_tensor_for_compile(target_tensors)
+
+ for o, n, l, t in zip(self.outputs, self.output_names,
+ self.loss_functions, target_tensors):
+ endpoint = _TrainingEndpoint(o, n, l)
+ endpoint.create_training_target(t, run_eagerly=self.run_eagerly)
+ self._training_endpoints.append(endpoint)
+
+ # Prepare list of loss weights, same size as model outputs.
+ training_utils.prepare_loss_weights(self._training_endpoints, loss_weights)
+
+ # Initialization for Eager mode execution.
+ if self.run_eagerly:
+ self._compile_eagerly(metrics, weighted_metrics, sample_weight_mode)
+ return
+
+ with K.get_graph().as_default():
+ # Save all metric attributes per output of the model.
+ self._cache_output_metric_attributes(metrics, weighted_metrics)
+
+ # Set metric attributes on model.
+ self._set_metric_attributes()
+
+ # Invoke metric functions (unweighted) for all the outputs.
+ self._handle_metrics(
+ self.outputs,
+ targets=self._targets,
+ skip_target_masks=self._prepare_skip_target_masks(),
+ masks=self._prepare_output_masks())
+
+ # Prepare sample weight modes. List with the same length as model outputs.
+ training_utils.prepare_sample_weight_modes(
+ self._training_endpoints, sample_weight_mode)
+
+ # Creates the model loss and weighted metrics sub-graphs.
+ self._compile_weights_loss_and_weighted_metrics()
+
+ # Functions for train, test and predict will
+ # be compiled lazily when required.
+ # This saves time when the user is not using all functions.
+ self.train_function = None
+ self.test_function = None
+ self.predict_function = None
+
+ # Collected trainable weights, sorted in topological order.
+ self._collected_trainable_weights = self.trainable_weights
+
+ # Validate all variables were correctly created in distribution scope.
+ if self._distribution_strategy and not self._compile_distribution:
+ for v in self.variables:
+ strategy = self._distribution_strategy
+ if not strategy.extended.variable_created_in_scope(v):
+ raise ValueError(
+ 'Variable (%s) was not created in the distribution strategy '
+ 'scope of (%s). It is most likely due to not all layers or '
+ 'the model or optimizer being created outside the distribution '
+ 'strategy scope. Try to make sure your code looks similar '
+ 'to the following.\n'
+ 'with strategy.scope():\n'
+ ' model=_create_model()\n'
+ ' model.compile(...)' % (v, strategy))
+
+ @trackable.no_automatic_dependency_tracking
+ def _init_distributed_function_cache_if_not_compiled(self):
+ if not hasattr(self, '_distributed_function_cache'):
+ self._distributed_function_cache = {}
+
+ @property
+ def metrics(self):
+ """Returns the model's metrics added using `compile`, `add_metric` APIs."""
+ metrics = []
+ if self._is_compiled:
+ metrics += self._compile_metric_functions
+ metrics.extend(self._metrics)
+ metrics.extend(_get_metrics_from_layers(self._layers))
+ return metrics
+
+ @property
+ def metrics_names(self):
+ """Returns the model's display labels for all outputs."""
+
+ # This property includes all output names including `loss` and per-output
+ # losses for backward compatibility.
+ metrics_names = ['loss']
+ if self._is_compiled:
+ # Add output loss metric names to the metric names list.
+ if len(self._training_endpoints) > 1:
+ metrics_names.extend([
+ e.loss_name()
+ for e in self._training_endpoints
+ if not e.should_skip_target()
+ ])
+
+ # Add all metric names.
+ metrics_names += [m.name for m in self.metrics]
+ return metrics_names
+
+ @property
+ def run_eagerly(self):
+ """Settable attribute indicating whether the model should run eagerly.
+
+ Running eagerly means that your model will be run step by step,
+ like Python code. Your model might run slower, but it should become easier
+ for you to debug it by stepping into individual layer calls.
+
+ By default, we will attempt to compile your model to a static graph to
+ deliver the best execution performance.
+
+ Returns:
+ Boolean, whether the model should run eagerly.
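+
+ Example (a minimal sketch; assumes eager execution is enabled, as in
+ TF 2.x by default, and uses a placeholder model):
+
+ ```python
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ model.compile(optimizer='sgd', loss='mse', run_eagerly=True)
+ assert model.run_eagerly  # Train/eval steps now run step by step.
+ ```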
+ """
+ if self._run_eagerly is True and not context.executing_eagerly():
+ raise ValueError('You can only set `run_eagerly=True` if eager execution '
+ 'is enabled.')
+ if not self.dynamic:
+ if self._run_eagerly is None:
+ # Respect `tf.config.experimental_run_functions_eagerly` unless
+ # `run_eagerly` was explicitly passed to `compile`.
+ return def_function.RUN_FUNCTIONS_EAGERLY
+ else:
+ return self._run_eagerly
+ else:
+ if not context.executing_eagerly():
+ raise ValueError('Your model contains layers that can only be '
+ 'successfully run in eager execution (layers '
+ 'constructed with `dynamic=True`). '
+ 'You must enable eager execution with '
+ '`tf.enable_eager_execution()`.')
+ if self._run_eagerly is False:
+ # TODO(fchollet): consider using py_func to enable this.
+ raise ValueError('Your model contains layers that can only be '
+ 'successfully run in eager execution (layers '
+ 'constructed with `dynamic=True`). '
+ 'You cannot set `run_eagerly=False`.')
+ return context.executing_eagerly()
+
+ @run_eagerly.setter
+ def run_eagerly(self, value):
+ self._run_eagerly = value
+
+ def _select_training_loop(self, inputs):
+ """Select training loop for fit/eval/predict based on the inputs."""
+ # TODO(kaftan) or TODO(scottzhu): This check should eventually be nicely
+ # integrated into the data adapters in the v2 loop. We can't do this yet
+ # because we currently have to fall back for unhandled data types.
+ if isinstance(inputs, (iterator_ops.Iterator,
+ iterator_ops.OwnedIterator)):
+ raise ValueError('For performance reasons Keras `fit`, `evaluate` and '
+ '`predict` accept tf.data `Datasets` as input but not '
+ 'iterators that have been manually generated from '
+ 'Datasets by users. Please directly pass in the '
+ 'original `Dataset` object instead of passing in '
+ '`iter(dataset)`.')
+
+ # Experimental training loop with default DS path.
+ if context.executing_eagerly() and self._experimental_run_tf_function:
+ if self._in_multi_worker_mode():
+ return training_distributed.DistributionMultiWorkerTrainingLoop(
+ training_v2.Loop())
+ else:
+ return training_v2.Loop()
+
+ # Case 1: distribution strategy.
+ if self._distribution_strategy:
+ if self._in_multi_worker_mode():
+ return training_distributed.DistributionMultiWorkerTrainingLoop(
+ training_distributed.DistributionSingleWorkerTrainingLoop())
+ else:
+ return training_distributed.DistributionSingleWorkerTrainingLoop()
+
+ # Case 2: generator-like. Input is Python generator, or Sequence object,
+ # or a non-distributed Dataset or iterator in eager execution.
+ if data_utils.is_generator_or_sequence(inputs):
+ return training_generator.GeneratorOrSequenceTrainingLoop()
+ if training_utils.is_eager_dataset_or_iterator(inputs):
+ return training_generator.EagerDatasetOrIteratorTrainingLoop()
+
+ # Case 3: Symbolic tensors or Numpy array-like.
+ # This includes Datasets and iterators in graph mode (since they
+ # generate symbolic tensors).
+ if self.run_eagerly:
+ return training_generator.GeneratorLikeTrainingLoop()
+ else:
+ return training_arrays.ArrayLikeTrainingLoop()
+
+ def fit(self,
+ x=None,
+ y=None,
+ batch_size=None,
+ epochs=1,
+ verbose=1,
+ callbacks=None,
+ validation_split=0.,
+ validation_data=None,
+ shuffle=True,
+ class_weight=None,
+ sample_weight=None,
+ initial_epoch=0,
+ steps_per_epoch=None,
+ validation_steps=None,
+ validation_freq=1,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False,
+ **kwargs):
+ """Trains the model for a fixed number of epochs (iterations on a dataset).
+
+ Arguments:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A dict mapping input names to the corresponding array/tensors,
+ if the model has named inputs.
+ - A `tf.data` dataset. Should return a tuple
+ of either `(inputs, targets)` or
+ `(inputs, targets, sample_weights)`.
+ - A generator or `keras.utils.Sequence` returning `(inputs, targets)`
+ or `(inputs, targets, sample weights)`.
+ y: Target data. Like the input data `x`,
+ it could be either Numpy array(s) or TensorFlow tensor(s).
+ It should be consistent with `x` (you cannot have Numpy inputs and
+ tensor targets, or vice versa). If `x` is a dataset, generator,
+ or `keras.utils.Sequence` instance, `y` should
+ not be specified (since targets will be obtained from `x`).
+ batch_size: Integer or `None`.
+ Number of samples per gradient update.
+ If unspecified, `batch_size` will default to 32.
+ Do not specify the `batch_size` if your data is in the
+ form of symbolic tensors, datasets,
+ generators, or `keras.utils.Sequence` instances (since they generate
+ batches).
+ epochs: Integer. Number of epochs to train the model.
+ An epoch is an iteration over the entire `x` and `y`
+ data provided.
+ Note that in conjunction with `initial_epoch`,
+ `epochs` is to be understood as "final epoch".
+ The model is not trained for a number of iterations
+ given by `epochs`, but merely until the epoch
+ of index `epochs` is reached.
+ verbose: 0, 1, or 2. Verbosity mode.
+ 0 = silent, 1 = progress bar, 2 = one line per epoch.
+ Note that the progress bar is not particularly useful when
+ logged to a file, so verbose=2 is recommended when not running
+ interactively (e.g., in a production environment).
+ callbacks: List of `keras.callbacks.Callback` instances.
+ List of callbacks to apply during training.
+ See `tf.keras.callbacks`.
+ validation_split: Float between 0 and 1.
+ Fraction of the training data to be used as validation data.
+ The model will set apart this fraction of the training data,
+ will not train on it, and will evaluate
+ the loss and any model metrics
+ on this data at the end of each epoch.
+ The validation data is selected from the last samples
+ in the `x` and `y` data provided, before shuffling. This argument is
+ not supported when `x` is a dataset, generator or
+ `keras.utils.Sequence` instance.
+ validation_data: Data on which to evaluate
+ the loss and any model metrics at the end of each epoch.
+ The model will not be trained on this data.
+ `validation_data` will override `validation_split`.
+ `validation_data` could be:
+ - tuple `(x_val, y_val)` of Numpy arrays or tensors
+ - tuple `(x_val, y_val, val_sample_weights)` of Numpy arrays
+ - dataset
+ For the first two cases, `batch_size` must be provided.
+ For the last case, `validation_steps` could be provided.
+ shuffle: Boolean (whether to shuffle the training data
+ before each epoch) or str (for 'batch').
+ 'batch' is a special option for dealing with the
+ limitations of HDF5 data; it shuffles in batch-sized chunks.
+ Has no effect when `steps_per_epoch` is not `None`.
+ class_weight: Optional dictionary mapping class indices (integers)
+ to a weight (float) value, used for weighting the loss function
+ (during training only).
+ This can be useful to tell the model to
+ "pay more attention" to samples from
+ an under-represented class.
+ sample_weight: Optional Numpy array of weights for
+ the training samples, used for weighting the loss function
+ (during training only). You can either pass a flat (1D)
+ Numpy array with the same length as the input samples
+ (1:1 mapping between weights and samples),
+ or in the case of temporal data,
+ you can pass a 2D array with shape
+ `(samples, sequence_length)`,
+ to apply a different weight to every timestep of every sample.
+ In this case you should make sure to specify
+ `sample_weight_mode="temporal"` in `compile()`. This argument is not
+ supported when `x` is a dataset, generator, or
+ `keras.utils.Sequence` instance. Instead, provide the sample weights
+ as the third element of `x`.
+ initial_epoch: Integer.
+ Epoch at which to start training
+ (useful for resuming a previous training run).
+ steps_per_epoch: Integer or `None`.
+ Total number of steps (batches of samples)
+ before declaring one epoch finished and starting the
+ next epoch. When training with input tensors such as
+ TensorFlow data tensors, the default `None` is equal to
+ the number of samples in your dataset divided by
+ the batch size, or 1 if that cannot be determined. If `x` is a
+ `tf.data` dataset and `steps_per_epoch`
+ is `None`, the epoch will run until the input dataset is exhausted.
+ This argument is not supported with array inputs.
+ validation_steps: Only relevant if `validation_data` is provided and
+ is a `tf.data` dataset. Total number of steps (batches of
+ samples) to draw before stopping when performing validation
+ at the end of every epoch. If `validation_steps` is `None`, validation
+ will run until the `validation_data` dataset is exhausted. In the
+ case of an infinite dataset, it will run into an infinite loop.
+ If `validation_steps` is specified, only part of the dataset
+ will be consumed, and evaluation will start from the beginning of
+ the dataset at each epoch. This ensures that the same validation
+ samples are used every time.
+ validation_freq: Only relevant if validation data is provided. Integer
+ or `collections_abc.Container` instance (e.g. list, tuple, etc.).
+ If an integer, specifies how many training epochs to run before a
+ new validation run is performed, e.g. `validation_freq=2` runs
+ validation every 2 epochs. If a Container, specifies the epochs on
+ which to run validation, e.g. `validation_freq=[1, 2, 10]` runs
+ validation at the end of the 1st, 2nd, and 10th epochs.
+ max_queue_size: Integer. Used for generator or `keras.utils.Sequence`
+ input only. Maximum size for the generator queue.
+ If unspecified, `max_queue_size` will default to 10.
+ workers: Integer. Used for generator or `keras.utils.Sequence` input
+ only. Maximum number of processes to spin up
+ when using process-based threading. If unspecified, `workers`
+ will default to 1. If 0, will execute the generator on the main
+ thread.
+ use_multiprocessing: Boolean. Used for generator or
+ `keras.utils.Sequence` input only. If `True`, use process-based
+ threading. If unspecified, `use_multiprocessing` will default to
+ `False`. Note that because this implementation relies on
+ multiprocessing, you should not pass non-picklable arguments to
+ the generator as they can't be passed easily to children processes.
+ **kwargs: Used for backwards compatibility.
+
+ Returns:
+ A `History` object. Its `History.history` attribute is
+ a record of training loss values and metrics values
+ at successive epochs, as well as validation loss values
+ and validation metrics values (if applicable).
+
+ Raises:
+ RuntimeError: If the model was never compiled.
+ ValueError: In case of mismatch between the provided input data
+ and what the model expects.
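+
+ Example (an illustrative sketch on random NumPy data; shapes, sizes and
+ hyperparameters are placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ model.compile(optimizer='sgd', loss='mse')
+ x = np.random.random((32, 4)).astype('float32')
+ y = np.random.random((32, 1)).astype('float32')
+ history = model.fit(x, y, batch_size=8, epochs=2, validation_split=0.25)
+ print(history.history['loss'])  # One training loss value per epoch.
+ ```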
+ """
+ _keras_api_gauge.get_cell('fit_v1').set(True)
+ # Legacy support
+ if 'nb_epoch' in kwargs:
+ logging.warning(
+ 'The `nb_epoch` argument in `fit` has been renamed `epochs`.')
+ epochs = kwargs.pop('nb_epoch')
+ if kwargs:
+ raise TypeError('Unrecognized keyword arguments: ' + str(kwargs))
+ self._assert_compile_was_called()
+ self._check_call_args('fit')
+
+ func = self._select_training_loop(x)
+ return func.fit(
+ self,
+ x=x,
+ y=y,
+ batch_size=batch_size,
+ epochs=epochs,
+ verbose=verbose,
+ callbacks=callbacks,
+ validation_split=validation_split,
+ validation_data=validation_data,
+ shuffle=shuffle,
+ class_weight=class_weight,
+ sample_weight=sample_weight,
+ initial_epoch=initial_epoch,
+ steps_per_epoch=steps_per_epoch,
+ validation_steps=validation_steps,
+ validation_freq=validation_freq,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing)
+
+ def evaluate(self,
+ x=None,
+ y=None,
+ batch_size=None,
+ verbose=1,
+ sample_weight=None,
+ steps=None,
+ callbacks=None,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False):
+ """Returns the loss value & metrics values for the model in test mode.
+
+ Computation is done in batches.
+
+ Arguments:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A dict mapping input names to the corresponding array/tensors,
+ if the model has named inputs.
+ - A `tf.data` dataset.
+ - A generator or `keras.utils.Sequence` instance.
+ y: Target data. Like the input data `x`,
+ it could be either Numpy array(s) or TensorFlow tensor(s).
+ It should be consistent with `x` (you cannot have Numpy inputs and
+ tensor targets, or vice versa).
+ If `x` is a dataset, generator or
+ `keras.utils.Sequence` instance, `y` should not be specified (since
+ targets will be obtained from the iterator/dataset).
+ batch_size: Integer or `None`.
+ Number of samples per batch of computation.
+ If unspecified, `batch_size` will default to 32.
+ Do not specify the `batch_size` if your data is in the
+ form of symbolic tensors, dataset,
+ generators, or `keras.utils.Sequence` instances (since they generate
+ batches).
+ verbose: 0 or 1. Verbosity mode.
+ 0 = silent, 1 = progress bar.
+ sample_weight: Optional Numpy array of weights for
+ the test samples, used for weighting the loss function.
+ You can either pass a flat (1D)
+ Numpy array with the same length as the input samples
+ (1:1 mapping between weights and samples),
+ or in the case of temporal data,
+ you can pass a 2D array with shape
+ `(samples, sequence_length)`,
+ to apply a different weight to every timestep of every sample.
+ In this case you should make sure to specify
+ `sample_weight_mode="temporal"` in `compile()`. This argument is not
+ supported when `x` is a dataset. Instead, pass
+ sample weights as the third element of `x`.
+ steps: Integer or `None`.
+ Total number of steps (batches of samples)
+ before declaring the evaluation round finished.
+ Ignored with the default value of `None`.
+ If `x` is a `tf.data` dataset and `steps` is
+ `None`, `evaluate` will run until the dataset is exhausted.
+ This argument is not supported with array inputs.
+ callbacks: List of `keras.callbacks.Callback` instances.
+ List of callbacks to apply during evaluation.
+ See [callbacks](/api_docs/python/tf/keras/callbacks).
+ max_queue_size: Integer. Used for generator or `keras.utils.Sequence`
+ input only. Maximum size for the generator queue.
+ If unspecified, `max_queue_size` will default to 10.
+ workers: Integer. Used for generator or `keras.utils.Sequence` input
+ only. Maximum number of processes to spin up when using
+ process-based threading. If unspecified, `workers` will default
+ to 1. If 0, will execute the generator on the main thread.
+ use_multiprocessing: Boolean. Used for generator or
+ `keras.utils.Sequence` input only. If `True`, use process-based
+ threading. If unspecified, `use_multiprocessing` will default to
+ `False`. Note that because this implementation relies on
+ multiprocessing, you should not pass non-picklable arguments to
+ the generator as they can't be passed easily to children processes.
+
+ Returns:
+ Scalar test loss (if the model has a single output and no metrics)
+ or list of scalars (if the model has multiple outputs
+ and/or metrics). The attribute `model.metrics_names` will give you
+ the display labels for the scalar outputs.
+
+ Raises:
+ ValueError: in case of invalid arguments.
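+
+ Example (a minimal sketch on random NumPy data; the model, shapes and
+ metric choice are placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ model.compile(optimizer='sgd', loss='mse', metrics=['mae'])
+ x = np.random.random((16, 4)).astype('float32')
+ y = np.random.random((16, 1)).astype('float32')
+ loss, mae = model.evaluate(x, y, batch_size=8)
+ print(model.metrics_names)  # e.g. ['loss', 'mae']
+ ```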
+ """
+ _keras_api_gauge.get_cell('evaluate_v1').set(True)
+ self._assert_compile_was_called()
+ self._check_call_args('evaluate')
+
+ func = self._select_training_loop(x)
+ return func.evaluate(
+ self,
+ x=x,
+ y=y,
+ batch_size=batch_size,
+ verbose=verbose,
+ sample_weight=sample_weight,
+ steps=steps,
+ callbacks=callbacks,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing)
+
+ def predict(self,
+ x,
+ batch_size=None,
+ verbose=0,
+ steps=None,
+ callbacks=None,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False):
+ """Generates output predictions for the input samples.
+
+ Computation is done in batches.
+
+ Arguments:
+ x: Input samples. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A `tf.data` dataset.
+ - A generator or `keras.utils.Sequence` instance.
+ batch_size: Integer or `None`.
+ Number of samples per batch of computation.
+ If unspecified, `batch_size` will default to 32.
+ Do not specify the `batch_size` if your data is in the
+ form of symbolic tensors, dataset,
+ generators, or `keras.utils.Sequence` instances (since they generate
+ batches).
+ verbose: Verbosity mode, 0 or 1.
+ steps: Total number of steps (batches of samples)
+ before declaring the prediction round finished.
+ Ignored with the default value of `None`. If `x` is a `tf.data`
+ dataset and `steps` is `None`, `predict` will
+ run until the input dataset is exhausted.
+ callbacks: List of `keras.callbacks.Callback` instances.
+ List of callbacks to apply during prediction.
+ See [callbacks](/api_docs/python/tf/keras/callbacks).
+ max_queue_size: Integer. Used for generator or `keras.utils.Sequence`
+ input only. Maximum size for the generator queue.
+ If unspecified, `max_queue_size` will default to 10.
+ workers: Integer. Used for generator or `keras.utils.Sequence` input
+ only. Maximum number of processes to spin up when using
+ process-based threading. If unspecified, `workers` will default
+ to 1. If 0, will execute the generator on the main thread.
+ use_multiprocessing: Boolean. Used for generator or
+ `keras.utils.Sequence` input only. If `True`, use process-based
+ threading. If unspecified, `use_multiprocessing` will default to
+ `False`. Note that because this implementation relies on
+ multiprocessing, you should not pass non-picklable arguments to
+ the generator as they can't be passed easily to children processes.
+
+ Returns:
+ Numpy array(s) of predictions.
+
+ Raises:
+ ValueError: In case of mismatch between the provided
+ input data and the model's expectations,
+ or in case a stateful model receives a number of samples
+ that is not a multiple of the batch size.
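+
+ Example (an illustrative sketch; the model and input shapes are
+ placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(2, input_shape=(4,))])
+ x = np.random.random((16, 4)).astype('float32')
+ preds = model.predict(x, batch_size=8)
+ print(preds.shape)  # (16, 2)
+ ```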
+ """
+ _keras_api_gauge.get_cell('predict_v1').set(True)
+ self._check_call_args('predict')
+
+ func = self._select_training_loop(x)
+ return func.predict(
+ self,
+ x=x,
+ batch_size=batch_size,
+ verbose=verbose,
+ steps=steps,
+ callbacks=callbacks,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing)
+
+ def reset_metrics(self):
+ """Resets the state of metrics."""
+ metrics = self._get_training_eval_metrics()
+ for m in metrics:
+ m.reset_states()
+
+ # Reset metrics on all the distributed (cloned) models.
+ if self._distribution_strategy:
+ distributed_training_utils._reset_metrics(self) # pylint: disable=protected-access
+
+ def train_on_batch(self,
+ x,
+ y=None,
+ sample_weight=None,
+ class_weight=None,
+ reset_metrics=True):
+ """Runs a single gradient update on a single batch of data.
+
+ Arguments:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A dict mapping input names to the corresponding array/tensors,
+ if the model has named inputs.
+ - A `tf.data` dataset.
+ y: Target data. Like the input data `x`, it could be either Numpy
+ array(s) or TensorFlow tensor(s). It should be consistent with `x`
+ (you cannot have Numpy inputs and tensor targets, or vice versa). If
+ `x` is a dataset, `y` should not be specified
+ (since targets will be obtained from the iterator).
+ sample_weight: Optional array of the same length as x, containing
+ weights to apply to the model's loss for each sample. In the case of
+ temporal data, you can pass a 2D array with shape (samples,
+ sequence_length), to apply a different weight to every timestep of
+ every sample. In this case you should make sure to specify
+ sample_weight_mode="temporal" in compile(). This argument is not
+ supported when `x` is a dataset.
+ class_weight: Optional dictionary mapping class indices (integers) to a
+ weight (float) to apply to the model's loss for the samples from this
+ class during training. This can be useful to tell the model to "pay
+ more attention" to samples from an under-represented class.
+ reset_metrics: If `True`, the metrics returned will be only for this
+ batch. If `False`, the metrics will be statefully accumulated across
+ batches.
+
+ Returns:
+ Scalar training loss
+ (if the model has a single output and no metrics)
+ or list of scalars (if the model has multiple outputs
+ and/or metrics). The attribute `model.metrics_names` will give you
+ the display labels for the scalar outputs.
+
+ Raises:
+ ValueError: In case of invalid user-provided arguments.
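+
+ Example (a minimal sketch on a single random batch; the model and
+ shapes are placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ model.compile(optimizer='sgd', loss='mse')
+ x = np.random.random((8, 4)).astype('float32')
+ y = np.random.random((8, 1)).astype('float32')
+ loss = model.train_on_batch(x, y)  # Scalar loss for this batch.
+ ```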
+ """
+ self._assert_compile_was_called()
+ self._check_call_args('train_on_batch')
+ if self._experimental_run_tf_function:
+ outputs = training_v2_utils.train_on_batch(
+ self, x, y=y, sample_weight=sample_weight,
+ class_weight=class_weight, reset_metrics=reset_metrics,
+ standalone=True)
+ outputs = (outputs['total_loss'] + outputs['output_losses'] +
+ outputs['metrics'])
+ outputs = [
+ training_v2_utils._non_none_constant_value(v) for v in outputs] # pylint: disable=protected-access
+ if len(outputs) == 1:
+ outputs = outputs[0]
+ return outputs
+
+ # If at this point we are in the replica context, then it is okay to execute
+ # the Eager code path. The expected way to get here is to call `fit` that
+ # calls `train_on_batch` on each replica.
+ if (self._distribution_strategy and
+ distribution_strategy_context.in_cross_replica_context()):
+ raise NotImplementedError('`train_on_batch` is not supported for models '
+ 'distributed with tf.distribute.Strategy.')
+ # Validate and standardize user data.
+ x, y, sample_weights = self._standardize_user_data(
+ x, y, sample_weight=sample_weight, class_weight=class_weight,
+ extract_tensors_from_dataset=True)
+
+ # If `self._distribution_strategy` is True, then we are in a replica context
+ # at this point because of the check above. `train_on_batch` is being run
+ # for each replica by `self._distribution_strategy` and the same code path
+ # as Eager is expected to be taken.
+ if self.run_eagerly or self._distribution_strategy:
+ output_dict = training_eager.train_on_batch(
+ self,
+ x,
+ y,
+ sample_weights=sample_weights,
+ output_loss_metrics=self._output_loss_metrics)
+ outputs = (output_dict['total_loss'] + output_dict['output_losses']
+ + output_dict['metrics'])
+ outputs = [
+ training_v2_utils._non_none_constant_value(v) for v in outputs] # pylint: disable=protected-access
+ else:
+ x = training_utils.ModelInputs(x).as_list()
+ ins = x + list(y or []) + list(sample_weights or [])
+
+ if not isinstance(K.symbolic_learning_phase(), int):
+ ins += [True] # Add learning phase value.
+
+ self._update_sample_weight_modes(sample_weights=sample_weights)
+ self._make_train_function()
+ outputs = self.train_function(ins) # pylint: disable=not-callable
+
+ if reset_metrics:
+ self.reset_metrics()
+
+ if len(outputs) == 1:
+ return outputs[0]
+ return outputs
+
+ def test_on_batch(self, x, y=None, sample_weight=None, reset_metrics=True):
+ """Test the model on a single batch of samples.
+
+ Arguments:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A dict mapping input names to the corresponding array/tensors,
+ if the model has named inputs.
+ - A `tf.data` dataset.
+ y: Target data. Like the input data `x`,
+ it could be either Numpy array(s) or TensorFlow tensor(s).
+ It should be consistent with `x` (you cannot have Numpy inputs and
+ tensor targets, or vice versa). If `x` is a dataset, `y` should
+ not be specified (since targets will be obtained from the iterator).
+ sample_weight: Optional array of the same length as x, containing
+ weights to apply to the model's loss for each sample.
+ In the case of temporal data, you can pass a 2D array
+ with shape (samples, sequence_length),
+ to apply a different weight to every timestep of every sample.
+ In this case you should make sure to specify
+ sample_weight_mode="temporal" in compile(). This argument is not
+ supported when `x` is a dataset.
+ reset_metrics: If `True`, the metrics returned will be only for this
+ batch. If `False`, the metrics will be statefully accumulated across
+ batches.
+
+ Returns:
+ Scalar test loss (if the model has a single output and no metrics)
+ or list of scalars (if the model has multiple outputs
+ and/or metrics). The attribute `model.metrics_names` will give you
+ the display labels for the scalar outputs.
+
+ Raises:
+ ValueError: In case of invalid user-provided arguments.
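+
+ Example (a minimal sketch mirroring the `train_on_batch` example above;
+ model and shapes are placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ model.compile(optimizer='sgd', loss='mse')
+ x = np.random.random((8, 4)).astype('float32')
+ y = np.random.random((8, 1)).astype('float32')
+ loss = model.test_on_batch(x, y)  # Scalar test loss; no weight update.
+ ```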
+ """
+ self._assert_compile_was_called()
+ self._check_call_args('test_on_batch')
+ if self._experimental_run_tf_function:
+ outputs = training_v2_utils.test_on_batch(
+ self, x, y=y, sample_weight=sample_weight,
+ reset_metrics=reset_metrics, standalone=True)
+ outputs = (outputs['total_loss'] + outputs['output_losses'] +
+ outputs['metrics'])
+ outputs = [
+ training_v2_utils._non_none_constant_value(v) for v in outputs] # pylint: disable=protected-access
+ if len(outputs) == 1:
+ outputs = outputs[0]
+ return outputs
+
+ if (self._distribution_strategy and
+ distribution_strategy_context.in_cross_replica_context()):
+ raise NotImplementedError('`test_on_batch` is not supported for models '
+ 'distributed with tf.distribute.Strategy.')
+ # Validate and standardize user data.
+ x, y, sample_weights = self._standardize_user_data(
+ x, y, sample_weight=sample_weight, extract_tensors_from_dataset=True)
+
+ # If `self._distribution_strategy` is True, then we are in a replica context
+ # at this point.
+ if self.run_eagerly or self._distribution_strategy:
+ output_dict = training_eager.test_on_batch(
+ self,
+ x,
+ y,
+ sample_weights=sample_weights,
+ output_loss_metrics=self._output_loss_metrics)
+ outputs = (output_dict['total_loss'] + output_dict['output_losses']
+ + output_dict['metrics'])
+ outputs = [
+ training_v2_utils._non_none_constant_value(v) for v in outputs] # pylint: disable=protected-access
+ else:
+ x = training_utils.ModelInputs(x).as_list()
+ inputs = x + list(y or []) + list(sample_weights or [])
+
+ self._update_sample_weight_modes(sample_weights=sample_weights)
+ self._make_test_function()
+ outputs = self.test_function(inputs) # pylint: disable=not-callable
+
+ if reset_metrics:
+ self.reset_metrics()
+
+ if len(outputs) == 1:
+ return outputs[0]
+ return outputs
+
+ def predict_on_batch(self, x):
+ """Returns predictions for a single batch of samples.
+
+ Arguments:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A `tf.data` dataset.
+
+ Returns:
+ Numpy array(s) of predictions.
+
+ Raises:
+ ValueError: In case of mismatch between given number of inputs and
+ expectations of the model.
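+
+ Example (an illustrative sketch; model and input shapes are
+ placeholders):
+
+ ```python
+ import numpy as np
+ import tensorflow as tf
+
+ model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
+ x = np.random.random((8, 4)).astype('float32')
+ preds = model.predict_on_batch(x)  # Predictions of shape (8, 1).
+ ```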
+ """
+ self._check_call_args('predict_on_batch')
+ if self._experimental_run_tf_function:
+ return training_v2_utils.predict_on_batch(self, x, standalone=True)
+
+ if (self._distribution_strategy and
+ distribution_strategy_context.in_cross_replica_context()):
+ raise NotImplementedError(
+ '`predict_on_batch` is not supported for models distributed with'
+ ' tf.distribute.Strategy.')
+ # Validate and standardize user data.
+ inputs, _, _ = self._standardize_user_data(
+ x, extract_tensors_from_dataset=True)
+ # If `self._distribution_strategy` is True, then we are in a replica context
+ # at this point.
+ if self.run_eagerly or self._distribution_strategy:
+ inputs = training_utils.cast_if_floating_dtype(inputs)
+ if isinstance(inputs, collections_abc.Sequence):
+ # Unwrap lists with only one input, as we do when training on batch
+ if len(inputs) == 1:
+ inputs = inputs[0]
+
+ return self(inputs) # pylint: disable=not-callable
+
+ self._make_predict_function()
+ outputs = self.predict_function(inputs)
+
+ if len(outputs) == 1:
+ return outputs[0]
+ return outputs
+
+ @deprecation.deprecated(
+ None, 'Please use Model.fit, which supports generators.')
+ def fit_generator(self,
+ generator,
+ steps_per_epoch=None,
+ epochs=1,
+ verbose=1,
+ callbacks=None,
+ validation_data=None,
+ validation_steps=None,
+ validation_freq=1,
+ class_weight=None,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False,
+ shuffle=True,
+ initial_epoch=0):
+ """Fits the model on data yielded batch-by-batch by a Python generator.
+
+ DEPRECATED:
+ `Model.fit` now supports generators, so there is no longer any need to use
+ this endpoint.
+ """
+ return self.fit(
+ generator,
+ steps_per_epoch=steps_per_epoch,
+ epochs=epochs,
+ verbose=verbose,
+ callbacks=callbacks,
+ validation_data=validation_data,
+ validation_steps=validation_steps,
+ validation_freq=validation_freq,
+ class_weight=class_weight,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing,
+ shuffle=shuffle,
+ initial_epoch=initial_epoch)
+
+ @deprecation.deprecated(
+ None, 'Please use Model.evaluate, which supports generators.')
+ def evaluate_generator(self,
+ generator,
+ steps=None,
+ callbacks=None,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False,
+ verbose=0):
+ """Evaluates the model on a data generator.
+
+ DEPRECATED:
+ `Model.evaluate` now supports generators, so there is no longer any need
+ to use this endpoint.
+ """
+ self._check_call_args('evaluate_generator')
+
+ return self.evaluate(
+ generator,
+ steps=steps,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing,
+ verbose=verbose,
+ callbacks=callbacks)
+
+ @deprecation.deprecated(
+ None, 'Please use Model.predict, which supports generators.')
+ def predict_generator(self,
+ generator,
+ steps=None,
+ callbacks=None,
+ max_queue_size=10,
+ workers=1,
+ use_multiprocessing=False,
+ verbose=0):
+ """Generates predictions for the input samples from a data generator.
+
+ DEPRECATED:
+ `Model.predict` now supports generators, so there is no longer any need
+ to use this endpoint.
+ """
+ return self.predict(
+ generator,
+ steps=steps,
+ max_queue_size=max_queue_size,
+ workers=workers,
+ use_multiprocessing=use_multiprocessing,
+ verbose=verbose,
+ callbacks=callbacks)
+
+ def _check_call_args(self, method_name):
+ """Check that `call` has only one positional arg."""
+ # Always allow first arg, regardless of arg name.
+ fullargspec = self._call_full_argspec
+ if fullargspec.defaults:
+ positional_args = fullargspec.args[:-len(fullargspec.defaults)]
+ else:
+ positional_args = fullargspec.args
+ if 'training' in positional_args:
+ positional_args.remove('training')
+
+ # self and first arg can be positional.
+ if len(positional_args) > 2:
+ extra_args = positional_args[2:]
+ raise ValueError(
+ 'Models passed to `' + method_name + '` can only have `training` '
+ 'and the first argument in `call` as positional arguments, '
+ 'found: ' + str(extra_args) + '.')
+
+ def _set_optimizer(self, optimizer):
+ """Sets self.optimizer.
+
+ Sets self.optimizer to `optimizer`, potentially wrapping it with a
+ LossScaleOptimizer.
+
+ Args:
+ optimizer: The optimizer(s) to assign to self.optimizer.
+ """
+ if isinstance(optimizer, (list, tuple)):
+ self.optimizer = [optimizers.get(opt) for opt in optimizer]
+ else:
+ self.optimizer = optimizers.get(optimizer)
+
+ if (self._dtype_policy.loss_scale is not None and
+ not isinstance(self.optimizer,
+ loss_scale_optimizer.LossScaleOptimizer)):
+ if isinstance(self.optimizer, list):
+ raise ValueError('When a dtype policy with a loss scale is used, you '
+ 'can only pass a single optimizer. Using policy %s '
+ 'and got optimizers: %s' %
+ (self._dtype_policy, self.optimizer))
+ if not isinstance(self.optimizer, optimizer_v2.OptimizerV2):
+ raise ValueError('"optimizer" must be an instance of '
+ 'tf.keras.optimizers.Optimizer when a dype policy '
+ 'with a loss scale used, but got: %s. Using policy: '
+ '%s' %
+ (self.optimizer, self._dtype_policy))
+ self.optimizer = loss_scale_optimizer.LossScaleOptimizer(
+ self.optimizer, self._dtype_policy.loss_scale)
+ if (isinstance(self.optimizer, loss_scale_optimizer.LossScaleOptimizer) and
+ self._dtype_policy.loss_scale and
+ self.optimizer.loss_scale != self._dtype_policy.loss_scale):
+ logging.warning('LossScale of LossScaleOptimizer passed to compile (%s) '
+ 'is not the same as the dtype policy\'s loss scale (%s). '
+ 'Because the dtype policy has a loss scale, you should '
+ 'pass an optimizer that is not wrapped with a '
+ 'LossScaleOptimizer.'
+ % (self.optimizer.loss_scale,
+ self._dtype_policy.loss_scale))
+
+ def _prepare_validation_data(self, validation_data, batch_size,
+ validation_steps):
+ """Unpack and check the validation data."""
+ val_x, val_y, val_sample_weights = training_utils.unpack_validation_data(
+ validation_data)
+ return self._standardize_user_data(
+ val_x,
+ val_y,
+ sample_weight=val_sample_weights,
+ batch_size=batch_size,
+ steps=validation_steps,
+ steps_name='validation_steps')
+
+ def _validate_compile_param_for_distribution_strategy(
+ self, run_eagerly, sample_weight_mode, target_tensors, weighted_metrics):
+ # Validate that arguments passed by the user to `compile` are supported by
+ # tf.distribute.Strategy.
+ if self._distribution_strategy:
+ if sample_weight_mode:
+ raise NotImplementedError('sample_weight_mode is not supported with '
+ 'tf.distribute.Strategy.')
+ if weighted_metrics:
+ raise NotImplementedError('weighted_metrics is not supported with '
+ 'tf.distribute.Strategy.')
+ if target_tensors:
+ raise ValueError('target_tensors is not supported with '
+ 'tf.distribute.Strategy.')
+
+ if run_eagerly:
+ raise ValueError(
+ 'We currently do not support enabling `run_eagerly` with '
+ 'distribution strategy.')
+
+ if (distributed_training_utils.is_distributing_by_cloning(self) and
+ (not self.built or not self.inputs or not self.outputs)):
+ raise ValueError(
+ 'We currently do not support distribution strategy with a '
+ '`Sequential` model that is created without `input_shape`/'
+ '`input_dim` set in its first layer or a subclassed model.')
+
+ def _process_target_tensor_for_compile(self, target_tensors):
+ if self.run_eagerly:
+ # target tensor is not supported with run_eagerly. Create a list with None
+ # as placeholder for each output.
+ return [None for _ in self.output_names]
+
+ if target_tensors is not None and not (isinstance(target_tensors, list) and
+ target_tensors == []): # pylint: disable=g-explicit-bool-comparison
+ if isinstance(target_tensors, list):
+ if len(target_tensors) != len(self.outputs):
+ raise ValueError(
+ 'When passing a list as `target_tensors`, '
+ 'it should have one entry per model output. '
+ 'The model has %s outputs, but you passed target_tensors=%s' %
+ (len(self.outputs), target_tensors))
+ elif isinstance(target_tensors, dict):
+ unexpected_target_tensor_names = set(target_tensors.keys()).difference(
+ self.output_names)
+ if unexpected_target_tensor_names:
+ raise ValueError(
+ 'Unknown entry in `target_tensors` dictionary: "{name}". '
+ 'Only expected the following keys: {keys}'.format(
+ name=unexpected_target_tensor_names,
+ keys=str(self.output_names)))
+ tmp_target_tensors = []
+ for name in self.output_names:
+ tmp_target_tensors.append(target_tensors.get(name, None))
+ target_tensors = tmp_target_tensors
+ elif tensor_util.is_tensor(target_tensors):
+ target_tensors = [target_tensors]
+ else:
+ raise TypeError('Expected `target_tensors` to be a list, tuple, dict '
+ 'or a single tensor, but got: %s' % (target_tensors,))
+ else:
+ # In case target tensor is empty or None, create a list with Nones
+ # that has same length as self.output_names. With that, the None check of
+ # target tensor can be skipped downstream.
+ target_tensors = [None for _ in self.output_names]
+ return target_tensors
+
+ def _compile_eagerly(self, metrics, weighted_metrics, sample_weight_mode):
+ # Prepare sample weight modes. List with the same length as model outputs.
+ training_utils.prepare_sample_weight_modes(
+ self._training_endpoints, sample_weight_mode)
+ # Prepare sample weights.
+ self._prepare_sample_weights()
+ # Save all metric attributes per output of the model.
+ self._cache_output_metric_attributes(metrics, weighted_metrics)
+ self.total_loss = None
+ # Set metric attributes on model.
+ self._set_metric_attributes()
+
+ self._collected_trainable_weights = self.trainable_weights
+
+ def _update_sample_weight_modes(self, sample_weights=None):
+ """Updates sample weight modes based on training/eval inputs.
+
+ Sample weight placeholders will be created for all or no outputs
+ based on whether sample_weight is provided for any output.
+
+ If the model contains `_sample_weight_modes`, we check whether the input
+ `sample_weights` corresponds to the sample weight modes.
+ 1. Set sample weight mode to be 'temporal' for output i, if `compile`
+ sample_weight_mode was set to `temporal` and sample weight inputs
+ are given for one or more outputs.
+ 2. Set sample weight mode to be 'samplewise' for output i, if `compile`
+ sample_weight_mode was not set and sample weight inputs are given for
+ one or more outputs.
+ 3. Reset sample weight mode to None for output i if sample weight mode
+ was set but there is no sample weight input.
+
+ Args:
+ sample_weights: List of sample weights of the same length as model outputs
+ or None.
+ """
+ if not self._is_compiled:
+ return
+ if sample_weights and any(s is not None for s in sample_weights):
+ for endpoint in self._training_endpoints:
+ endpoint.sample_weight_mode = (
+ endpoint.sample_weight_mode or 'samplewise')
+ else:
+ for endpoint in self._training_endpoints:
+ endpoint.sample_weight_mode = None
+
+ def _recompile_weights_loss_and_weighted_metrics(self):
+ if not self._is_compiled:
+ return False
+ recompile = any(e.sample_weights_mismatch()
+ for e in self._training_endpoints)
+
+ if recompile:
+ self._compile_weights_loss_and_weighted_metrics()
+ return recompile
+
+ @trackable.no_automatic_dependency_tracking
+ def _compile_weights_loss_and_weighted_metrics(self, sample_weights=None):
+ """Compiles the model loss and weighted metric sub-graphs.
+
+ This may be used to set graph tensors as sample weights (instead of creating
+ placeholders). This functionality is necessary for
+ `tf.keras.estimator.model_to_estimator`, which calls Keras models in a v1
+ graph, and creates iterator tensors for inputs, targets, and sample weights.
+
+ Args:
+ sample_weights: List of tensors to use as the sample weights. Must be the
+ same length as the number of outputs. If left as `None`, placeholders
+ are used instead.
+ """
+ with K.get_graph().as_default():
+ if sample_weights is not None:
+ self._update_sample_weight_modes(sample_weights)
+ self._prepare_sample_weights(sample_weights)
+
+ masks = self._prepare_output_masks()
+
+ # Compute weighted metrics.
+ self._handle_metrics(
+ self.outputs,
+ targets=self._targets,
+ skip_target_masks=self._prepare_skip_target_masks(),
+ sample_weights=self.sample_weights,
+ masks=masks,
+ return_weighted_metrics=True)
+
+ # Compute total loss.
+ # Used to keep track of the total loss value (stateless).
+ # eg., total_loss = loss_weight_1 * output_1_loss_fn(...) +
+ # loss_weight_2 * output_2_loss_fn(...) +
+ # layer losses.
+ self.total_loss = self._prepare_total_loss(masks)
+
+ def _prepare_skip_target_masks(self):
+ """Boolean mask for whether the target in the output list should be skipped.
+
+ If the loss function corresponding to a model output is None, then this
+ output will be skipped during total loss calculation and feed targets
+ preparation.
+
+ Returns:
+ A boolean list for whether the corresponding target in the output list
+ should be skipped during loss calculation.
+ """
+ return [l is None for l in self.loss_functions]
+
+ def _prepare_output_masks(self):
+ """Returns masks corresponding to model outputs."""
+ return [getattr(x, '_keras_mask', None) for x in self.outputs]
+
+ def _prepare_total_loss(self, masks):
+ """Computes total loss from loss functions.
+
+ Arguments:
+ masks: List of mask values corresponding to each model output.
+
+ Returns:
+ Total loss as a scalar tensor, combining the weighted per-output
+ losses and any layer-added (e.g. regularization) losses.
+
+ Raises:
+ TypeError: If model run_eagerly is True.
+ """
+ if self.run_eagerly:
+ raise TypeError('Total loss cannot be computed when compiled with '
+ 'run_eagerly = True.')
+ total_loss = None
+ with K.name_scope('loss'):
+ for endpoint, mask in zip(self._training_endpoints, masks):
+ if endpoint.should_skip_target():
+ continue
+ y_true = endpoint.training_target.target
+ y_pred = endpoint.output
+ loss_fn = endpoint.loss_fn
+ loss_weight = endpoint.loss_weight
+ loss_name = endpoint.loss_name()
+ sample_weight = endpoint.sample_weight
+
+ with K.name_scope(loss_name):
+ if mask is not None:
+ mask = math_ops.cast(mask, y_pred.dtype)
+ # Update weights with mask.
+ if sample_weight is None:
+ sample_weight = mask
+ else:
+ # Update dimensions of weights to match with mask if possible.
+ mask, _, sample_weight = (
+ tf_losses_utils.squeeze_or_expand_dimensions(
+ mask, sample_weight=sample_weight))
+ sample_weight *= mask
+
+ if hasattr(loss_fn, 'reduction'):
+ per_sample_losses = loss_fn.call(y_true, y_pred)
+ weighted_losses = losses_utils.compute_weighted_loss(
+ per_sample_losses,
+ sample_weight=sample_weight,
+ reduction=losses_utils.ReductionV2.NONE)
+ loss_reduction = loss_fn.reduction
+
+ # `AUTO` loss reduction defaults to `SUM_OVER_BATCH_SIZE` for all
+ # compile use cases.
+ if loss_reduction == losses_utils.ReductionV2.AUTO:
+ loss_reduction = losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE
+
+ # Compute the stateless loss value.
+ output_loss = losses_utils.reduce_weighted_loss(
+ weighted_losses, reduction=loss_reduction)
+ else:
+ # Compute the stateless loss value for a custom loss class.
+ # Here we assume that the class takes care of loss reduction
+ # because if this class returns a vector value we cannot
+ # differentiate between use case where a custom optimizer
+ # expects a vector loss value vs unreduced per-sample loss value.
+ output_loss = loss_fn(y_true, y_pred, sample_weight=sample_weight)
+ loss_reduction = losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE
+
+ if len(self.outputs) > 1:
+ # Keep track of stateful result tensor for the loss.
+ endpoint.output_loss_metric(output_loss)
+
+ # Scale output loss for distribution. For custom losses we assume
+ # reduction was mean.
+ if loss_reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE:
+ output_loss = losses_utils.scale_loss_for_distribution(output_loss)
+
+ if total_loss is None:
+ total_loss = loss_weight * output_loss
+ else:
+ total_loss += loss_weight * output_loss
+ if total_loss is None:
+ if not self.losses:
+ raise ValueError('The model cannot be compiled '
+ 'because it has no loss to optimize.')
+ else:
+ total_loss = 0.
+
+ # Add regularization penalties and other layer-specific losses.
+ custom_losses = self.get_losses_for(None) + self.get_losses_for(
+ self.inputs)
+ if custom_losses:
+ total_loss += losses_utils.scale_loss_for_distribution(
+ math_ops.add_n(custom_losses))
+ return total_loss
+
+ def _get_callback_model(self):
+ """Returns the Callback Model for this Model."""
+
+ if hasattr(self, '_replicated_model') and self._replicated_model:
+ # When using training_distributed, we set the callback model
+ # to an instance of the `DistributedModel` that we create in
+ # the `compile` call. The `DistributedModel` is initialized
+ # with the first replicated model. We need to set the callback
+ # model to a DistributedModel to allow us to override saving
+ # and loading weights when we checkpoint the model during training.
+ return self._replicated_model
+ if hasattr(self, 'callback_model') and self.callback_model:
+ return self.callback_model
+ return self
+
+ @trackable.no_automatic_dependency_tracking
+ def _make_callback_model(self, grouped_model):
+ first_replicated_model = self._distribution_strategy.unwrap(
+ grouped_model)[0]
+ # We initialize the callback model with the first replicated model.
+ self._replicated_model = DistributedCallbackModel(first_replicated_model)
+ self._replicated_model.set_original_model(self)
+
+ def _validate_or_infer_batch_size(self, batch_size, steps, x):
+ """Validates that the `batch_size` provided is consistent with InputLayer.
+
+ It's possible that the user specified a static batch size in their
+ InputLayer. If so, this method checks the provided `batch_size` and `x`
+ arguments are consistent with this static batch size. Also, if
+ `batch_size` is `None`, this method will attempt to infer the batch size
+ from the static batch size of the InputLayer. Lastly, ValueError will be
+ raised if `x` is a tf.data.Dataset and `batch_size` is specified, as we
+ expect users to provide batched datasets.
+
+ Arguments:
+ batch_size: The batch_size provided as an argument to
+ fit/evaluate/predict.
+ steps: The steps provided as an argument to fit/evaluate/predict.
+ x: The data passed as `x` to fit/evaluate/predict.
+
+ Returns:
+ The validated batch_size, auto-inferred from the first layer if not
+ provided.
+ """
+ if (isinstance(x, (dataset_ops.DatasetV1,
+ dataset_ops.DatasetV2,
+ data_utils.Sequence)) or
+ tf_inspect.isgenerator(x)):
+ if batch_size is not None:
+ raise ValueError(
+ 'The `batch_size` argument must not be specified for the given '
+ 'input type. Received input: {}, batch_size: {}'.format(
+ x, batch_size))
+ return
+
+ # Avoids the override in Sequential.layers which filters Input layers.
+ # (Which are often the very layers that we're after.)
+ layers = trackable_layer_utils.filter_empty_layer_containers(self._layers)
+ first_layer = next(layers, None)
+ if first_layer:
+ # The per-replica static batch size.
+ static_batch_size = training_utils.get_static_batch_size(first_layer)
+ if static_batch_size is not None:
+
+ # Determine number of times the user-supplied batch size will be split.
+ if (self._distribution_strategy and
+ distributed_training_utils.global_batch_size_supported(
+ self._distribution_strategy)):
+ num_splits_for_ds = self._distribution_strategy.num_replicas_in_sync
+ else:
+ num_splits_for_ds = 1
+
+ # Check `batch_size` argument is consistent with InputLayer.
+ if batch_size is not None:
+ if batch_size % num_splits_for_ds != 0:
+ raise ValueError('The `batch_size` argument ({}) must be divisible '
+ 'by the number of replicas ({})'.format(
+ batch_size, num_splits_for_ds))
+ per_replica_batch_size = batch_size // num_splits_for_ds
+
+ if per_replica_batch_size != static_batch_size:
+ raise ValueError('The `batch_size` argument value {} is '
+ 'incompatible with the specified batch size of '
+ 'your Input Layer: {}'.format(
+ per_replica_batch_size, static_batch_size))
+
+ # Check Dataset/Iterator batch size is consistent with InputLayer.
+ if isinstance(x, (dataset_ops.DatasetV2, iterator_ops.Iterator,
+ iterator_ops.OwnedIterator)):
+ ds_batch_size = tensor_shape.as_dimension(
+ nest.flatten(dataset_ops.get_legacy_output_shapes(x))[0][0]).value
+ if ds_batch_size is not None:
+ if ds_batch_size % num_splits_for_ds != 0:
+ raise ValueError(
+ 'The batch output shape of your `Dataset` ({}) is not divisible '
+ 'by the number of replicas ({})'.format(
+ ds_batch_size, num_splits_for_ds))
+
+ ds_per_replica_batch_size = ds_batch_size // num_splits_for_ds
+ if ds_per_replica_batch_size != static_batch_size:
+ raise ValueError('The batch output shape of your `Dataset` is '
+ '{}, which is incompatible with the specified '
+ 'batch size of your Input Layer: {}'.format(
+ ds_per_replica_batch_size,
+ static_batch_size))
+
+ # Set inferred batch size from the InputLayer.
+ if steps is None:
+ batch_size = static_batch_size * num_splits_for_ds
+
+ if batch_size is None and steps is None:
+      # Backwards compatibility: fall back to the default batch size of 32.
+ batch_size = 32
+ return batch_size
+
+ def _prepare_sample_weights(self, sample_weights=None):
+ """Sets sample weight attribute on the model."""
+ # List with the same length as model outputs.
+ if sample_weights is not None:
+ if len(sample_weights) != len(self._training_endpoints):
+        raise ValueError('Provided sample weights must have the same length '
+                         'as the number of outputs. Expected: {}, got: {}.'.format(
+ len(self._training_endpoints),
+ len(sample_weights)))
+ else:
+ sample_weights = [None] * len(self._training_endpoints)
+ for endpoint, weight in zip(self._training_endpoints, sample_weights):
+ endpoint.populate_sample_weight(weight, endpoint.sample_weight_mode)
+
+ def _cache_output_metric_attributes(self, metrics, weighted_metrics):
+ """Caches metric name and function attributes for every model output."""
+ output_shapes = []
+ for output in self.outputs:
+ if output is None or output.shape.rank is None:
+ output_shapes.append(None)
+ else:
+ output_shapes.append(output.shape.as_list())
+ self._per_output_metrics = training_utils.collect_per_output_metric_info(
+ metrics, self.output_names, output_shapes, self.loss_functions)
+ self._per_output_weighted_metrics = (
+ training_utils.collect_per_output_metric_info(
+ weighted_metrics,
+ self.output_names,
+ output_shapes,
+ self.loss_functions,
+ is_weighted=True))
+
+ def _add_unique_metric_name(self, metric_name, output_index):
+ """Makes the metric name unique and adds it to the model's metric name list.
+
+ If there are multiple outputs for which the metrics are calculated, the
+ metric names have to be made unique by appending an integer.
+
+ Arguments:
+ metric_name: Metric name that corresponds to the metric specified by the
+ user. For example: 'acc'.
+ output_index: The index of the model output for which the metric name is
+ being added.
+
+ Returns:
+      String, the unique metric name to use for the given model output.
+ """
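+    # Illustrative example (hypothetical names): with output_names
+    # ['out_a', 'out_b'] and metric 'acc', the names become 'out_a_acc' and
+    # 'out_b_acc'; a clash with an existing name then yields 'out_a_acc_1',
+    # 'out_a_acc_2', and so on.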
+ if len(self.output_names) > 1:
+ metric_name = '%s_%s' % (self.output_names[output_index], metric_name)
+ j = 1
+ base_metric_name = metric_name
+ while metric_name in self.metrics_names:
+ metric_name = '%s_%d' % (base_metric_name, j)
+ j += 1
+
+ return metric_name
+
+ def _init_metric_attributes(self):
+    """Initializes model metric attributes."""
+ # List of stateful metric functions. Used for resetting metric state during
+ # training/eval.
+ self._compile_metric_functions = []
+
+ def _set_per_output_metric_attributes(self, metrics_dict, output_index):
+ """Sets the metric attributes on the model for the given output.
+
+ Arguments:
+ metrics_dict: A dict with metric names as keys and metric fns as values.
+ output_index: The index of the model output for which the metric
+ attributes are added.
+
+ Returns:
+ Metrics dict updated with unique metric names as keys.
+ """
+ updated_metrics_dict = collections.OrderedDict()
+ for metric_name, metric_fn in metrics_dict.items():
+ metric_name = self._add_unique_metric_name(metric_name, output_index)
+
+ # Update the name on the metric class to be the unique generated name.
+ metric_fn._name = metric_name # pylint: disable=protected-access
+ updated_metrics_dict[metric_name] = metric_fn
+ # Keep track of metric name and function.
+ self._compile_metric_functions.append(metric_fn)
+ return updated_metrics_dict
+
+ def _set_metric_attributes(self):
+ """Sets the metric attributes on the model for all the model outputs."""
+ updated_per_output_metrics = []
+ updated_per_output_weighted_metrics = []
+ for i, endpoint in enumerate(self._training_endpoints):
+ if endpoint.should_skip_target():
+ updated_per_output_metrics.append(self._per_output_metrics[i])
+ updated_per_output_weighted_metrics.append(
+ self._per_output_weighted_metrics[i])
+ continue
+ updated_per_output_metrics.append(
+ self._set_per_output_metric_attributes(self._per_output_metrics[i],
+ i))
+ updated_per_output_weighted_metrics.append(
+ self._set_per_output_metric_attributes(
+ self._per_output_weighted_metrics[i], i))
+
+ # Create a metric wrapper for each output loss. This computes mean of an
+ # output loss across mini-batches (irrespective of how we reduce within a
+ # batch).
+ if len(self._training_endpoints) > 1:
+ for endpoint in self._training_endpoints:
+ if not endpoint.should_skip_target():
+ endpoint.output_loss_metric = metrics_module.Mean(
+ name=endpoint.loss_name())
+
+ self._per_output_metrics = updated_per_output_metrics
+ self._per_output_weighted_metrics = updated_per_output_weighted_metrics
+
+ def _handle_per_output_metrics(self,
+ metrics_dict,
+ y_true,
+ y_pred,
+ mask,
+ weights=None):
+ """Calls metric functions for a single output.
+
+ Arguments:
+ metrics_dict: A dict with metric names as keys and metric fns as values.
+ y_true: Target output.
+ y_pred: Predicted output.
+ mask: Computed mask value for the current output.
+ weights: Weights to be applied on the current output.
+
+ Returns:
+ A list of metric result tensors.
+ """
+ metric_results = []
+ for metric_name, metric_fn in metrics_dict.items():
+ with K.name_scope(metric_name):
+ metric_result = training_utils.call_metric_function(
+ metric_fn, y_true, y_pred, weights=weights, mask=mask)
+ metric_results.append(metric_result)
+ return metric_results
+
+ def _handle_metrics(self,
+ outputs,
+ targets=None,
+ skip_target_masks=None,
+ sample_weights=None,
+ masks=None,
+ return_weighted_metrics=False,
+ return_weighted_and_unweighted_metrics=False):
+ """Handles calling metric functions.
+
+ Arguments:
+ outputs: List of outputs (predictions).
+ targets: List of targets.
+      skip_target_masks: Optional. List of booleans indicating whether the
+        corresponding target should be ignored.
+ sample_weights: Optional list of sample weight arrays.
+ masks: List of computed output mask values.
+ return_weighted_metrics: Flag that indicates whether weighted metrics
+ should be computed instead of unweighted metrics. This flag is ignored
+ when `return_weighted_and_unweighted_metrics` is enabled.
+ return_weighted_and_unweighted_metrics: Flag that is used to indicate
+ whether both weighted and unweighted metrics should be computed. When
+ this is not enabled, we use `return_weighted_metrics` param to indicate
+ whether weighted or unweighted metrics should be returned.
+
+ Returns:
+ A list of metric result tensors.
+ """
+ # TODO(scottzhu): Update this to use the new training_endpoints. Currently
+    # the eager and graph logic is a bit different.
+ skip_target_masks = skip_target_masks or [False] * len(outputs)
+ metric_results = []
+ with K.name_scope('metrics'):
+ # Invoke all metrics added using `compile`.
+ for i in range(len(outputs)):
+ if skip_target_masks[i]:
+ continue
+ output = outputs[i] if outputs else None
+ target = targets[i] if targets else None
+ output_mask = masks[i] if masks else None
+
+ if (return_weighted_and_unweighted_metrics or
+ not return_weighted_metrics):
+ metric_results.extend(
+ self._handle_per_output_metrics(self._per_output_metrics[i],
+ target, output, output_mask))
+ if return_weighted_and_unweighted_metrics or return_weighted_metrics:
+ metric_results.extend(
+ self._handle_per_output_metrics(
+ self._per_output_weighted_metrics[i],
+ target,
+ output,
+ output_mask,
+ weights=sample_weights[i] if sample_weights else None))
+ return metric_results
+
+ def _check_trainable_weights_consistency(self):
+ """Check trainable weights count consistency.
+
+ This will raise a warning if `trainable_weights` and
+ `_collected_trainable_weights` are inconsistent (i.e. have different
+ number of parameters).
+ Inconsistency will typically arise when one modifies `model.trainable`
+ without calling `model.compile` again.
+ """
+ if not hasattr(self, '_collected_trainable_weights'):
+ return
+
+ if len(self.trainable_weights) != len(self._collected_trainable_weights):
+ logging.log_first_n(
+          logging.WARN, 'Discrepancy between trainable weights and collected'
+          ' trainable weights. Did you set `model.trainable`'
+          ' without calling `model.compile` afterwards?', 1)
+
+ def _make_train_function(self):
+ has_recompiled = self._recompile_weights_loss_and_weighted_metrics()
+ self._check_trainable_weights_consistency()
+ if isinstance(self.optimizer, list):
+ raise ValueError('The `optimizer` in `compile` should be a single '
+ 'optimizer.')
+    # If we have re-compiled the loss/weighted metric sub-graphs, then we
+    # create the train function even if one exists already. This is because
+    # the `_feed_sample_weights` list has been updated on re-compile.
+ if getattr(self, 'train_function', None) is None or has_recompiled:
+ # Restore the compiled trainable state.
+ current_trainable_state = self._get_trainable_state()
+ self._set_trainable_state(self._compiled_trainable_state)
+
+ inputs = (self._feed_inputs +
+ self._feed_targets +
+ self._feed_sample_weights)
+ if not isinstance(K.symbolic_learning_phase(), int):
+ inputs += [K.symbolic_learning_phase()]
+
+ with K.get_graph().as_default():
+ with K.name_scope('training'):
+ # Training updates
+ updates = self.optimizer.get_updates(
+ params=self._collected_trainable_weights, loss=self.total_loss)
+ # Unconditional updates
+ updates += self.get_updates_for(None)
+ # Conditional updates relevant to this model
+ updates += self.get_updates_for(self.inputs)
+
+ metrics = self._get_training_eval_metrics()
+ metrics_tensors = [
+ m._call_result for m in metrics if hasattr(m, '_call_result') # pylint: disable=protected-access
+ ]
+
+ with K.name_scope('training'):
+ # Gets loss and metrics. Updates weights at each call.
+ fn = K.function(
+ inputs, [self.total_loss] + metrics_tensors,
+ updates=updates,
+ name='train_function',
+ **self._function_kwargs)
+ setattr(self, 'train_function', fn)
+
+ # Restore the current trainable state
+ self._set_trainable_state(current_trainable_state)
+
+ def _make_test_function(self):
+ has_recompiled = self._recompile_weights_loss_and_weighted_metrics()
+    # If we have re-compiled the loss/weighted metric sub-graphs, then we
+    # create the test function even if one exists already. This is because
+    # the `_feed_sample_weights` list has been updated on re-compile.
+ if getattr(self, 'test_function', None) is None or has_recompiled:
+ inputs = (self._feed_inputs +
+ self._feed_targets +
+ self._feed_sample_weights)
+
+ with K.get_graph().as_default():
+ metrics = self._get_training_eval_metrics()
+ metrics_tensors = [
+ m._call_result for m in metrics if hasattr(m, '_call_result') # pylint: disable=protected-access
+ ]
+
+ with K.name_scope('evaluation'):
+ updates = self.state_updates
+ # Return loss and metrics, no gradient updates.
+ # Does update the network states.
+ fn = K.function(
+ inputs, [self.total_loss] + metrics_tensors,
+ updates=updates,
+ name='test_function',
+ **self._function_kwargs)
+ setattr(self, 'test_function', fn)
+
+ def _make_predict_function(self):
+ if not hasattr(self, 'predict_function'):
+ self.predict_function = None
+ if self.predict_function is None:
+ inputs = self._feed_inputs
+ # Gets network outputs. Does not update weights.
+ # Does update the network states.
+ kwargs = getattr(self, '_function_kwargs', {})
+ with K.name_scope(ModeKeys.PREDICT):
+ self.predict_function = K.function(
+ inputs,
+ self.outputs,
+ updates=self.state_updates,
+ name='predict_function',
+ **kwargs)
+
+ def _make_execution_function(self, mode):
+ if mode == ModeKeys.TRAIN:
+ self._make_train_function()
+ return self.train_function
+ if mode == ModeKeys.TEST:
+ self._make_test_function()
+ return self.test_function
+ if mode == ModeKeys.PREDICT:
+ self._make_predict_function()
+ return self.predict_function
+
+ def _distribution_standardize_user_data(self,
+ x,
+ y=None,
+ sample_weight=None,
+ class_weight=None,
+ batch_size=None,
+ validation_split=0,
+ shuffle=False,
+ epochs=1,
+ allow_partial_batch=False):
+ """Runs validation checks on input and target data passed by the user.
+
+ This is called when using tf.distribute.Strategy to train, evaluate or serve
+ the model.
+
+ Args:
+ x: Input data. A numpy array or `tf.data` dataset.
+ y: Target data. A numpy array or None if x is a `tf.data` dataset.
+ sample_weight: An optional sample-weight array passed by the user to
+ weight the importance of each sample in `x`.
+      class_weight: An optional class-weight array passed by the user to
+ weight the importance of samples in `x` based on the class they belong
+ to, as conveyed by `y`.
+ batch_size: Integer batch size. If provided, it is used to run additional
+ validation checks on stateful models.
+ validation_split: Float between 0 and 1.
+ Fraction of the training data to be used as validation data.
+ shuffle: Boolean whether to shuffle the training data before each epoch.
+ epochs: Integer epochs. If > 1, repeat the numpy training data epochs
+ times when converting to training dataset.
+      allow_partial_batch: Boolean whether to allow the final batch to be
+        smaller than the others, i.e. not enforce that all batches have the
+        same size.
+
+ Returns:
+ Dataset instance.
+
+ Raises:
+ ValueError: In case of invalid user-provided data.
+ RuntimeError: If the model was never compiled.
+ """
+ if class_weight:
+ raise NotImplementedError('`class_weight` is currently not supported '
+ 'when using tf.distribute.Strategy.')
+
+ if (sample_weight is not None and sample_weight.all() and
+ distributed_training_utils.is_tpu_strategy(
+ self._distribution_strategy)):
+ raise NotImplementedError('`sample_weight` is currently not supported '
+ 'when using TPUStrategy.')
+
+ # Validates `steps` and `shuffle` arguments right at the beginning
+ # since we use it to construct the dataset object.
+ # TODO(anjalisridhar): Remove this check once we refactor the
+ # _standardize_user_data code path. This check is already present elsewhere
+ # in the codebase.
+ if isinstance(x, dataset_ops.DatasetV2):
+ if shuffle:
+ training_utils.verify_dataset_shuffled(x)
+
+ strategy = self._distribution_strategy
+ with strategy.scope():
+ # We should be sure to call get_session() inside the strategy.scope()
+ # so the strategy can affect the session options.
+ if ops.executing_eagerly_outside_functions():
+ session = None
+ else:
+ session = K.get_session()
+
+ first_x_value = nest.flatten(x)[0]
+ if isinstance(first_x_value, np.ndarray):
+ x = training_utils.list_to_tuple(x)
+ if y is not None:
+ y = training_utils.list_to_tuple(y)
+ if sample_weight is not None:
+ sample_weight = training_utils.list_to_tuple(sample_weight)
+ in_tuple = (x, y, sample_weight)
+ else:
+ in_tuple = (x, y)
+ else:
+ in_tuple = x
+
+ ds = strategy.extended.experimental_make_numpy_dataset(in_tuple,
+ session=session)
+ if shuffle:
+ # We want a buffer size that is larger than the batch size provided by
+ # the user and provides sufficient randomness. Note that larger
+ # numbers introduce more memory usage based on the size of each
+ # sample.
+ ds = ds.shuffle(max(1024, batch_size * 8))
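+        # e.g. a batch_size of 256 yields a shuffle buffer of 2048 elements.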
+ if epochs > 1:
+ ds = ds.repeat(epochs)
+
+ # We need to use the drop_remainder argument to get a known static
+ # input shape which is required for TPUs.
+ drop_remainder = (not allow_partial_batch and
+ strategy.extended.experimental_require_static_shapes)
+
+ # TODO(b/131720208): We still drop remainder here if number of examples
+ # is divisible by batch size, as sometimes dynamic padder will time out
+ # with keras.metrics.CategoricalAccuracy() metric.
+ if distributed_training_utils.is_tpu_strategy(
+ strategy) and not drop_remainder:
+ dataset_size = first_x_value.shape[0]
+ if dataset_size % batch_size == 0:
+ drop_remainder = True
+
+ x = ds.batch(batch_size, drop_remainder=drop_remainder)
+ else:
+ assert isinstance(x, dataset_ops.DatasetV2)
+ training_utils.validate_dataset_input(x, y, sample_weight,
+ validation_split)
+ return x
+
+ def _standardize_user_data(self,
+ x,
+ y=None,
+ sample_weight=None,
+ class_weight=None,
+ batch_size=None,
+ check_steps=False,
+ steps_name='steps',
+ steps=None,
+ validation_split=0,
+ shuffle=False,
+ extract_tensors_from_dataset=False):
+ """Runs validation checks on input and target data passed by the user.
+
+ Also standardizes the data to lists of arrays, in order.
+
+ Also builds and compiles the model on the fly if it is a subclassed model
+ that has never been called before (and thus has no inputs/outputs).
+
+ This is a purely internal method, subject to refactoring at any time.
+
+ Args:
+ x: Input data. It could be:
+ - A Numpy array (or array-like), or a list of arrays
+ (in case the model has multiple inputs).
+ - A TensorFlow tensor, or a list of tensors
+ (in case the model has multiple inputs).
+ - A dict mapping input names to the corresponding array/tensors,
+ if the model has named inputs.
+ - A `tf.data` dataset.
+ y: Target data. Like the input data `x`,
+ it could be either Numpy array(s) or TensorFlow tensor(s).
+ It should be consistent with `x` (you cannot have Numpy inputs and
+ tensor targets, or inversely). If `x` is a dataset, `y` should not be
+ specified (since targets will be obtained from the iterator).
+ sample_weight: An optional sample-weight array passed by the user to
+ weight the importance of each sample in `x`.
+      class_weight: An optional class-weight array passed by the user to
+ weight the importance of samples in `x` based on the class they belong
+ to, as conveyed by `y`. If both `sample_weight` and `class_weight` are
+ provided, the weights are multiplied.
+ batch_size: Integer batch size. If provided, it is used to run additional
+ validation checks on stateful models.
+      check_steps: boolean, True if we want to check for validity of `steps`
+        and False otherwise. For example, when we are standardizing one batch
+        of data for the train_on_batch/predict_on_batch/test_on_batch APIs,
+        the `steps` value is not required and we should not check for its
+        validity in these cases.
+ steps_name: The public API's parameter name for `steps`.
+ steps: Integer or `None`. Total number of steps (batches of samples) to
+ execute.
+ validation_split: Float between 0 and 1.
+ Fraction of the training data to be used as validation data.
+ shuffle: Boolean whether to shuffle the training data before each epoch.
+ extract_tensors_from_dataset: Boolean. When `x` is a dataset instance,
+ this indicates whether to extract actual tensors from the dataset or
+ instead output the dataset instance itself.
+ Set to True when calling from `train_on_batch`/etc.
+
+ Returns:
+ A tuple of 3: inputs (arrays or dicts, depending on whether `x` was a dict
+ or not), target arrays, sample-weight arrays.
+ If the model's input and targets are symbolic, these lists are empty
+ (since the model takes no user-provided data, instead the data comes
+ from the symbolic inputs/targets).
+
+ Raises:
+ ValueError: In case of invalid user-provided data.
+ RuntimeError: If the model was never compiled.
+ """
+ if isinstance(x, (dataset_ops.DatasetV1, dataset_ops.DatasetV2)):
+      # Graph mode dataset. We'll pass the dataset as-is (unless
+      # `extract_tensors_from_dataset` is True, in which case we extract
+      # the tensors from the dataset and output them instead).
+ training_utils.validate_dataset_input(x, y, sample_weight,
+ validation_split)
+ if shuffle:
+ training_utils.verify_dataset_shuffled(x)
+
+ is_dataset = True
+ if extract_tensors_from_dataset:
+ # We do this for `train_on_batch`/etc.
+ x, y, sample_weight = training_utils.extract_tensors_from_dataset(x)
+ elif isinstance(x, iterator_ops.Iterator):
+ # Graph mode iterator. We extract the symbolic tensors.
+ training_utils.validate_dataset_input(x, y, sample_weight,
+ validation_split)
+ iterator = x
+ x, y, sample_weight = training_utils.unpack_iterator_input(iterator)
+ is_dataset = True
+ else:
+ is_dataset = False
+
+ # Validates `steps` argument based on x's type.
+ if check_steps:
+ training_utils.check_steps_argument(x, steps, steps_name)
+
+ # First, we build the model on the fly if necessary.
+ if not self.inputs:
+ all_inputs, y_input, dict_inputs = self._build_model_with_inputs(x, y)
+ is_build_called = True
+ else:
+ all_inputs = []
+ # Whether this is a subclassed model that expects dictionary inputs
+ # rather than list inputs (e.g. FeatureColumn-based models).
+ dict_inputs = isinstance(self.inputs, dict)
+ is_build_called = False
+ y_input = y
+
+ # Second, we compile the model on the fly if necessary, mostly for subclass
+ # models.
+ is_compile_called = False
+ if not self._is_compiled and self.optimizer:
+ self._compile_from_inputs(all_inputs, y_input, x, y)
+ is_compile_called = True
+
+ # In graph mode, if we had just set inputs and targets as symbolic tensors
+ # by invoking build and compile on the model respectively, we do not have to
+ # feed anything to the model. Model already has input and target data as
+ # part of the graph.
+ # Note: in this case, `any` and `all` are equivalent since we disallow
+ # mixed symbolic/value inputs.
+
+ # self.run_eagerly is not free to compute, so we want to reuse the value.
+ run_eagerly = self.run_eagerly
+
+ if (not run_eagerly and is_build_called and is_compile_called and
+ not is_dataset and any(_is_symbolic_tensor(v) for v in all_inputs)):
+ return [], [], None
+
+ return self._standardize_tensors(
+ x, y, sample_weight,
+ run_eagerly=run_eagerly,
+ dict_inputs=dict_inputs,
+ is_dataset=is_dataset,
+ class_weight=class_weight,
+ batch_size=batch_size)
+
+ def _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs,
+ is_dataset, class_weight=None, batch_size=None):
+ if run_eagerly:
+ # In eager mode, do not do shape validation
+ # since the network has no input nodes (placeholders) to be fed.
+ feed_input_names = self.input_names
+ feed_input_shapes = None
+ elif not self._is_graph_network:
+ # Case: symbolic-mode subclassed network. Do not do shape validation.
+ feed_input_names = self._feed_input_names
+ feed_input_shapes = None
+ else:
+ # Case: symbolic-mode graph network.
+ # In this case, we run extensive shape validation checks.
+ feed_input_names = self._feed_input_names
+ feed_input_shapes = self._feed_input_shapes
+
+ # Standardize the inputs.
+ if not isinstance(x, (dataset_ops.DatasetV1, dataset_ops.DatasetV2)):
+ # TODO(fchollet): run static checks with dataset output shape(s).
+ x = training_utils.standardize_input_data(
+ x,
+ feed_input_names,
+ feed_input_shapes,
+ check_batch_axis=False, # Don't enforce the batch size.
+ exception_prefix='input')
+
+ # Get typespecs for the input data and sanitize it if necessary.
+ # TODO(momernick): This should be capable of doing full input validation
+ # at all times - validate that this is so and refactor the standardization
+ # code.
+ if isinstance(x, dataset_ops.DatasetV2):
+ x_shapes = dataset_ops.get_structure(x)
+ if isinstance(x_shapes, tuple):
+ # If the output of a Dataset is a tuple, we assume it's either of the
+ # form (x_data, y_data) or (x_data, y_data, sample_weights). In either
+ # case, we only care about x_data here.
+ x_shapes = x_shapes[0]
+ else:
+ flat_inputs = nest.flatten(x, expand_composites=False)
+ flat_expected_inputs = nest.flatten(self.inputs, expand_composites=False)
+ converted_x = []
+ for (a, b) in zip(flat_inputs, flat_expected_inputs):
+ converted_x.append(_convert_scipy_sparse_tensor(a, b))
+ x = nest.pack_sequence_as(x, converted_x, expand_composites=False)
+
+ def _type_spec_from_value(value):
+ """Grab type_spec without converting array-likes to tensors."""
+ if isinstance(value, composite_tensor.CompositeTensor):
+ return value._type_spec # pylint: disable=protected-access
+ # Get a TensorSpec for array-like data without
+ # converting the data to a Tensor
+ if hasattr(value, 'shape') and hasattr(value, 'dtype'):
+ return tensor_spec.TensorSpec(value.shape, value.dtype)
+ else:
+ return type_spec.type_spec_from_value(value)
+
+ x_shapes = nest.map_structure(_type_spec_from_value, x)
+
+ flat_inputs = nest.flatten(x_shapes, expand_composites=False)
+ flat_expected_inputs = nest.flatten(self.inputs, expand_composites=False)
+ for (a, b) in zip(flat_inputs, flat_expected_inputs):
+ nest.assert_same_structure(a, b, expand_composites=True)
+
+ if y is not None:
+ # Prepare self._sample_weight_modes. List with the same length as
+ # model outputs.
+ training_utils.prepare_sample_weight_modes(self._training_endpoints,
+ self.sample_weight_mode)
+ feed_output_names = self._feed_output_names
+ feed_sample_weight_modes = self._sample_weight_modes
+ if not self._is_graph_network:
+ feed_output_shapes = None
+ else:
+ feed_output_shapes = self._feed_output_shapes
+
+ # Standardize the outputs.
+ y = training_utils.standardize_input_data(
+ y,
+ feed_output_names,
+ # Don't enforce target shapes to match output shapes.
+ # Precise checks will be run in `check_loss_and_target_compatibility`.
+ shapes=None,
+ check_batch_axis=False, # Don't enforce the batch size.
+ exception_prefix='target')
+
+ # Generate sample-wise weight values given the `sample_weight` and
+ # `class_weight` arguments.
+ sample_weights = training_utils.standardize_sample_weights(
+ sample_weight, feed_output_names)
+ class_weights = training_utils.standardize_class_weights(
+ class_weight, feed_output_names)
+
+ sample_weights = [
+ training_utils.standardize_weights(ref, sw, cw, mode)
+ for (ref, sw, cw, mode) in zip(y, sample_weights, class_weights,
+ feed_sample_weight_modes)
+ ]
+ # Check that all arrays have the same length.
+ if not self._distribution_strategy:
+ training_utils.check_array_lengths(x, y, sample_weights)
+ if self._is_graph_network and not run_eagerly:
+ # Additional checks to avoid users mistakenly using improper loss fns.
+ training_utils.check_loss_and_target_compatibility(
+ y, self._feed_loss_fns, feed_output_shapes)
+
+ sample_weights, _, _ = training_utils.handle_partial_sample_weights(
+ y, sample_weights, feed_sample_weight_modes, check_all_flat=True)
+ else:
+ y = []
+ sample_weights = None
+
+ if self.stateful and batch_size and not is_dataset:
+ # Check that for stateful networks, number of samples is a multiple
+ # of the static batch size.
+ if x[0].shape[0] % batch_size != 0:
+ raise ValueError('In a stateful network, '
+ 'you should only pass inputs with '
+ 'a number of samples that can be '
+ 'divided by the batch size. Found: ' +
+ str(x[0].shape[0]) + ' samples')
+
+ # If dictionary inputs were provided, we return a dictionary as well.
+ if dict_inputs and not isinstance(x, (dataset_ops.DatasetV1,
+ dataset_ops.DatasetV2)):
+ x = dict(zip(feed_input_names, x))
+ return x, y, sample_weights
+
+ def _build_model_with_inputs(self, inputs, targets):
+ """Build the model (set model inputs/outputs), mainly for subclass model."""
+ processed_inputs = []
+ is_dict_inputs = False
+ orig_inputs = inputs
+ # We need to use `inputs` to set the model inputs.
+ # If input data is a dataset iterator in graph mode or if it is an eager
+ # iterator and only one batch of samples is required, we fetch the data
+ # tensors from the iterator and then standardize them.
+ if isinstance(inputs, (dataset_ops.DatasetV1, dataset_ops.DatasetV2)):
+ inputs, targets, _ = training_utils.extract_tensors_from_dataset(inputs)
+ # We type-check that `inputs` and `targets` are either single arrays
+ # or lists of arrays, and extract a flat list of inputs from the passed
+ # structure.
+ training_utils.validate_input_types(inputs, orig_inputs)
+
+ if isinstance(inputs, (list, tuple)):
+ processed_inputs += list(inputs)
+ elif isinstance(inputs, dict):
+ is_dict_inputs = True
+ keys = sorted(inputs.keys())
+ processed_inputs = [inputs[k] for k in keys]
+ else:
+ processed_inputs.append(inputs)
+ # Now that we have a flat set of inputs, we make sure that none of them
+ # are CompositeTensors or CompositeTensorValues of any type (or scipy
+ # sparse arrays, which we treat as SparseTensor values). We cannot safely
+ # infer input data from an arbitrary composite tensor, so we don't try -
+ # users should explicitly add composite tensor inputs to their subclassed
+ # models.
+ for input_tensor in processed_inputs:
+ if composite_tensor_utils.is_composite_or_composite_value(input_tensor):
+ # TODO(b/132691975): Document subclass-model CT input handling.
+ raise ValueError(
+ 'All SparseTensor and RaggedTensor inputs must be explicitly '
+ 'declared using a keras.Input() with sparse=True or ragged=True. '
+ 'We found an undeclared input %s. For Sequential models, please '
+ 'add a keras.Input() as your first Layer. For subclassed models, '
+ 'please call self._set_inputs() on your input set, which you can '
+ 'create using keras.Input() for each input to your model.' %
+ (input_tensor,))
+ # Build the model using the retrieved inputs (value or symbolic).
+ # If values are generated from a dataset, then in symbolic-mode
+ # placeholders will be created to match the value shapes.
+ if isinstance(orig_inputs, (dataset_ops.DatasetV1, dataset_ops.DatasetV2,
+ iterator_ops.Iterator)):
+ if not self.inputs:
+ # For subclassed models, a robust input spec is not available so we
+ # must cast to the model dtype.
+ inputs = training_utils.cast_if_floating_dtype(inputs, self.dtype)
+
+ def create_tensor_spec(t):
+ return tensor_spec.TensorSpec(t.shape, t.dtype)
+
+ cast_inputs = nest.map_structure(create_tensor_spec, inputs)
+ elif training_utils.has_tensors(inputs):
+ cast_inputs = training_utils.cast_if_floating_dtype(inputs)
+ else:
+ cast_inputs = inputs
+ self._set_inputs(cast_inputs)
+ return processed_inputs, targets, is_dict_inputs
+
+ def _compile_from_inputs(self, all_inputs, target, orig_inputs, orig_target):
+ if target is not None:
+ # We need to use `y` to set the model targets.
+ if training_utils.has_tensors(target):
+ target = training_utils.cast_if_floating_dtype_and_mismatch(
+ target, self.outputs)
+ training_utils.validate_input_types(target, orig_target,
+ allow_dict=False, field_name='target')
+ if isinstance(target, (list, tuple)):
+ all_inputs += list(target)
+ else:
+ all_inputs.append(target)
+ # Type check that all inputs are *either* value *or* symbolic.
+ # TODO(fchollet): this check could be removed in Eager mode?
+ if any(tensor_util.is_tensor(v) for v in all_inputs):
+ if not all(tensor_util.is_tensor(v) for v in all_inputs):
+ raise ValueError('Do not pass inputs that mix Numpy arrays and '
+ 'TensorFlow tensors. '
+ 'You passed: x=' + str(orig_inputs) +
+ '; y=' + str(orig_target))
+ is_dataset = isinstance(orig_inputs, (dataset_ops.DatasetV1,
+ dataset_ops.DatasetV2,
+ iterator_ops.Iterator))
+ if is_dataset or context.executing_eagerly():
+ target_tensors = None
+ else:
+ # Handle target tensors if any passed.
+ if target is not None:
+ if not isinstance(target, (list, tuple)):
+ target = [target]
+ target_tensors = [v for v in target if _is_symbolic_tensor(v)]
+ else:
+ target_tensors = None
+
+ self.compile(
+ optimizer=self.optimizer,
+ loss=self.loss,
+ metrics=self._compile_metrics,
+ weighted_metrics=self._compile_weighted_metrics,
+ loss_weights=self.loss_weights,
+ target_tensors=target_tensors,
+ sample_weight_mode=self.sample_weight_mode,
+ run_eagerly=self.run_eagerly,
+ experimental_run_tf_function=self._experimental_run_tf_function)
+
+ # TODO(omalleyt): Consider changing to a more descriptive function name.
+ def _set_inputs(self, inputs, outputs=None, training=None):
+ """Set model's input and output specs based on the input data received.
+
+ This is to be used for Model subclasses, which do not know at instantiation
+ time what their inputs look like.
+
+ Args:
+ inputs: Single array, or list of arrays. The arrays could be placeholders,
+ Numpy arrays, data tensors, or TensorSpecs.
+ - if placeholders: the model is built on top of these placeholders,
+ and we expect Numpy data to be fed for them when calling `fit`/etc.
+ - if Numpy data or TensorShapes: we create placeholders matching the
+ TensorShapes or shapes of the Numpy arrays. We expect Numpy data to be
+ fed for these placeholders when calling `fit`/etc.
+ - if data tensors: the model is built on top of these tensors.
+ We do not expect any Numpy data to be provided when calling `fit`/etc.
+ outputs: None, a data tensor, or a list of tensors. If None, the
+ outputs will be determined by invoking `self.call()`, otherwise the
+ provided value will be used.
+ training: Boolean or None. Only relevant in symbolic mode. Specifies
+ whether to build the model's graph in inference mode (False), training
+ mode (True), or using the Keras learning phase (None).
+ Raises:
+ ValueError: If dict inputs are passed to a Sequential Model where the
+ first layer isn't FeatureLayer.
+ """
+ inputs = self._set_input_attrs(inputs)
+
+ if outputs is None:
+ kwargs = {}
+ if self._expects_training_arg:
+ # In V2 mode, feeding `training=None` is not allowed because any value
+        # explicitly passed by the user is respected, even `None`.
+ if training is None and not ops.executing_eagerly_outside_functions():
+ training = K.learning_phase()
+ if training is not None:
+ kwargs['training'] = training
+ try:
+ outputs = self(inputs, **kwargs)
+ except NotImplementedError:
+ # This Model or a submodel is dynamic and hasn't overridden
+ # `compute_output_shape`.
+ outputs = None
+
+ self._set_output_attrs(outputs)
+
+ @trackable.no_automatic_dependency_tracking
+ def _set_input_attrs(self, inputs):
+ """Sets attributes related to the inputs of the Model."""
+ if self.inputs:
+ raise ValueError('Model inputs are already set.')
+
+ if self.__class__.__name__ == 'Sequential' and not self.built:
+ if tensor_util.is_tensor(inputs):
+ input_shape = (None,) + tuple(inputs.shape.as_list()[1:])
+ elif isinstance(inputs, tensor_shape.TensorShape):
+ input_shape = (None,) + tuple(inputs.as_list()[1:])
+ elif isinstance(inputs, dict):
+ # We assert that the first layer is a FeatureLayer.
+ if not training_utils.is_feature_layer(self.layers[0]):
+ raise ValueError('Passing a dictionary input to a Sequential Model '
+ 'which doesn\'t have FeatureLayer as the first layer'
+ ' is an error.')
+ input_shape = (None,)
+ else:
+ input_shape = (None,) + tuple(inputs.shape[1:])
+ self._build_input_shape = input_shape
+
+ # Cast inputs to the compute dtype. This is primarily used
+ # when saving to determine the correct dtype in the input signature.
+ inputs = self._maybe_cast_inputs(inputs)
+
+ # On-the-fly setting of symbolic model inputs (either by using the tensor
+ # provided, or by creating a placeholder if Numpy data was provided).
+ model_inputs = training_utils.ModelInputs(inputs)
+ inputs = model_inputs.get_symbolic_inputs()
+ self.inputs = model_inputs.get_symbolic_inputs(return_single_as_list=True)
+ self.input_names = model_inputs.get_input_names()
+
+ self._feed_inputs = []
+ self._feed_input_names = []
+ self._feed_input_shapes = []
+
+ for k, v in model_inputs.as_dict():
+ if K.is_placeholder(v):
+ self._feed_input_names.append(k)
+ self._feed_inputs.append(v)
+ self._feed_input_shapes.append(K.int_shape(v))
+
+ return inputs
+
+ @trackable.no_automatic_dependency_tracking
+ def _set_output_attrs(self, outputs):
+ """Sets attributes related to the outputs of the Model."""
+ # NOTE(taylorrobie): This convention cannot be changed without updating the
+ # data adapter since it assumes nest.flatten ordering.
+ outputs = nest.flatten(outputs)
+ self.outputs = outputs
+ self.output_names = training_utils.generic_output_names(outputs)
+ # TODO(scottzhu): Should we cleanup the self._training_endpoints here?
+ self.built = True
+
+ @property
+ def _targets(self):
+ """The output target tensors for the model."""
+ return [
+ e.training_target.target
+ for e in self._training_endpoints
+ if e.has_training_target()
+ ]
+
+ @property
+ def _feed_targets(self):
+ return [
+ e.training_target.target
+ for e in self._training_endpoints
+ if e.has_feedable_training_target()
+ ]
+
+ @property
+ def _feed_output_names(self):
+ return [
+ e.output_name
+ for e in self._training_endpoints
+ if e.has_feedable_training_target()
+ ]
+
+ @property
+ def _feed_output_shapes(self):
+ return [
+ e.feed_output_shape
+ for e in self._training_endpoints
+ if e.has_feedable_training_target()
+ ]
+
+ @property
+ def _feed_loss_fns(self):
+ return [
+ e.loss_fn
+ for e in self._training_endpoints
+ if e.has_feedable_training_target()
+ ]
+
+ @property
+ def _loss_weights_list(self):
+ return [e.loss_weight for e in self._training_endpoints]
+
+ @property
+ def _output_loss_metrics(self):
+ if hasattr(self, '_training_endpoints'):
+ return [
+ e.output_loss_metric
+ for e in self._training_endpoints
+ if e.output_loss_metric is not None
+ ]
+ return None
+
+ @property
+ def sample_weights(self):
+ return [e.sample_weight for e in self._training_endpoints]
+
+ @property
+ def _sample_weight_modes(self):
+ return [e.sample_weight_mode for e in self._training_endpoints]
+
+ @property
+ def _feed_sample_weights(self):
+ return [e.sample_weight for e in self._training_endpoints
+ if e.sample_weight is not None]
+
+ def _maybe_load_initial_epoch_from_ckpt(self, initial_epoch, mode):
+ """Maybe load initial epoch from ckpt considering possible worker recovery.
+
+ Refer to tensorflow/python/keras/distribute/multi_worker_training_state.py
+ for more information.
+
+ Arguments:
+      initial_epoch: The original initial_epoch the user passed in to `fit()`.
+ mode: The mode for running `model.fit()`.
+
+ Returns:
+ If the training is recovering from previous failure under multi-worker
+ training setting, return the epoch the training is supposed to continue
+ at. Otherwise, return the `initial_epoch` the user passes in.
+ """
+ if hasattr(self, '_training_state'):
+ return self._training_state.maybe_load_initial_epoch_from_ckpt(
+ initial_epoch, mode)
+ return initial_epoch
+
+ def _get_training_eval_metrics(self):
+ """Returns all the metrics that are to be reported.
+
+ This includes the output loss metrics, compile metrics/weighted metrics,
+ add_metric metrics.
+ """
+ metrics = []
+ metrics.extend(getattr(self, '_output_loss_metrics', None) or [])
+ metrics.extend(getattr(self, 'metrics', None) or [])
+ return metrics
+
+ def _assert_compile_was_called(self):
+ # Checks whether `compile` has been called. If it has been called,
+ # then the optimizer is set. This is different from whether the
+ # model is compiled
+ # (i.e. whether the model is built and its inputs/outputs are set).
+ if not self.optimizer:
+ raise RuntimeError('You must compile your model before '
+ 'training/testing. '
+ 'Use `model.compile(optimizer, loss)`.')
+
+ def _in_multi_worker_mode(self):
+ """Method to infer if this `Model` is working in multi-worker settings.
+
+ Multi-worker training refers to the setup where the training is
+ distributed across multiple workers, as opposed to the case where
+ only a local process performs the training. This function is
+ used to infer for example whether or not a distribute coordinator
+ should be run, and thus TensorFlow servers should be started for
+ communication with other servers in the cluster, or whether or not
+ saving/restoring checkpoints is relevant for preemption fault tolerance.
+
+ Experimental. Signature and implementation are subject to change.
+
+ Returns:
+ Whether this model indicates it's working in multi-worker settings.
+ """
+ strategy = self._get_distribution_strategy()
+ return strategy and strategy.extended._in_multi_worker_mode() # pylint: disable=protected-access
+
+ def _get_distribution_strategy(self):
+    # If the model was compiled under the scope of a `tf.distribute.Strategy`,
+ # `self._distribution_strategy` would have been set and model should infer
+ # that as the used strategy (even if it's out of strategy scope already).
+ strategy = self._distribution_strategy
+
+ # Otherwise, use the strategy whose scope this is in.
+ if not strategy and distribution_strategy_context.has_strategy():
+ strategy = distribution_strategy_context.get_strategy()
+
+ return strategy
+
+ @property
+ def _trackable_saved_model_saver(self):
+ return model_serialization.ModelSavedModelSaver(self)
+
+
+class DistributedCallbackModel(Model):
+ """Model that is used for callbacks with tf.distribute.Strategy."""
+
+ def __init__(self, model):
+ super(DistributedCallbackModel, self).__init__()
+ self.optimizer = model.optimizer
+
+ def set_original_model(self, orig_model):
+ self._original_model = orig_model
+
+ def save_weights(self, filepath, overwrite=True, save_format=None):
+ self._replicated_model.save_weights(filepath, overwrite=overwrite,
+ save_format=save_format)
+
+ def save(self, filepath, overwrite=True, include_optimizer=True):
+ # save weights from the distributed model to the original model
+ distributed_model_weights = self.get_weights()
+ self._original_model.set_weights(distributed_model_weights)
+ # TODO(anjalisridhar): Do we need to save the original model here?
+ # Saving the first replicated model works as well.
+ self._original_model.save(filepath, overwrite=True, include_optimizer=False)
+
+ def load_weights(self, filepath, by_name=False):
+ self._original_model.load_weights(filepath, by_name=False)
+ # Copy the weights from the original model to each of the replicated models.
+ orig_model_weights = self._original_model.get_weights()
+ distributed_training_utils.set_weights(
+ self._original_model._distribution_strategy, self, # pylint: disable=protected-access
+ orig_model_weights)
+
+ def __getattr__(self, item):
+    # Whitelisted attributes of the model that can be accessed by the user
+ # during a callback.
+ if item not in ('_setattr_tracking', '_layers'):
+ logging.warning('You are accessing attribute ' + item + ' of the '
+ 'DistributedCallbackModel that may not have been set '
+ 'correctly.')
+ return super(DistributedCallbackModel, self).__getattr__(item)
+
+
+class _TrainingEndpoint(object):
+ """A container for the training output/target and related entities.
+
+  In the case of a model with multiple outputs, there is a one-to-one mapping
+  between model output (y_pred), model target (y_true), loss, metrics, etc.
+  By unifying these entities into one class, the different entities can access
+  information about each other, rather than each having to reach into separate
+  lists of attributes on the model.
+ """
+
+ def __init__(self,
+ output,
+ output_name,
+ loss_fn,
+ loss_weight=None,
+ training_target=None,
+ output_loss_metric=None,
+ sample_weight=None,
+ sample_weight_mode=None):
+ """Initialize the _TrainingEndpoint.
+
+ Note that the output and output_name should be stable as long as the model
+    structure doesn't change. The training_target is supposed to be mutable
+    since that information is provided via `compile()`.
+
+ Args:
+ output: the output tensor of the model.
+ output_name: the unique name of the output tensor.
+ loss_fn: the loss function for the output tensor.
+ loss_weight: float, the weights for the loss.
+ training_target: the _TrainingTarget for the model.
+ output_loss_metric: the metric object for the loss function.
+ sample_weight: the weights for how a sample is weighted during metric and
+ loss calculation. Could be None.
+ sample_weight_mode: string, 'temporal', 'samplewise' or None. The mode for
+ how the sample_weight is populated.
+ """
+ self._output = output
+ self._output_name = output_name
+ self._loss_fn = loss_fn
+ self._loss_weight = loss_weight
+ self._training_target = training_target
+ self._output_loss_metric = output_loss_metric
+ self._sample_weight = sample_weight
+ self._sample_weight_mode = sample_weight_mode
+
+ @property
+ def output(self):
+ return self._output
+
+ @property
+ def output_name(self):
+ return self._output_name
+
+ @property
+ def shape(self):
+ return K.int_shape(self.output)
+
+ @property
+ def loss_fn(self):
+ return self._loss_fn
+
+ @property
+ def loss_weight(self):
+ return self._loss_weight
+
+ @loss_weight.setter
+ def loss_weight(self, value):
+ self._loss_weight = value
+
+ @property
+ def training_target(self):
+ return self._training_target
+
+ @training_target.setter
+ def training_target(self, value):
+ self._training_target = value
+
+ def create_training_target(self, target, run_eagerly=False):
+    """Creates a training_target instance and updates `self.training_target`.
+
+    Note that the input target should just be a tensor or None, and the
+    corresponding training target will be created based on the output and
+ loss_fn.
+
+ Args:
+ target: the target tensor for the current output. Could be None.
+ run_eagerly: boolean, whether the model is in run_eagerly mode.
+
+ Raises:
+ ValueError if the training_target field for the current instance has
+ already been populated.
+ """
+ if self.has_training_target():
+ raise ValueError('The training_target field for the _TrainingEndpoint '
+ 'instance has already been populated')
+ if run_eagerly:
+      # When running eagerly, the target tensor is ignored and a `None`
+      # target is used instead.
+ self.training_target = _TrainingTarget(
+ None, feedable=True, skip_target_weights=False)
+ return
+
+ if self.should_skip_target():
+ self.training_target = _TrainingTarget(None)
+ else:
+ if target is not None and not K.is_placeholder(target):
+ feedable = False
+ skip_target_weights = True
+ else:
+ feedable = True
+ skip_target_weights = False
+
+ if target is None:
+ target_dtype = losses.LABEL_DTYPES_FOR_LOSSES.get(
+ self.loss_fn, K.dtype(self.output))
+
+ target = K.placeholder(
+ ndim=len(self.shape),
+ name=self.output_name + '_target',
+ sparse=K.is_sparse(self.output),
+ dtype=target_dtype)
+
+ self.training_target = _TrainingTarget(
+ target,
+ feedable=feedable,
+ skip_target_weights=skip_target_weights)
+
+ @property
+ def output_loss_metric(self):
+ return self._output_loss_metric
+
+ @output_loss_metric.setter
+ def output_loss_metric(self, value):
+ self._output_loss_metric = value
+
+ @property
+ def sample_weight(self):
+ return self._sample_weight
+
+ @sample_weight.setter
+ def sample_weight(self, value):
+ self._sample_weight = value
+
+ @property
+ def sample_weight_mode(self):
+ return self._sample_weight_mode
+
+ @sample_weight_mode.setter
+ def sample_weight_mode(self, value):
+ self._sample_weight_mode = value
+
+ def should_skip_target(self):
+ return self._loss_fn is None
+
+ def should_skip_target_weights(self):
+ return (self.should_skip_target() or self.training_target is None or
+ self.training_target.skip_target_weights)
+
+ def has_training_target(self):
+ return self.training_target is not None
+
+ def has_feedable_training_target(self):
+ return (not self.should_skip_target() and
+ self.training_target is not None and self.training_target.feedable)
+
+ def loss_name(self):
+ if self._loss_fn is not None:
+ return self._output_name + '_loss'
+ return None
+
+ @property
+ def feed_output_shape(self):
+ """The output shape for the feedable target."""
+ if not self.has_feedable_training_target():
+ return None
+
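+    # Sparse categorical crossentropy takes integer class indices as targets,
+    # so the class axis of the output shape collapses to size 1 below.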
+ if ((isinstance(self.loss_fn, losses.LossFunctionWrapper) and
+ self.loss_fn.fn == losses.sparse_categorical_crossentropy)) or (
+ isinstance(self.loss_fn, losses.SparseCategoricalCrossentropy)):
+ if K.image_data_format() == 'channels_first':
+ return (self.shape[0], 1) + self.shape[2:]
+ else:
+ return self.shape[:-1] + (1,)
+ elif (not isinstance(self.loss_fn, losses.Loss) or
+ (isinstance(self.loss_fn, losses.LossFunctionWrapper) and
+ (getattr(losses, self.loss_fn.fn.__name__, None) is None))):
+ # If the given loss is not an instance of the `Loss` class (custom
+ # class) or if the loss function that is wrapped is not in the
+ # `losses` module, then it is a user-defined loss and we make no
+ # assumptions about it.
+ return None
+ else:
+ return self.shape
+
+ def sample_weights_mismatch(self):
+    """Checks whether the sample weight and the sample weight mode match."""
+ # If there is a mismatch between sample weight mode and the placeholders
+ # created, then recompile the sub-graphs that depend on sample weights.
+ return (
+ (self.sample_weight_mode is not None and self.sample_weight is None) or
+ (self.sample_weight_mode is None and self.sample_weight is not None))
+
+ def populate_sample_weight(self, sample_weight, sample_weight_mode):
+    """Populates the sample weight based on the sample weight mode."""
+ if (sample_weight is None and
+ (self.should_skip_target_weights() or sample_weight_mode is None or
+ context.executing_eagerly())):
+ self._sample_weight = None
+ return
+
+ assert sample_weight_mode in ['temporal', 'samplewise']
+ if sample_weight_mode == 'temporal':
+ default_value = [[1.]]
+ shape = [None, None]
+ else:
+ # sample_weight_mode == 'samplewise'
+ default_value = [1.]
+ shape = [None]
+
+ if sample_weight is not None:
+ if not sample_weight.shape.is_compatible_with(shape):
+ raise ValueError('Received sample weight with shape {}. Expected shape '
+ '{}.'.format(sample_weight.shape, shape))
+ self._sample_weight = sample_weight
+ else:
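+      # With no explicit weights, create a placeholder that falls back to a
+      # default weight of 1 (i.e. unweighted samples) when nothing is fed.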
+ self._sample_weight = array_ops.placeholder_with_default(
+ constant_op.constant(default_value, dtype=K.floatx()),
+ shape=shape,
+ name=self.output_name + '_sample_weights')
+
+
+class _TrainingTarget(object):
+ """Container for a target tensor (y_true) and its metadata (shape, loss...).
+
+ Arguments:
+ target: A target tensor for the model. It may be `None` if the
+ output is excluded from loss computation. It is still kept as None
+ since each output of the model should have a corresponding target. If
+ the target is None, the rest of the attributes will be None as well.
+ feedable: Boolean, whether the target is feedable (requires data to be
+ passed in `fit` or `train_on_batch`), or not (model compiled with
+ `target_tensors` argument).
+ skip_target_weights: Boolean, whether the target should be skipped during
+ weights calculation.
+ """
+
+ def __init__(self, target, feedable=False, skip_target_weights=True):
+ self._target = target
+ self._feedable = feedable
+ self._skip_target_weights = skip_target_weights
+
+ @property
+ def target(self):
+ return self._target
+
+ @property
+ def feedable(self):
+ return self._feedable
+
+ @property
+ def skip_target_weights(self):
+ return self._skip_target_weights
+
+
+def _is_symbolic_tensor(x):
+ return tensor_util.is_tensor(x) and not isinstance(x, ops.EagerTensor)
+
+
+def _convert_scipy_sparse_tensor(value, expected_input):
+  """Handles scipy sparse matrix conversions.
+
+  This method takes a value and returns the proper conversion. If the value
+  is a scipy sparse matrix and the expected input is a dense tensor, we
+  densify the value. If the value is a scipy sparse matrix and the expected
+  input is a TF SparseTensor, we convert the value to a SparseTensor. If the
+  value is not a scipy sparse matrix, or scipy is not imported, we pass it
+  through unchanged.
+
+  Arguments:
+    value: An object that may be a scipy sparse matrix.
+    expected_input: The expected input placeholder.
+
+ Returns:
+ The possibly-converted 'value'.
+ """
+ if issparse is not None and issparse(value):
+ if ops.is_dense_tensor_like(expected_input):
+ if ops.executing_eagerly_outside_functions():
+ # In TF2 we do not silently densify sparse matrices.
+ raise ValueError('A SciPy sparse matrix was passed to a model '
+ 'that expects dense inputs. Please densify your '
+                         'inputs first, such as by calling `x.toarray()`.')
+ return value.toarray()
+ else:
+ sparse_coo = value.tocoo()
+ row, col = sparse_coo.row, sparse_coo.col
+ data, shape = sparse_coo.data, sparse_coo.shape
+ indices = np.concatenate((np.expand_dims(row, 1), np.expand_dims(col, 1)),
+ 1)
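+      # `indices` now has shape (nnz, 2): one (row, col) pair per nonzero
+      # entry, which is the layout `SparseTensor` expects.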
+ return sparse_tensor.SparseTensor(indices, data, shape)
+ else:
+ return value
+
+
+def _get_metrics_from_layers(layers):
+ """Returns list of metrics from the given layers.
+
+ This will not include the `compile` metrics of a model layer.
+
+ Arguments:
+ layers: List of layers.
+
+ Returns:
+ List of metrics.
+ """
+ metrics = []
+ layers = trackable_layer_utils.filter_empty_layer_containers(layers)
+ for layer in layers:
+ if isinstance(layer, Model):
+ # We cannot call 'metrics' on the model because we do not want to
+ # include the metrics that were added in compile API of a nested model.
+ metrics.extend(layer._metrics) # pylint: disable=protected-access
+ metrics.extend(_get_metrics_from_layers(layer.layers))
+ else:
+ metrics.extend(layer.metrics)
+ return metrics
diff --git a/tensorflow/python/keras/engine/training_v2.py b/tensorflow/python/keras/engine/training_v2.py
index 708083f..476da84 100644
--- a/tensorflow/python/keras/engine/training_v2.py
+++ b/tensorflow/python/keras/engine/training_v2.py
@@ -320,7 +320,13 @@
with training_context.on_epoch(epoch, ModeKeys.TRAIN) as epoch_logs:
model.reset_metrics()
if training_data_iter is None or recreate_training_iterator:
- training_data_iter = iter(training_dataset)
+ if (training_data_iter is not None and
+ distribution_strategy_context.has_strategy()):
+ # TODO(kaftan): remove this when MultiDeviceIterator is a
+        # composite tensor (unless this is more efficient)
+ training_data_iter._initializer # pylint: disable=pointless-statement
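+          # Touching `_initializer` appears intended to re-initialize the
+          # existing iterator in place (hence the pointless-statement
+          # suppression) instead of building a new iterator each epoch.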
+ else:
+ training_data_iter = iter(training_dataset)
training_result = run_one_epoch(
model,
@@ -347,7 +353,13 @@
if (do_validation and
training_utils.should_run_validation(validation_freq, epoch) and
not training_callbacks.model.stop_training):
- eval_data_iter = iter(validation_dataset)
+ if (eval_data_iter is not None and
+ distribution_strategy_context.has_strategy()):
+ # TODO(kaftan): remove this when MultiDeviceIterator is a
+        # composite tensor (unless this is more efficient)
+ eval_data_iter._initializer # pylint: disable=pointless-statement
+ else:
+ eval_data_iter = iter(validation_dataset)
validation_callbacks = cbks.configure_callbacks(
training_callbacks,
diff --git a/tensorflow/python/keras/losses.py b/tensorflow/python/keras/losses.py
index 1baa746..8370fdf 100644
--- a/tensorflow/python/keras/losses.py
+++ b/tensorflow/python/keras/losses.py
@@ -974,6 +974,8 @@
@keras_export('keras.metrics.sparse_categorical_crossentropy',
'keras.losses.sparse_categorical_crossentropy')
def sparse_categorical_crossentropy(y_true, y_pred, from_logits=False, axis=-1):
+ y_pred = ops.convert_to_tensor(y_pred)
+ y_true = math_ops.cast(y_true, y_pred.dtype)
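+  # Converting `y_pred` and casting `y_true` to its dtype lets plain Python
+  # lists or NumPy arrays be passed directly (GitHub issue 33394).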
return K.sparse_categorical_crossentropy(
y_true, y_pred, from_logits=from_logits, axis=axis)
diff --git a/tensorflow/python/keras/losses_test.py b/tensorflow/python/keras/losses_test.py
index 8cd7b32..d760f35 100644
--- a/tensorflow/python/keras/losses_test.py
+++ b/tensorflow/python/keras/losses_test.py
@@ -1003,6 +1003,14 @@
loss = cce_obj(y_true, logits)
self.assertAllClose((0.001822, 0.000459, 0.169846), self.evaluate(loss), 3)
+ def test_non_tensor(self):
+ # Test case for GitHub issue 33394.
+ cce_obj = keras.losses.SparseCategoricalCrossentropy()
+ y_true = [[0], [1], [2]]
+ y_pred = [[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]]
+ loss = cce_obj(y_true, y_pred, sample_weight=2.3)
+ self.assertAlmostEqual(self.evaluate(loss), .7449, 3)
+
@test_util.run_all_in_graph_and_eager_modes
class HingeTest(test.TestCase):
diff --git a/tensorflow/python/keras/metrics.py b/tensorflow/python/keras/metrics.py
index 7abd44b..db8f897 100644
--- a/tensorflow/python/keras/metrics.py
+++ b/tensorflow/python/keras/metrics.py
@@ -2997,12 +2997,25 @@
@keras_export('keras.metrics.sparse_top_k_categorical_accuracy')
def sparse_top_k_categorical_accuracy(y_true, y_pred, k=5):
+ """Computes how often integer targets are in the top `K` predictions.
+
+ Args:
+ y_true: tensor of true targets.
+ y_pred: tensor of predicted targets.
+ k: (Optional) Number of top elements to look at for computing accuracy.
+ Defaults to 5.
+
+ Returns:
+ Sparse top K categorical accuracy value.
+ """
y_pred_rank = ops.convert_to_tensor(y_pred).shape.ndims
y_true_rank = ops.convert_to_tensor(y_true).shape.ndims
- # If the shape of y_true is (num_samples, 1), squeeze to (num_samples,)
- if (y_true_rank is not None) and (y_pred_rank is not None) and (len(
- K.int_shape(y_true)) == len(K.int_shape(y_pred))):
- y_true = array_ops.squeeze(y_true, [-1])
+ # Flatten y_pred to (batch_size, num_samples) and y_true to (num_samples,)
+ if (y_true_rank is not None) and (y_pred_rank is not None):
+ if y_pred_rank > 2:
+ y_pred = array_ops.reshape(y_pred, [-1, y_pred.shape[-1]])
+ if y_true_rank > 1:
+ y_true = array_ops.reshape(y_true, [-1])
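+  # e.g. y_pred of shape (batch, seq_len, num_classes) is flattened to
+  # (batch * seq_len, num_classes) and y_true of shape (batch, seq_len) to
+  # (batch * seq_len,) before computing `in_top_k`.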
return math_ops.cast(
nn.in_top_k(y_pred, math_ops.cast(y_true, 'int32'), k), K.floatx())
diff --git a/tensorflow/python/keras/metrics_functional_test.py b/tensorflow/python/keras/metrics_functional_test.py
index 40478d2..1082ac9 100644
--- a/tensorflow/python/keras/metrics_functional_test.py
+++ b/tensorflow/python/keras/metrics_functional_test.py
@@ -46,15 +46,21 @@
# Test correctness if the shape of y_true is (num_samples,)
y_true = K.variable([1., 0., 0., 0.])
y_pred = K.variable([[0.8, 0.2], [0.6, 0.4], [0.7, 0.3], [0.9, 0.1]])
- print(K.eval(metric(y_true, y_pred)))
self.assertAllEqual(K.eval(metric(y_true, y_pred)), [0., 1., 1., 1.])
# Test correctness if the shape of y_true is (num_samples, 1)
y_true = K.variable([[1.], [0.], [0.], [0.]])
y_pred = K.variable([[0.8, 0.2], [0.6, 0.4], [0.7, 0.3], [0.9, 0.1]])
- print(K.eval(metric(y_true, y_pred)))
self.assertAllEqual(K.eval(metric(y_true, y_pred)), [0., 1., 1., 1.])
+ # Test correctness if the shape of y_true is (batch_size, seq_length) and
+ # y_pred is (batch_size, seq_length, num_classes)
+ y_pred = K.variable(
+ np.array([[[0.2, 0.3, 0.1], [0.1, 0.2, 0.7]],
+ [[0.3, 0.2, 0.1], [0.7, 0.2, 0.1]]]))
+ y_true = K.variable(np.array([[1, 0], [1, 0]]))
+ self.assertAllEqual(K.eval(metric(y_true, y_pred)), [[1., 0.], [0., 1.]])
+
def test_sparse_categorical_accuracy_float(self):
with self.cached_session():
metric = metrics.sparse_categorical_accuracy
@@ -106,6 +112,22 @@
metrics.sparse_top_k_categorical_accuracy(y_true, y_pred, k=1))
self.assertEqual(np.mean(result), 0.)
+ # Test correctness if the shape of y_true is (batch_size, seq_length) and
+ # y_pred is (batch_size, seq_length, num_classes)
+ y_pred = K.variable(
+ np.array([[[0.3, 0.2, 0.1], [0.1, 0.2, 0.7], [0.1, 0.2, 0.7]],
+ [[0.3, 0.2, 0.1], [0.1, 0.2, 0.7], [0.3, 0.2, 0.1]]]))
+ y_true = K.variable(np.array([[1, 0, 0], [1, 0, 1]]))
+ result = K.eval(
+ metrics.sparse_top_k_categorical_accuracy(y_true, y_pred, k=3))
+ self.assertEqual(np.mean(result), 1)
+ result = K.eval(
+ metrics.sparse_top_k_categorical_accuracy(y_true, y_pred, k=2))
+ self.assertEqual(np.mean(result), 0.5)
+ result = K.eval(
+ metrics.sparse_top_k_categorical_accuracy(y_true, y_pred, k=1))
+ self.assertEqual(np.mean(result), 0.)
+
def test_top_k_categorical_accuracy(self):
with self.cached_session():
y_pred = K.variable(np.array([[0.3, 0.2, 0.1], [0.1, 0.2, 0.7]]))
diff --git a/tensorflow/python/keras/mixed_precision/experimental/BUILD b/tensorflow/python/keras/mixed_precision/experimental/BUILD
index ef579ee..8130689 100644
--- a/tensorflow/python/keras/mixed_precision/experimental/BUILD
+++ b/tensorflow/python/keras/mixed_precision/experimental/BUILD
@@ -54,7 +54,7 @@
srcs = [
"policy_test.py",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":policy",
@@ -85,7 +85,7 @@
name = "autocast_variable_test",
size = "medium",
srcs = ["autocast_variable_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
deps = [
":autocast_variable",
"//tensorflow/python:client_testlib",
@@ -133,6 +133,7 @@
"//tensorflow/python/distribute:one_device_strategy",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
)
py_library(
@@ -157,6 +158,7 @@
"//tensorflow/python/distribute:one_device_strategy",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
shard_count = 4,
tags = ["no_windows"], # b/139083295: bfloat16 tests fail on Windows
)
@@ -172,5 +174,6 @@
"//tensorflow/python/distribute:one_device_strategy",
"//tensorflow/python/keras",
],
+ python_version = "PY3",
tags = ["no_rocm"],
)
diff --git a/tensorflow/python/keras/premade/BUILD b/tensorflow/python/keras/premade/BUILD
index af8e86b..2892dfb 100644
--- a/tensorflow/python/keras/premade/BUILD
+++ b/tensorflow/python/keras/premade/BUILD
@@ -32,6 +32,7 @@
name = "linear_test",
size = "medium",
srcs = ["linear_test.py"],
+ python_version = "PY3",
shard_count = 2,
deps = [
":premade",
@@ -45,7 +46,7 @@
name = "wide_deep_test",
size = "medium",
srcs = ["wide_deep_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
shard_count = 2,
srcs_version = "PY2AND3",
deps = [
diff --git a/tensorflow/python/keras/saving/saved_model/load.py b/tensorflow/python/keras/saving/saved_model/load.py
index 2446ab6..084038c 100644
--- a/tensorflow/python/keras/saving/saved_model/load.py
+++ b/tensorflow/python/keras/saving/saved_model/load.py
@@ -20,6 +20,7 @@
import json
from tensorflow.python.eager import function as defun
+from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine import input_spec
@@ -55,6 +56,9 @@
training_lib = LazyLoader(
"training_lib", globals(),
"tensorflow.python.keras.engine.training")
+training_lib_v1 = LazyLoader(
+ "training_lib", globals(),
+ "tensorflow.python.keras.engine.training_v1")
# pylint:enable=g-inconsistent-quotes
@@ -196,11 +200,16 @@
# pylint: enable=protected-access
def _recreate_base_user_object(self, proto):
+ if ops.executing_eagerly_outside_functions():
+ model_class = training_lib.Model
+ else:
+ model_class = training_lib_v1.Model
+
revived_classes = {
'_tf_keras_layer': (RevivedLayer, base_layer.Layer),
'_tf_keras_input_layer': (RevivedInputLayer, input_layer.InputLayer),
'_tf_keras_network': (RevivedNetwork, network_lib.Network),
- '_tf_keras_model': (RevivedNetwork, training_lib.Model),
+ '_tf_keras_model': (RevivedNetwork, model_class),
'_tf_keras_sequential': (RevivedNetwork, models_lib.Sequential)
}
@@ -210,9 +219,7 @@
parent_classes = revived_classes[proto.identifier]
metadata = json.loads(proto.metadata)
revived_cls = type(
- compat.as_str(metadata['class_name']),
- parent_classes,
- {'__setattr__': parent_classes[1].__setattr__})
+ compat.as_str(metadata['class_name']), parent_classes, {})
return revived_cls._init_from_metadata(metadata) # pylint: disable=protected-access
return super(KerasObjectLoader, self)._recreate_base_user_object(proto)
@@ -377,4 +384,3 @@
revived_obj.activity_regularizer = regularizers.deserialize(
metadata['activity_regularizer'])
# pylint:enable=protected-access
-
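
The selection added to `_recreate_base_user_object` can be summarized with a small sketch (a behavioral paraphrase of the code above, not a new API):

    from tensorflow.python.framework import ops
    from tensorflow.python.keras.engine import training as training_lib
    from tensorflow.python.keras.engine import training_v1 as training_lib_v1

    def _pick_model_base():
      # A '_tf_keras_model' object revived from a SavedModel now subclasses
      # the v2 Model when running eagerly and the v1 Model in a legacy graph.
      if ops.executing_eagerly_outside_functions():
        return training_lib.Model
      return training_lib_v1.Model
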
diff --git a/tensorflow/python/keras/saving/saved_model/saved_model_test.py b/tensorflow/python/keras/saving/saved_model/saved_model_test.py
index 00db92b..de466f0 100644
--- a/tensorflow/python/keras/saving/saved_model/saved_model_test.py
+++ b/tensorflow/python/keras/saving/saved_model/saved_model_test.py
@@ -92,7 +92,7 @@
return inputs
-@test_util.run_all_in_graph_and_eager_modes
+@keras_parameterized.run_all_keras_modes
class TestModelSavingAndLoadingV2(keras_parameterized.TestCase):
def _save_model_dir(self, dirname='saved_model'):
@@ -264,6 +264,11 @@
@keras_parameterized.run_with_all_model_types
def test_compiled_model(self):
+ # TODO(b/134519980): Issue with model.fit if the model call function uses
+ # a tf.function (Graph mode only).
+ if not context.executing_eagerly():
+ return
+
input_arr = np.random.random((1, 3))
target_arr = np.random.random((1, 4))
@@ -275,21 +280,18 @@
saved_model_dir = self._save_model_dir()
tf_save.save(model, saved_model_dir)
- # TODO(b/134519980): Issue with model.fit if the model call function uses
- # a tf.function (Graph mode only).
- with context.eager_mode():
- loaded = keras_load.load(saved_model_dir)
- actual_predict = loaded.predict(input_arr)
- self.assertAllClose(expected_predict, actual_predict)
+ loaded = keras_load.load(saved_model_dir)
+ actual_predict = loaded.predict(input_arr)
+ self.assertAllClose(expected_predict, actual_predict)
- loss_before = loaded.evaluate(input_arr, target_arr)
- loaded.fit(input_arr, target_arr)
- loss_after = loaded.evaluate(input_arr, target_arr)
- self.assertLess(loss_after, loss_before)
- predict = loaded.predict(input_arr)
+ loss_before = loaded.evaluate(input_arr, target_arr)
+ loaded.fit(input_arr, target_arr)
+ loss_after = loaded.evaluate(input_arr, target_arr)
+ self.assertLess(loss_after, loss_before)
+ predict = loaded.predict(input_arr)
- ckpt_path = os.path.join(self.get_temp_dir(), 'weights')
- loaded.save_weights(ckpt_path)
+ ckpt_path = os.path.join(self.get_temp_dir(), 'weights')
+ loaded.save_weights(ckpt_path)
# Ensure that the checkpoint is compatible with the original model.
model.load_weights(ckpt_path)
diff --git a/tensorflow/python/keras/utils/version_utils.py b/tensorflow/python/keras/utils/version_utils.py
new file mode 100644
index 0000000..ca72dcd
--- /dev/null
+++ b/tensorflow/python/keras/utils/version_utils.py
@@ -0,0 +1,70 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# pylint: disable=protected-access
+"""Utilities for Keras classes with v1 and v2 versions."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.framework import ops
+from tensorflow.python.util import lazy_loader
+
+# TODO(b/134426265): Switch back to single-quotes once the issue
+# with copybara is fixed.
+# pylint: disable=g-inconsistent-quotes
+training = lazy_loader.LazyLoader(
+ "training", globals(),
+ "tensorflow.python.keras.engine.training")
+training_v1 = lazy_loader.LazyLoader(
+ "training_v1", globals(),
+ "tensorflow.python.keras.engine.training_v1")
+
+# pylint: enable=g-inconsistent-quotes
+
+
+# TODO(omalleyt): Extend to Layer class once Layer class is split.
+class VersionSelector(object):
+ """Chooses between Keras v1 and v2 Model class."""
+
+ def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
+ new_cls = swap_class(cls, training.Model, training_v1.Model)
+ return object.__new__(new_cls)
+
+
+def swap_class(cls, v2_cls, v1_cls):
+ """Swaps in v2_cls or v1_cls depending on graph mode."""
+ if cls == object:
+ return cls
+
+ if cls in (v2_cls, v1_cls):
+ if ops.executing_eagerly_outside_functions():
+ return v2_cls
+ return v1_cls
+
+ # Recursively search superclasses to swap in the right Keras class.
+ cls.__bases__ = tuple(
+ swap_class(base, v2_cls, v1_cls) for base in cls.__bases__)
+ return cls
+
+
+def disallow_legacy_graph(cls_name, method_name):
+ if not ops.executing_eagerly_outside_functions():
+ error_msg = (
+ "Calling `{cls_name}.{method_name}` in graph mode is not supported "
+ "when the `{cls_name}` instance was constructed with eager mode "
+ "enabled. Please construct your `{cls_name}` instance in graph mode or"
+ " call `{cls_name}.{method_name}` with eager mode enabled.")
+ error_msg = error_msg.format(cls_name=cls_name, method_name=method_name)
+ raise ValueError(error_msg)
diff --git a/tensorflow/python/keras/utils/version_utils_test.py b/tensorflow/python/keras/utils/version_utils_test.py
new file mode 100644
index 0000000..65eee62
--- /dev/null
+++ b/tensorflow/python/keras/utils/version_utils_test.py
@@ -0,0 +1,133 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for Keras utilities to split v1 and v2 classes."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+import numpy as np
+import six
+
+from tensorflow.python import keras
+from tensorflow.python.eager import context
+from tensorflow.python.framework import ops
+from tensorflow.python.keras import keras_parameterized
+from tensorflow.python.keras.engine import training
+from tensorflow.python.keras.engine import training_v1
+from tensorflow.python.platform import test
+
+
+@keras_parameterized.run_all_keras_modes
+class SplitUtilsTest(keras_parameterized.TestCase):
+
+ def _check_model_class(self, model_class):
+ if ops.executing_eagerly_outside_functions():
+ self.assertEqual(model_class, training.Model)
+ else:
+ self.assertEqual(model_class, training_v1.Model)
+
+ def test_functional_model(self):
+ inputs = keras.Input(10)
+ outputs = keras.layers.Dense(1)(inputs)
+ model = keras.Model(inputs, outputs)
+ self._check_model_class(model.__class__)
+
+ def test_sequential_model(self):
+ model = keras.Sequential([keras.layers.Dense(1)])
+ model_class = model.__class__.__bases__[0]
+ self._check_model_class(model_class)
+
+ def test_subclass_model(self):
+
+ class MyModel(keras.Model):
+
+ def call(self, x):
+ return 2 * x
+
+ model = MyModel()
+ model_class = model.__class__.__bases__[0]
+ self._check_model_class(model_class)
+
+ def test_multiple_subclass_model(self):
+
+ class Model1(keras.Model):
+ pass
+
+ class Model2(Model1):
+
+ def call(self, x):
+ return 2 * x
+
+ model = Model2()
+ model_class = model.__class__.__bases__[0].__bases__[0]
+ self._check_model_class(model_class)
+
+ def test_user_provided_metaclass(self):
+
+ @six.add_metaclass(abc.ABCMeta)
+ class AbstractModel(keras.Model):
+
+ @abc.abstractmethod
+ def call(self, inputs):
+ """Calls the model."""
+
+ class MyModel(AbstractModel):
+
+ def call(self, inputs):
+ return 2 * inputs
+
+ with self.assertRaisesRegexp(TypeError, 'instantiate abstract class'):
+ AbstractModel()
+
+ model = MyModel()
+ model_class = model.__class__.__bases__[0].__bases__[0]
+ self._check_model_class(model_class)
+
+ def test_multiple_inheritance(self):
+
+ class Return2(object):
+
+ def return_2(self):
+ return 2
+
+ class MyModel(keras.Model, Return2):
+
+ def call(self, x):
+ return self.return_2() * x
+
+ model = MyModel()
+ bases = model.__class__.__bases__
+ self._check_model_class(bases[0])
+ self.assertEqual(bases[1], Return2)
+ self.assertEqual(model.return_2(), 2)
+
+ def test_fit_error(self):
+ if not ops.executing_eagerly_outside_functions():
+ # Error only appears on the v2 class.
+ return
+
+ model = keras.Sequential([keras.layers.Dense(1)])
+ model.compile('sgd', 'mse')
+ x, y = np.ones((10, 10)), np.ones((10, 1))
+ with context.graph_mode():
+ with self.assertRaisesRegexp(
+ ValueError, 'instance was constructed with eager mode enabled'):
+ model.fit(x, y, batch_size=2)
+
+
+if __name__ == '__main__':
+ test.main()
diff --git a/tensorflow/python/kernel_tests/BUILD b/tensorflow/python/kernel_tests/BUILD
index c5f3ecc..6838f35 100644
--- a/tensorflow/python/kernel_tests/BUILD
+++ b/tensorflow/python/kernel_tests/BUILD
@@ -1859,6 +1859,10 @@
"//tensorflow/python:nn_grad",
"//tensorflow/python:nn_ops",
],
+
+    # TODO(b/144432983): S32 convolutions should not be auto-clustered;
+    # auto-clustering them only crashes tests.
+ xla_enable_strict_auto_jit = False,
)
cuda_py_test(
@@ -1979,6 +1983,8 @@
"//tensorflow/python:client_testlib",
"//tensorflow/python:framework_for_generated_wrappers",
],
+ # TODO(b/144432983): S32 convolutions should not be auto-clustered.
+ xla_enable_strict_auto_jit = False,
)
cuda_py_test(
diff --git a/tensorflow/python/kernel_tests/argmax_op_test.py b/tensorflow/python/kernel_tests/argmax_op_test.py
index 44a0e83..3870972 100644
--- a/tensorflow/python/kernel_tests/argmax_op_test.py
+++ b/tensorflow/python/kernel_tests/argmax_op_test.py
@@ -55,7 +55,9 @@
expected_values,
expected_err_re=None):
self._testArg(method, x, axis, expected_values, True, expected_err_re)
- self._testArg(method, x, axis, expected_values, False, expected_err_re)
+ # Compilation time is too large with XLA/CPU autojit.
+ if not test_util.is_xla_enabled():
+ self._testArg(method, x, axis, expected_values, False, expected_err_re)
def _testBasic(self, dtype):
x = np.arange(200, dtype=dtype)
diff --git a/tensorflow/python/kernel_tests/distributions/BUILD b/tensorflow/python/kernel_tests/distributions/BUILD
index b551715..0c5d472 100644
--- a/tensorflow/python/kernel_tests/distributions/BUILD
+++ b/tensorflow/python/kernel_tests/distributions/BUILD
@@ -41,6 +41,7 @@
"//tensorflow/python:platform_test",
],
shard_count = 3,
+ xla_enable_strict_auto_jit = False, # TODO(b/144920376)
)
cuda_py_test(
diff --git a/tensorflow/python/kernel_tests/proto/BUILD b/tensorflow/python/kernel_tests/proto/BUILD
index ff86609..33fe9ab 100644
--- a/tensorflow/python/kernel_tests/proto/BUILD
+++ b/tensorflow/python/kernel_tests/proto/BUILD
@@ -4,7 +4,6 @@
load("//tensorflow:tensorflow.bzl", "tf_cc_shared_object")
load("//tensorflow/core/platform:default/build_config_root.bzl", "if_static")
load("//tensorflow/core/platform:default/build_config.bzl", "tf_additional_all_protos", "tf_proto_library")
-# Placeholder for Google-internal load statements.
package(
default_visibility = ["//visibility:public"],
@@ -26,6 +25,7 @@
[],
otherwise = [":libtestexample.so"],
),
+ python_version = "PY3",
tags = [
"no_pip", # TODO(b/78026780)
"no_windows", # TODO(b/78028010)
@@ -45,6 +45,7 @@
[],
otherwise = [":libtestexample.so"],
),
+ python_version = "PY3",
tags = [
"no_pip", # TODO(b/78026780)
"no_windows", # TODO(b/78028010)
@@ -123,6 +124,7 @@
"//tensorflow/python:proto_ops",
"//tensorflow/python:client_testlib",
],
+ python_version = "PY3",
tags = [
"no_pip",
],
diff --git a/tensorflow/python/kernel_tests/signal/BUILD b/tensorflow/python/kernel_tests/signal/BUILD
index 7836d47..e8ef9e3 100644
--- a/tensorflow/python/kernel_tests/signal/BUILD
+++ b/tensorflow/python/kernel_tests/signal/BUILD
@@ -31,6 +31,7 @@
"//tensorflow/python:framework_for_generated_wrappers",
"//tensorflow/python/ops/signal",
],
+ python_version = "PY3",
tags = ["no_rocm"],
)
@@ -45,6 +46,7 @@
"//tensorflow/python:math_ops",
"//tensorflow/python/ops/signal",
],
+ python_version = "PY3",
shard_count = 8,
tags = [
"no_rocm",
@@ -61,6 +63,7 @@
"//tensorflow/python:client_testlib",
"//tensorflow/python/ops/signal",
],
+ python_version = "PY3",
)
cuda_py_tests(
@@ -75,6 +78,7 @@
"//tensorflow/python:framework_test_lib",
"//tensorflow/python/ops/signal",
],
+ python_version = "PY3",
tags = ["no_rocm"],
)
@@ -94,6 +98,7 @@
"//tensorflow/python/ops/signal",
"//tensorflow/python:platform_test",
],
+ python_version = "PY3",
)
cuda_py_tests(
@@ -111,6 +116,7 @@
"//tensorflow/python/ops/signal",
"//tensorflow/python:platform_test",
],
+ python_version = "PY3",
)
cuda_py_tests(
@@ -131,6 +137,7 @@
"//tensorflow/python:platform_test",
"//tensorflow/python/ops/signal",
],
+ python_version = "PY3",
tags = [
"no_rocm",
"nomac",
@@ -150,6 +157,7 @@
"//tensorflow/python/ops/signal",
"//tensorflow/python:platform_test",
],
+ python_version = "PY3",
shard_count = 4,
tags = [
"no_windows_gpu",
diff --git a/tensorflow/python/kernel_tests/signal/window_ops_test.py b/tensorflow/python/kernel_tests/signal/window_ops_test.py
index b2ae7f5..0708618 100644
--- a/tensorflow/python/kernel_tests/signal/window_ops_test.py
+++ b/tensorflow/python/kernel_tests/signal/window_ops_test.py
@@ -141,7 +141,7 @@
tflite_model, [window_length])
expected_output = self.evaluate(fn(window_length))
- self.assertAllClose(actual_output, expected_output, rtol=1e-7, atol=1e-7)
+ self.assertAllClose(actual_output, expected_output, rtol=1e-6, atol=1e-6)
if __name__ == '__main__':
diff --git a/tensorflow/python/lib/core/pybind11_status.h b/tensorflow/python/lib/core/pybind11_status.h
index f71dace..e45d894 100644
--- a/tensorflow/python/lib/core/pybind11_status.h
+++ b/tensorflow/python/lib/core/pybind11_status.h
@@ -22,6 +22,7 @@
#include "tensorflow/c/tf_status.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/protobuf/error_codes.pb.h"
+#include "tensorflow/python/lib/core/py_exception_registry.h"
namespace tensorflow {
@@ -58,6 +59,16 @@
}
}
+inline void MaybeRaiseRegisteredFromStatus(const tensorflow::Status& status) {
+ if (!status.ok()) {
+ PyErr_SetObject(PyExceptionRegistry::Lookup(status.code()),
+ pybind11::make_tuple(pybind11::none(), pybind11::none(),
+ status.error_message())
+ .ptr());
+ throw pybind11::error_already_set();
+ }
+}
+
inline void MaybeRaiseFromTFStatus(TF_Status* status) {
TF_Code code = TF_GetCode(status);
if (code != TF_OK) {
@@ -66,6 +77,17 @@
}
}
+inline void MaybeRaiseRegisteredFromTFStatus(TF_Status* status) {
+ TF_Code code = TF_GetCode(status);
+ if (code != TF_OK) {
+ PyErr_SetObject(PyExceptionRegistry::Lookup(code),
+ pybind11::make_tuple(pybind11::none(), pybind11::none(),
+ TF_Message(status))
+ .ptr());
+ throw pybind11::error_already_set();
+ }
+}
+
} // namespace tensorflow
namespace pybind11 {
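
Seen from Python, the new `MaybeRaiseRegisteredFrom*` helpers let pybind11-wrapped calls surface the exception class registered for the status code instead of a generic error. A hedged sketch of what a caller observes (the failing call below is only an example):

    import tensorflow as tf

    try:
      # Any op returning a NOT_FOUND status will do for illustration.
      tf.io.read_file('/path/that/does/not/exist')
    except tf.errors.NotFoundError as e:
      # The registry maps the NOT_FOUND code to NotFoundError, and the
      # raised exception carries the status message.
      print(e.message)
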
diff --git a/tensorflow/python/ops/losses/BUILD b/tensorflow/python/ops/losses/BUILD
index d513aea..0620238 100644
--- a/tensorflow/python/ops/losses/BUILD
+++ b/tensorflow/python/ops/losses/BUILD
@@ -41,7 +41,7 @@
name = "util_test",
size = "small",
srcs = ["util_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":losses",
diff --git a/tensorflow/python/ops/math_ops.py b/tensorflow/python/ops/math_ops.py
index 0ba5e92..6e6fd50 100644
--- a/tensorflow/python/ops/math_ops.py
+++ b/tensorflow/python/ops/math_ops.py
@@ -109,6 +109,7 @@
tf_export(v1=["arg_max"])(arg_max)
tf_export(v1=["arg_min"])(arg_min)
+
# This is set by resource_variable_ops.py. It is included in this way since
# there is a circular dependency between math_ops and resource_variable_ops
_resource_variable_type = None
@@ -4224,3 +4225,36 @@
x = ops.convert_to_tensor(x, name="x")
one = constant_op.constant(1, dtype=x.dtype.base_dtype, name="one")
return gen_math_ops.div_no_nan(one, x, name=scope)
+
+
+@tf_export("math.erfinv")
+@dispatch.add_dispatch_support
+def erfinv(x, name=None):
+ """Compute inverse error function.
+
+ Given `x`, compute the inverse error function of `x`. This function
+ is the inverse of `tf.math.erf`.
+
+ Args:
+ x: `Tensor` with type `float` or `double`.
+    name: A name for the operation (optional).
+
+  Returns:
+ Inverse error function of `x`.
+ """
+ with ops.name_scope(name, "erfinv", [x]):
+ return gen_math_ops.erfinv(x)
+
+
+@tf_export("math.ndtri")
+@dispatch.add_dispatch_support
+def ndtri(x, name=None):
+ """Compute quantile of Standard Normal.
+
+ Args:
+ x: `Tensor` with type `float` or `double`.
+ name: A name for the operation (optional).
+ Returns:
+ Inverse error function of `x`.
+ """
+ with ops.name_scope(name, "ndtri", [x]):
+ return gen_math_ops.ndtri(x)
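
A quick numerical sketch of the two new ops (printed values are approximate):

    import tensorflow as tf

    x = tf.constant([0.0, 0.5, -0.5])
    # erfinv inverts erf, so the round trip recovers x.
    print(tf.math.erfinv(tf.math.erf(x)))   # ~[ 0. ,  0.5, -0.5]

    p = tf.constant([0.5, 0.975])
    # ndtri is the standard normal quantile function (inverse CDF).
    print(tf.math.ndtri(p))                 # ~[0.  , 1.96]
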
diff --git a/tensorflow/python/ops/math_ops_test.py b/tensorflow/python/ops/math_ops_test.py
index de9b289..f49ba3d 100644
--- a/tensorflow/python/ops/math_ops_test.py
+++ b/tensorflow/python/ops/math_ops_test.py
@@ -441,7 +441,7 @@
nums, divs = self.floatTestData()
tf_result = math_ops.realdiv(nums, divs)
np_result = np.divide(nums, divs)
- self.assertAllEqual(tf_result, np_result)
+ self.assertAllClose(tf_result, np_result)
def testComplexDiv(self):
foo = array_ops.constant([1. + 3.j])
@@ -498,7 +498,7 @@
with test_util.use_gpu():
tf_result = math_ops.div_no_nan(nums, divs)
- self.assertAllEqual(tf_result, np_result)
+ self.assertAllClose(tf_result, np_result)
@test_util.run_all_in_graph_and_eager_modes
diff --git a/tensorflow/python/ops/nn_ops.py b/tensorflow/python/ops/nn_ops.py
index 77b7465..5ed5bf8 100644
--- a/tensorflow/python/ops/nn_ops.py
+++ b/tensorflow/python/ops/nn_ops.py
@@ -24,7 +24,9 @@
import numpy as np
+from tensorflow.python.compat import compat
from tensorflow.python.eager import context
+from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import graph_util
@@ -38,6 +40,7 @@
# TODO(b/138808492): Remove code inside copybara
from tensorflow.python.ops import control_flow_ops
# copybara:strip_end
+from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
@@ -4379,19 +4382,24 @@
>>> tf.random.set_seed(0)
>>> x = tf.ones([3,5])
- >>> tf.nn.dropout(x, rate = 0.5).numpy()
- array([[0., 0., 2., 2., 0.],
- [2., 0., 2., 2., 0.],
- [2., 2., 2., 0., 0.]], dtype=float32)
- >>> tf.nn.dropout(x, rate = 0.8).numpy()
- array([[0., 0., 5., 0., 0.],
- [0., 0., 5., 0., 0.],
- [5., 0., 0., 5., 0.]], dtype=float32)
+ >>> tf.nn.dropout(x, rate = 0.5, seed = 1).numpy()
+ array([[2., 0., 0., 2., 2.],
+ [2., 2., 2., 2., 2.],
+ [2., 0., 2., 0., 2.]], dtype=float32)
- If rate is set to `0` the input is returned, unchanged:
+ >>> tf.random.set_seed(0)
+ >>> x = tf.ones([3,5])
+ >>> tf.nn.dropout(x, rate = 0.8, seed = 1).numpy()
+ array([[0., 0., 0., 5., 5.],
+ [0., 5., 0., 5., 0.],
+ [5., 0., 5., 0., 5.]], dtype=float32)
- >>> tf.nn.dropout(x, rate = 0.0) is x
- True
+ >>> tf.nn.dropout(x, rate = 0.0) == x
+ <tf.Tensor: shape=(3, 5), dtype=bool, numpy=
+ array([[ True, True, True, True, True],
+ [ True, True, True, True, True],
+ [ True, True, True, True, True]])>
+
By default, each element is kept or dropped independently. If `noise_shape`
is specified, it must be
@@ -4400,11 +4408,12 @@
will make independent decisions. This is useful for dropping whole
channels from an image or sequence. For example:
+ >>> tf.random.set_seed(0)
>>> x = tf.ones([3,10])
- >>> tf.nn.dropout(x, rate = 2/3, noise_shape=[1,10]).numpy()
- array([[0., 3., 0., 3., 0., 0., 3., 0., 0., 3.],
- [0., 3., 0., 3., 0., 0., 3., 0., 0., 3.],
- [0., 3., 0., 3., 0., 0., 3., 0., 0., 3.]], dtype=float32)
+ >>> tf.nn.dropout(x, rate = 2/3, noise_shape=[1,10], seed=1).numpy()
+ array([[0., 0., 0., 3., 3., 0., 3., 3., 3., 0.],
+ [0., 0., 0., 3., 3., 0., 3., 3., 3., 0.],
+ [0., 0., 0., 3., 3., 0., 3., 3., 3., 0.]], dtype=float32)
Args:
x: A floating point tensor.
@@ -4426,53 +4435,100 @@
which is likely not what was intended.
"""
with ops.name_scope(name, "dropout", [x]) as name:
- x = ops.convert_to_tensor(x, name="x")
- if not x.dtype.is_floating:
- raise ValueError("x has to be a floating point tensor since it's going to"
- " be scaled. Got a %s tensor instead." % x.dtype)
- if isinstance(rate, numbers.Real):
- if not (rate >= 0 and rate < 1):
+    # TODO(b/144930399): Remove this branch once the forward compatibility
+    # window has passed.
+ if compat.forward_compatible(2019, 12, 16):
+ is_rate_number = isinstance(rate, numbers.Real)
+ if is_rate_number and (rate < 0 or rate >= 1):
raise ValueError("rate must be a scalar tensor or a float in the "
"range [0, 1), got %g" % rate)
- if rate > 0.5:
- logging.log_first_n(
- logging.WARN, "Large dropout rate: %g (>0.5). In TensorFlow "
- "2.x, dropout() uses dropout rate instead of keep_prob. "
- "Please ensure that this is intended.", 5, rate)
+ x = ops.convert_to_tensor(x, name="x")
+ x_dtype = x.dtype
+ if not x_dtype.is_floating:
+ raise ValueError("x has to be a floating point tensor since it's going "
+ "to be scaled. Got a %s tensor instead." % x_dtype)
+ is_executing_eagerly = context.executing_eagerly()
+ if not tensor_util.is_tensor(rate):
+ if is_rate_number:
+ keep_prob = 1 - rate
+ scale = 1 / keep_prob
+ scale = ops.convert_to_tensor(scale, dtype=x_dtype)
+ ret = gen_math_ops.mul(x, scale)
+ else:
+ raise ValueError("rate is neither scalar nor scalar tensor %r" % rate)
+ else:
+ rate.get_shape().assert_has_rank(0)
+ rate_dtype = rate.dtype
+ if rate_dtype != x_dtype:
+ if not rate_dtype.is_compatible_with(x_dtype):
+            raise ValueError(
+                "Tensor dtype %s is incompatible with Tensor dtype %s: %r" %
+ (x_dtype.name, rate_dtype.name, rate))
+ rate = gen_math_ops.cast(rate, x_dtype, name="rate")
+ one_tensor = constant_op.constant(1, dtype=x_dtype)
+ ret = gen_math_ops.real_div(x, gen_math_ops.sub(one_tensor, rate))
- # Early return if nothing needs to be dropped.
- if isinstance(rate, numbers.Real) and rate == 0:
- return x
- if context.executing_eagerly():
- if isinstance(rate, ops.EagerTensor):
- if rate.numpy() == 0:
- return x
+ noise_shape = _get_noise_shape(x, noise_shape)
+ # Sample a uniform distribution on [0.0, 1.0) and select values larger
+ # than rate.
+ #
+      # NOTE: Random uniform can only generate 2^23 distinct floats; it
+      # samples on [1.0, 2.0) and then subtracts 1.0.
+ random_tensor = random_ops.random_uniform(
+ noise_shape, seed=seed, dtype=x_dtype)
+ # NOTE: if (1.0 + rate) - 1 is equal to rate, then that float is selected,
+ # hence a >= comparison is used.
+ keep_mask = random_tensor >= rate
+ ret = gen_math_ops.mul(ret, gen_math_ops.cast(keep_mask, x_dtype))
+ if not is_executing_eagerly:
+ ret.set_shape(x.get_shape())
+ return ret
else:
- rate = ops.convert_to_tensor(
- rate, dtype=x.dtype, name="rate")
- rate.get_shape().assert_has_rank(0)
+ x = ops.convert_to_tensor(x, name="x")
+ if not x.dtype.is_floating:
+ raise ValueError("x has to be a floating point tensor since it will "
+ "be scaled. Got a %s tensor instead." % x.dtype)
+ if isinstance(rate, numbers.Real):
+ if not (rate >= 0 and rate < 1):
+ raise ValueError("rate must be a scalar tensor or a float in the "
+ "range [0, 1), got %g" % rate)
+ if rate > 0.5:
+ logging.log_first_n(
+ logging.WARN, "Large dropout rate: %g (>0.5). In TensorFlow "
+ "2.x, dropout() uses dropout rate instead of keep_prob. "
+ "Please ensure that this is intended.", 5, rate)
- # Do nothing if we know rate == 0
- if tensor_util.constant_value(rate) == 0:
+ # Early return if nothing needs to be dropped.
+ if isinstance(rate, numbers.Real) and rate == 0:
return x
+ if context.executing_eagerly():
+ if isinstance(rate, ops.EagerTensor):
+ if rate.numpy() == 0:
+ return x
+ else:
+ rate = ops.convert_to_tensor(rate, dtype=x.dtype, name="rate")
+ rate.get_shape().assert_has_rank(0)
- noise_shape = _get_noise_shape(x, noise_shape)
- # Sample a uniform distribution on [0.0, 1.0) and select values larger than
- # rate.
- #
- # NOTE: Random uniform actually can only generate 2^23 floats on [1.0, 2.0)
- # and subtract 1.0.
- random_tensor = random_ops.random_uniform(
- noise_shape, seed=seed, dtype=x.dtype)
- keep_prob = 1 - rate
- scale = 1 / keep_prob
- # NOTE: if (1.0 + rate) - 1 is equal to rate, then we want to consider that
- # float to be selected, hence we use a >= comparison.
- keep_mask = random_tensor >= rate
- ret = x * scale * math_ops.cast(keep_mask, x.dtype)
- if not context.executing_eagerly():
- ret.set_shape(x.get_shape())
- return ret
+ # Do nothing if we know rate == 0
+ if tensor_util.constant_value(rate) == 0:
+ return x
+
+ noise_shape = _get_noise_shape(x, noise_shape)
+ # Sample a uniform distribution on [0.0, 1.0) and select values larger
+ # than rate.
+ #
+      # NOTE: Random uniform can only generate 2^23 distinct floats; it
+      # samples on [1.0, 2.0) and then subtracts 1.0.
+ random_tensor = random_ops.random_uniform(
+ noise_shape, seed=seed, dtype=x.dtype)
+ keep_prob = 1 - rate
+ scale = 1 / keep_prob
+ # NOTE: if (1.0 + rate) - 1 is equal to rate, then that
+ # float is selected, hence we use a >= comparison.
+ keep_mask = random_tensor >= rate
+ ret = x * scale * math_ops.cast(keep_mask, x.dtype)
+ if not context.executing_eagerly():
+ ret.set_shape(x.get_shape())
+ return ret
@tf_export("math.top_k", "nn.top_k")
diff --git a/tensorflow/python/ops/nn_test.py b/tensorflow/python/ops/nn_test.py
index e2389e7..53efadc 100644
--- a/tensorflow/python/ops/nn_test.py
+++ b/tensorflow/python/ops/nn_test.py
@@ -474,6 +474,10 @@
t = constant_op.constant(1.0, shape=[x_dim, y_dim], dtype=dtypes.float32)
_ = nn_ops.dropout_v2(t, 0.9)
+ def testVariableRef(self):
+ x = variable_scope.get_variable("x", shape=[10, 10], dtype=dtypes.float32)
+ _ = nn_ops.dropout(x, keep_prob=0.1)
+
@test_util.run_deprecated_v1
def testShapedDropoutShapeError(self):
# Runs shaped dropout and verifies an error is thrown on misshapen noise.
@@ -496,13 +500,13 @@
_ = nn_ops.dropout(t, rate=(1 - keep_prob), noise_shape=[x_dim, 1])
_ = nn_ops.dropout(t, rate=(1 - keep_prob), noise_shape=[1, 1])
- def testNoDropoutFast(self):
+ def testNoDropout(self):
x = array_ops.zeros((5,))
y = nn_ops.dropout(x, rate=0)
- self.assertTrue(x is y)
+ self.assertAllEqual(x, y)
y = nn_ops.dropout_v2(x, rate=0)
- self.assertTrue(x is y)
+ self.assertAllEqual(x, y)
def testDropoutWithIntegerInputs(self):
x = constant_op.constant([1, 1, 1, 1, 1])
diff --git a/tensorflow/python/ops/ragged/BUILD b/tensorflow/python/ops/ragged/BUILD
index 0ab663d..010a1a4 100644
--- a/tensorflow/python/ops/ragged/BUILD
+++ b/tensorflow/python/ops/ragged/BUILD
@@ -6,6 +6,7 @@
"//intelligence/datum/prensor:__pkg__",
"//learning/brain/contrib/text:__pkg__",
"//nlp/nlx/bert:__pkg__",
+ "//nlp/nlx/infrastructure/multiscale:__subpackages__",
"//nlp/projects/atc/tf/ops:__pkg__",
"//research/socrates:__subpackages__",
"//tensorflow:internal",
@@ -433,7 +434,7 @@
size = "medium",
timeout = "long",
srcs = ["ragged_tensor_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
shard_count = 4,
srcs_version = "PY2AND3",
tags = [
@@ -463,7 +464,7 @@
name = "ragged_eager_test",
size = "medium",
srcs = ["ragged_eager_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -477,7 +478,7 @@
py_test(
name = "ragged_range_op_test",
srcs = ["ragged_range_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_math_ops",
@@ -490,7 +491,7 @@
py_test(
name = "ragged_tensor_bounding_shape_op_test",
srcs = ["ragged_tensor_bounding_shape_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -503,7 +504,7 @@
py_test(
name = "ragged_row_lengths_op_test",
srcs = ["ragged_row_lengths_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -518,7 +519,7 @@
py_test(
name = "ragged_gather_op_test",
srcs = ["ragged_gather_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -537,7 +538,7 @@
py_test(
name = "ragged_batch_gather_op_test",
srcs = ["ragged_batch_gather_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_batch_gather_ops",
@@ -558,7 +559,7 @@
py_test(
name = "ragged_gather_nd_op_test",
srcs = ["ragged_gather_nd_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -577,7 +578,7 @@
py_test(
name = "ragged_row_splits_to_segment_ids_op_test",
srcs = ["ragged_row_splits_to_segment_ids_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":segment_id_ops",
@@ -590,7 +591,7 @@
py_test(
name = "ragged_segment_ids_to_row_splits_op_test",
srcs = ["ragged_segment_ids_to_row_splits_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":segment_id_ops",
@@ -603,7 +604,7 @@
py_test(
name = "ragged_from_tensor_op_test",
srcs = ["ragged_from_tensor_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_tensor",
@@ -619,7 +620,7 @@
py_test(
name = "ragged_to_sparse_op_test",
srcs = ["ragged_to_sparse_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_windows",
@@ -643,7 +644,7 @@
py_test(
name = "ragged_from_sparse_op_test",
srcs = ["ragged_from_sparse_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_tensor",
@@ -660,7 +661,7 @@
py_test(
name = "ragged_to_tensor_op_test",
srcs = ["ragged_to_tensor_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -680,7 +681,7 @@
py_test(
name = "ragged_segment_op_test",
srcs = ["ragged_segment_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -698,7 +699,7 @@
py_test(
name = "ragged_reduce_op_test",
srcs = ["ragged_reduce_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -717,7 +718,7 @@
py_test(
name = "ragged_map_flat_values_op_test",
srcs = ["ragged_map_flat_values_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -736,7 +737,7 @@
py_test(
name = "ragged_const_op_test",
srcs = ["ragged_const_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged",
@@ -772,7 +773,7 @@
py_test(
name = "ragged_constant_value_op_test",
srcs = ["ragged_constant_value_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_windows",
@@ -790,7 +791,7 @@
py_test(
name = "convert_to_tensor_or_ragged_tensor_op_test",
srcs = ["convert_to_tensor_or_ragged_tensor_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -807,7 +808,7 @@
py_test(
name = "ragged_boolean_mask_op_test",
srcs = ["ragged_boolean_mask_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_array_ops",
@@ -825,7 +826,7 @@
py_test(
name = "ragged_concat_op_test",
srcs = ["ragged_concat_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_concat_ops",
@@ -844,7 +845,7 @@
py_test(
name = "ragged_stack_op_test",
srcs = ["ragged_stack_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_concat_ops",
@@ -859,7 +860,7 @@
py_test(
name = "ragged_rank_op_test",
srcs = ["ragged_rank_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_array_ops",
@@ -873,7 +874,7 @@
py_test(
name = "ragged_tile_op_test",
srcs = ["ragged_tile_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_array_ops",
@@ -890,7 +891,7 @@
py_test(
name = "ragged_util_test",
srcs = ["ragged_util_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_util",
@@ -906,7 +907,7 @@
py_test(
name = "ragged_expand_dims_op_test",
srcs = ["ragged_expand_dims_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_array_ops",
@@ -920,7 +921,7 @@
py_test(
name = "ragged_where_op_test",
srcs = ["ragged_where_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -934,7 +935,7 @@
py_test(
name = "ragged_dispatch_test",
srcs = ["ragged_dispatch_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged", # fixdeps: keep
@@ -961,7 +962,7 @@
py_test(
name = "ragged_operators_test",
srcs = ["ragged_operators_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged", # fixdeps: keep
@@ -975,7 +976,7 @@
name = "ragged_map_fn_op_test",
size = "small",
srcs = ["ragged_map_fn_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged", # fixdeps: keep
@@ -1000,7 +1001,7 @@
py_test(
name = "ragged_tensor_shape_test",
srcs = ["ragged_tensor_shape_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged", # fixdeps: keep
@@ -1018,7 +1019,7 @@
py_test(
name = "ragged_size_op_test",
srcs = ["ragged_size_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_array_ops",
@@ -1032,7 +1033,7 @@
py_test(
name = "ragged_placeholder_op_test",
srcs = ["ragged_placeholder_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_factory_ops",
@@ -1047,7 +1048,7 @@
py_test(
name = "ragged_squeeze_op_test",
srcs = ["ragged_squeeze_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_conversion_ops",
@@ -1100,7 +1101,7 @@
name = "string_ngrams_op_test",
size = "small",
srcs = ["string_ngrams_op_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":ragged_string_ops",
diff --git a/tensorflow/python/ops/script_ops.py b/tensorflow/python/ops/script_ops.py
index 7639797..8463ffb 100644
--- a/tensorflow/python/ops/script_ops.py
+++ b/tensorflow/python/ops/script_ops.py
@@ -19,7 +19,6 @@
from __future__ import division
from __future__ import print_function
-import inspect
import threading
# Used by py_util.cc to get tracebacks.
@@ -42,6 +41,7 @@
from tensorflow.python.util import deprecation
from tensorflow.python.util import lazy_loader
from tensorflow.python.util import nest
+from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import tf_export
autograph = lazy_loader.LazyLoader(
@@ -285,20 +285,6 @@
original_func = func
func = autograph.do_not_convert(func)
- # Tying the registered function's lifetime with the current default graph is
- # not reliable. For example, Estimator-based binaries may switch graphs in
- # between model training end evaluation, via saved_model. Those binaries work
- # because the original function is global, and break once the registered
- # function is an anonymous lambda, like the one produced by do_not_convert.
- # To avoid breaking those cases, we attach the wrapper to the original
- # function so that their lifetime is connected.
- # TODO(b/144286616): Remove this.
- if inspect.isfunction(original_func):
- # Note: this check is needed because original_func may be a descriptor
- # (https://docs.python.org/3/howto/descriptor.html)
- # and we can't attach attributes to those.
- original_func.ag_dnc_wrapper__ = func
-
is_list_or_tuple = False
if isinstance(Tout, (list, tuple)):
is_list_or_tuple = True
@@ -308,6 +294,20 @@
if eager:
func = EagerFunc(func, Tout, is_grad_func)
+ # Tying the registered function's lifetime with the current default graph is
+ # not reliable. For example, Estimator-based binaries may switch graphs in
+  # between model training and evaluation, via saved_model. Those binaries work
+ # because the original function is global, and break once the registered
+ # function is an anonymous lambda, like the one produced by do_not_convert.
+ # To avoid breaking those cases, we attach the wrapper to the original
+ # function so that their lifetime is connected.
+ # TODO(b/144286616): Remove this.
+ if tf_inspect.isfunction(original_func):
+ # Note: this check is needed because original_func may be a descriptor
+ # (https://docs.python.org/3/howto/descriptor.html)
+ # and we can't attach attributes to those.
+ original_func.ag_dnc_wrapper__ = func
+
token = _py_funcs.insert(func)
# We tie the registered function's lifetime with the current default graph,
# i.e., when the current graph is destroyed, we remove its py funcs.
diff --git a/tensorflow/python/ops/structured/BUILD b/tensorflow/python/ops/structured/BUILD
index 21ce94a..a45496a 100644
--- a/tensorflow/python/ops/structured/BUILD
+++ b/tensorflow/python/ops/structured/BUILD
@@ -34,6 +34,7 @@
py_test(
name = "structured_tensor_test",
srcs = ["structured_tensor_test.py"],
+ python_version = "PY3",
deps = [
":structured_tensor",
"//tensorflow/python:framework_ops",
@@ -49,6 +50,7 @@
py_test(
name = "structured_tensor_spec_test",
srcs = ["structured_tensor_spec_test.py"],
+ python_version = "PY3",
deps = [
":structured_tensor",
"//tensorflow/python:framework_ops",
@@ -64,6 +66,7 @@
py_test(
name = "structured_tensor_slice_test",
srcs = ["structured_tensor_slice_test.py"],
+ python_version = "PY3",
deps = [
":structured_tensor",
"//tensorflow/python:framework_ops",
diff --git a/tensorflow/python/profiler/BUILD b/tensorflow/python/profiler/BUILD
index 616f3ff..4853d0e 100644
--- a/tensorflow/python/profiler/BUILD
+++ b/tensorflow/python/profiler/BUILD
@@ -57,6 +57,7 @@
"//tensorflow/python:platform",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"no_pip",
"notap",
@@ -77,6 +78,7 @@
"//tensorflow/python/profiler/internal:model_analyzer_testlib",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = ["no_pip"],
xla_enable_strict_auto_jit = False, # Node names are different with autojit
)
@@ -108,6 +110,7 @@
"//tensorflow/python:framework_for_generated_wrappers",
"//tensorflow/python:math_ops",
],
+ python_version = "PY3",
)
py_library(
@@ -132,6 +135,7 @@
"//tensorflow/python/profiler/internal:model_analyzer_testlib",
"//tensorflow/python:variables",
],
+ python_version = "PY3",
tags = [
"no_gpu", # b/136036359
"no_pip",
@@ -151,7 +155,7 @@
size = "small",
srcs = ["pprof_profiler_test.py"],
main = "pprof_profiler_test.py",
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = ["no_pip"], # TODO(annarev): get it working with pip.
deps = [
diff --git a/tensorflow/python/profiler/internal/BUILD b/tensorflow/python/profiler/internal/BUILD
index 5225bff..c414b7a 100644
--- a/tensorflow/python/profiler/internal/BUILD
+++ b/tensorflow/python/profiler/internal/BUILD
@@ -40,7 +40,7 @@
py_test(
name = "print_model_analysis_test",
srcs = ["print_model_analysis_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
"//tensorflow/core/profiler:protos_all_py",
@@ -67,6 +67,7 @@
"//tensorflow/python/profiler:model_analyzer",
"//tensorflow/python:random_ops",
],
+ python_version = "PY3",
tags = [
"no_gpu", # b/138442728
"no_pip",
diff --git a/tensorflow/python/saved_model/model_utils/BUILD b/tensorflow/python/saved_model/model_utils/BUILD
index 58d7456..8aab121 100644
--- a/tensorflow/python/saved_model/model_utils/BUILD
+++ b/tensorflow/python/saved_model/model_utils/BUILD
@@ -50,7 +50,7 @@
py_test(
name = "export_output_test",
srcs = ["export_output_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":export_output",
@@ -85,7 +85,7 @@
py_test(
name = "export_test",
srcs = ["export_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":export_utils",
@@ -112,7 +112,7 @@
py_test(
name = "mode_keys_test",
srcs = ["mode_keys_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":mode_keys",
diff --git a/tensorflow/python/tools/api/generator/BUILD b/tensorflow/python/tools/api/generator/BUILD
index d31e726..664d368 100644
--- a/tensorflow/python/tools/api/generator/BUILD
+++ b/tensorflow/python/tools/api/generator/BUILD
@@ -45,7 +45,7 @@
"create_python_api.py",
"create_python_api_test.py",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":doc_srcs",
@@ -62,7 +62,7 @@
"--api_name=tensorflow",
] + KERAS_API_INIT_FILES + KERAS_API_INIT_FILES_V1 + TENSORFLOW_API_INIT_FILES + TENSORFLOW_API_INIT_FILES_V1,
main = "doc_srcs_test.py",
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
deps = [
":doc_srcs",
@@ -78,7 +78,7 @@
"api_init_files.bzl",
"api_init_files_v1.bzl",
],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"no_pip",
diff --git a/tensorflow/python/tpu/tpu_embedding.py b/tensorflow/python/tpu/tpu_embedding.py
index 5b61abc..7648369 100644
--- a/tensorflow/python/tpu/tpu_embedding.py
+++ b/tensorflow/python/tpu/tpu_embedding.py
@@ -32,6 +32,7 @@
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
+from tensorflow.python.ops import math_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
@@ -49,7 +50,7 @@
class TableConfig(
collections.namedtuple('TableConfig', [
'vocabulary_size', 'dimension', 'initializer', 'combiner',
- 'hot_id_replication', 'learning_rate', 'learning_rate_key'
+ 'hot_id_replication', 'learning_rate', 'learning_rate_fn'
])):
"""Embedding table configuration."""
@@ -60,7 +61,7 @@
combiner='mean',
hot_id_replication=False,
learning_rate=None,
- learning_rate_key=None):
+ learning_rate_fn=None):
"""Embedding table configuration.
Args:
@@ -79,17 +80,16 @@
hot_id_replication: If true, enables hot id replication, which can make
embedding lookups faster if there are some hot rows in the table.
learning_rate: float, static learning rate for this table. If
- learning_rate and learning_rate_key are both `None`, global
+ learning_rate and learning_rate_fn are both `None`, global
static learning rate as specified in `optimization_parameters` in
- `TPUEmbedding` constructor will be used. `learning_rate_key` must be
+ `TPUEmbedding` constructor will be used. `learning_rate_fn` must be
`None` if `learning_rate` is not `None`.
- learning_rate_key: string, use dynamic learning rate of
- `learning_rates[learning_rate_key]` for this table, where
- `learning_rates` is the second argument of
- `generate_send_gradients_op()`. If learning_rate and learning_rate_key
- are both `None`, global static learning rate as specified in
- `optimization_parameters` in `TPUEmbedding` constructor will be used.
- `learning_rate` must be `None` if `learning_rate_key` is not `None.
+      learning_rate_fn: A function that takes the current global step and
+        returns the dynamic learning rate for this table. If
+        `learning_rate` and `learning_rate_fn` are both `None`, the global
+        static learning rate as specified in `optimization_parameters` in
+        the `TPUEmbedding` constructor will be used. `learning_rate` must
+        be `None` if `learning_rate_fn` is not `None`.
Returns:
`TableConfig`.
@@ -99,7 +99,7 @@
ValueError: if `dimension` is not positive integer.
ValueError: if `initializer` is specified and is not callable.
ValueError: if `combiner` is not supported.
- ValueError: if `learning_rate` and `learning_rate_key` are both not
+ ValueError: if `learning_rate` and `learning_rate_fn` are both not
`None`.
"""
if not isinstance(vocabulary_size, int) or vocabulary_size < 1:
@@ -117,14 +117,14 @@
if combiner not in ('mean', 'sum', 'sqrtn', None):
raise ValueError('Invalid combiner {}'.format(combiner))
- if learning_rate is not None and learning_rate_key is not None:
- raise ValueError('At most one of learning_rate and learning_rate_key '
+ if learning_rate is not None and learning_rate_fn is not None:
+ raise ValueError('At most one of learning_rate and learning_rate_fn '
'can be set; got {} and {}'
- .format(learning_rate, learning_rate_key))
+ .format(learning_rate, learning_rate_fn))
return super(TableConfig, cls).__new__(
cls, vocabulary_size, dimension, initializer, combiner,
- hot_id_replication, learning_rate, learning_rate_key)
+ hot_id_replication, learning_rate, learning_rate_fn)
class FeatureConfig(
@@ -694,6 +694,11 @@
self._optimization_parameters)
self._pipeline_execution_with_tensor_core = (
pipeline_execution_with_tensor_core)
+ self._learning_rate_fn = list(set(
+ c.learning_rate_fn for c in self._table_to_config_dict.values()
+ if c.learning_rate_fn is not None))
+ self._learning_rate_fn_to_tag = {
+        fn: tag for tag, fn in enumerate(self._learning_rate_fn)}
self._config_proto = self._create_config_proto()
@@ -767,10 +772,6 @@
def _create_config_proto(self):
"""Create `TPUEmbeddingConfiguration`."""
- self._learning_rate_keys = list(
- set(c.learning_rate_key
- for c in self._table_to_config_dict.values()
- if c.learning_rate_key is not None))
config_proto = elc.TPUEmbeddingConfiguration()
for table in self._table_to_config_dict:
table_descriptor = config_proto.table_descriptor.add()
@@ -788,9 +789,9 @@
parameters = table_descriptor.optimization_parameters
if table_config.learning_rate:
parameters.learning_rate.constant = (table_config.learning_rate)
- elif table_config.learning_rate_key:
+ elif table_config.learning_rate_fn:
parameters.learning_rate.dynamic.tag = (
- self._learning_rate_keys.index(table_config.learning_rate_key))
+ self._learning_rate_fn_to_tag[table_config.learning_rate_fn])
else:
parameters.learning_rate.constant = (
self._optimization_parameters.learning_rate)
@@ -1097,14 +1098,13 @@
def generate_send_gradients_op(self,
feature_to_gradient_dict,
- learning_rates=None):
+ step=None):
"""Send gradient to TPU embedding.
Args:
feature_to_gradient_dict: dict mapping feature names to gradient wrt
activations.
- learning_rates: dict mapping from learning rate key to dynamic learning
- rate. Defaults to `None`.
+ step: the current global step, used for dynamic learning rate.
Returns:
SendTPUEmbeddingGradients Op.
@@ -1116,9 +1116,8 @@
raise RuntimeError('Only in training mode gradients need to '
'be sent to TPU embedding; got mode {}.'
.format(self._mode))
-
- if learning_rates is None:
- learning_rates = dict()
+ if step is None and self._learning_rate_fn:
+ raise ValueError('There are dynamic learning rates but step is None.')
gradients = []
for table in self._table_to_features_dict:
@@ -1137,9 +1136,8 @@
return tpu_ops.send_tpu_embedding_gradients(
inputs=gradients,
- learning_rates=[
- learning_rates[tag] for tag in self._learning_rate_keys
- ],
+ learning_rates=[math_ops.cast(fn(step), dtype=dtypes.float32)
+ for fn in self._learning_rate_fn],
config=self.config_proto.SerializeToString())
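
A minimal sketch of the fn-to-tag bookkeeping introduced above (the decay schedule is purely illustrative):

    import tensorflow as tf

    def decayed_lr(step):
      # An illustrative learning_rate_fn: takes the global step, returns a rate.
      return 0.1 * tf.math.pow(0.99, tf.cast(step, tf.float32))

    # Mirrors the constructor logic: each distinct fn is assigned a dynamic
    # tag, and at gradient-send time every fn is evaluated at the current step.
    learning_rate_fns = [decayed_lr]
    fn_to_tag = {fn: tag for tag, fn in enumerate(learning_rate_fns)}
    learning_rates = [tf.cast(fn(tf.constant(100)), tf.float32)
                      for fn in learning_rate_fns]
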
diff --git a/tensorflow/python/training/supervisor.py b/tensorflow/python/training/supervisor.py
index 91960cb..07b5298 100644
--- a/tensorflow/python/training/supervisor.py
+++ b/tensorflow/python/training/supervisor.py
@@ -306,7 +306,7 @@
@end_compatibility
"""
if context.executing_eagerly():
- raise RuntimeError("Supervisors are compatible with eager execution.")
+ raise RuntimeError("Supervisors are incompatible with eager execution.")
# Set default values of arguments.
if graph is None:
graph = ops.get_default_graph()
diff --git a/tensorflow/python/training/tracking/BUILD b/tensorflow/python/training/tracking/BUILD
index ce8e9af..1b26622 100644
--- a/tensorflow/python/training/tracking/BUILD
+++ b/tensorflow/python/training/tracking/BUILD
@@ -255,7 +255,6 @@
"//tensorflow/python/keras:layers",
],
tags = [
- "no_windows", # b/124401331
"notsan", # b/74395663
],
)
diff --git a/tensorflow/stream_executor/lib/statusor.h b/tensorflow/stream_executor/lib/statusor.h
index 272cda1..7ba0954 100644
--- a/tensorflow/stream_executor/lib/statusor.h
+++ b/tensorflow/stream_executor/lib/statusor.h
@@ -273,7 +273,9 @@
}
template <typename T>
Status StatusOr<T>::status() && {
- return ok() ? Status::OK() : std::move(this->status_);
+ // Note that we copy instead of moving the status here so that
+ // ~StatusOrData() can call ok() without invoking UB.
+ return ok() ? Status::OK() : this->status_;
}
template <typename T>
diff --git a/tensorflow/stream_executor/lib/statusor_internals.h b/tensorflow/stream_executor/lib/statusor_internals.h
index a159da5..d3a6026 100644
--- a/tensorflow/stream_executor/lib/statusor_internals.h
+++ b/tensorflow/stream_executor/lib/statusor_internals.h
@@ -67,7 +67,7 @@
MakeValue(std::move(other.data_));
MakeStatus();
} else {
- MakeStatus(std::move(other.status_));
+ MakeStatus(other.status_);
}
}
@@ -87,7 +87,7 @@
MakeValue(std::move(other.data_));
MakeStatus();
} else {
- MakeStatus(std::move(other.status_));
+ MakeStatus(other.status_);
}
}
@@ -156,7 +156,9 @@
void Assign(Status&& status) {
Clear();
- status_ = std::move(status);
+ // Note that we copy instead of moving the status here so that
+ // status.~StatusOrData() can call ok() without invoking UB.
+ status_ = status;
EnsureNotOk();
}
diff --git a/tensorflow/tensorflow.bzl b/tensorflow/tensorflow.bzl
index 2376f0d..72257bf 100644
--- a/tensorflow/tensorflow.bzl
+++ b/tensorflow/tensorflow.bzl
@@ -2153,7 +2153,7 @@
xla_enable_strict_auto_jit = False,
xla_enabled = False,
grpc_enabled = False,
- python_version = "PY2"):
+ **kwargs):
# TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable additional
# XLA tests once enough compute resources are available.
_ignored = [xla_enable_strict_auto_jit]
@@ -2180,7 +2180,7 @@
tags = test_tags,
xla_enabled = xla_enabled,
xla_enable_strict_auto_jit = False,
- python_version = python_version,
+ **kwargs
)
register_extension_info(
@@ -2246,7 +2246,7 @@
xla_enable_strict_auto_jit = False,
xla_enabled = False,
grpc_enabled = False,
- python_version = "PY2"):
+ **kwargs):
for src in srcs:
test_name = src.split("/")[-1].split(".")[0]
if prefix:
@@ -2264,7 +2264,7 @@
tags = tags,
xla_enabled = xla_enabled,
xla_enable_strict_auto_jit = xla_enable_strict_auto_jit,
- python_version = python_version,
+ **kwargs
)
def gpu_py_tests(
@@ -2280,7 +2280,7 @@
xla_enable_strict_auto_jit = False,
xla_enabled = False,
grpc_enabled = False,
- python_version = "PY2"):
+ **kwargs):
# TODO(b/122522101): Don't ignore xla_enable_strict_auto_jit and enable additional
# XLA tests once enough compute resources are available.
_ignored = [xla_enable_strict_auto_jit]
@@ -2298,7 +2298,7 @@
tags = test_tags,
xla_enabled = xla_enabled,
xla_enable_strict_auto_jit = False,
- python_version = python_version,
+ **kwargs
)
# terminology changes: saving cuda_* definition for compatibility
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.-model.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.-model.pbtxt
index f5fa3f6..7df20bf 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.-model.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.-model.pbtxt
@@ -6,6 +6,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.-sequential.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.-sequential.pbtxt
index 86d218a..2b46b14 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.-sequential.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.-sequential.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-linear-model.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-linear-model.pbtxt
index aa207e3..4862a93 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-linear-model.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-linear-model.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-wide-deep-model.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-wide-deep-model.pbtxt
index 67479e4..45edc2e 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-wide-deep-model.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.experimental.-wide-deep-model.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-model.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-model.pbtxt
index 15c5ba4..6fb8f38 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-model.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-model.pbtxt
@@ -6,6 +6,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-sequential.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-sequential.pbtxt
index ad5b3fe..069025c 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-sequential.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.keras.models.-sequential.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.math.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.math.pbtxt
index bf7812a..c904681 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.math.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.math.pbtxt
@@ -141,6 +141,10 @@
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
+ name: "erfinv"
+ argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
+ }
+ member_method {
name: "exp"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
@@ -281,6 +285,10 @@
argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
+ name: "ndtri"
+ argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
+ }
+ member_method {
name: "negative"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
diff --git a/tensorflow/tools/api/golden/v1/tensorflow.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.pbtxt
index 2883f6d..75fcf74 100644
--- a/tensorflow/tools/api/golden/v1/tensorflow.pbtxt
+++ b/tensorflow/tools/api/golden/v1/tensorflow.pbtxt
@@ -1237,10 +1237,6 @@
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
- name: "erfinv"
- argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
- }
- member_method {
name: "executing_eagerly"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
@@ -1717,10 +1713,6 @@
argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
- name: "ndtri"
- argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
- }
- member_method {
name: "negative"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.-model.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.-model.pbtxt
index f5fa3f6..7df20bf 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.-model.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.-model.pbtxt
@@ -6,6 +6,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.-sequential.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.-sequential.pbtxt
index 86d218a..2b46b14 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.-sequential.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.-sequential.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-linear-model.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-linear-model.pbtxt
index aa207e3..4862a93 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-linear-model.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-linear-model.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-wide-deep-model.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-wide-deep-model.pbtxt
index 67479e4..45edc2e 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-wide-deep-model.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.experimental.-wide-deep-model.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-model.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-model.pbtxt
index 15c5ba4..6fb8f38 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-model.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-model.pbtxt
@@ -6,6 +6,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-sequential.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-sequential.pbtxt
index ad5b3fe..069025c 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-sequential.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.keras.models.-sequential.pbtxt
@@ -7,6 +7,7 @@
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+ is_instance: "<class \'tensorflow.python.keras.utils.version_utils.VersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.math.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.math.pbtxt
index 82688f5..2ec2ab2 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.math.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.math.pbtxt
@@ -141,6 +141,10 @@
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
+ name: "erfinv"
+ argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
+ }
+ member_method {
name: "exp"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
@@ -281,6 +285,10 @@
argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
+ name: "ndtri"
+ argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
+ }
+ member_method {
name: "negative"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.pbtxt
index d67870a..7cf14d6 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.pbtxt
@@ -625,10 +625,6 @@
argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
- name: "erfinv"
- argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
- }
- member_method {
name: "executing_eagerly"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
@@ -813,10 +809,6 @@
argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
- name: "ndtri"
- argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
- }
- member_method {
name: "negative"
argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
diff --git a/tensorflow/tools/ci_build/Dockerfile.rocm b/tensorflow/tools/ci_build/Dockerfile.rocm
index 191947d..a083bc6 100644
--- a/tensorflow/tools/ci_build/Dockerfile.rocm
+++ b/tensorflow/tools/ci_build/Dockerfile.rocm
@@ -3,7 +3,7 @@
FROM ubuntu:xenial
MAINTAINER Jeff Poznanovic <jeffrey.poznanovic@amd.com>
-ARG DEB_ROCM_REPO=http://repo.radeon.com/rocm/apt/2.6/
+ARG DEB_ROCM_REPO=http://repo.radeon.com/rocm/apt/2.8.0/
ARG ROCM_PATH=/opt/rocm
ENV DEBIAN_FRONTEND noninteractive
diff --git a/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh b/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh
index c87ec29..3bb8d8b 100755
--- a/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh
+++ b/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh
@@ -22,6 +22,18 @@
pip install portpicker
pip install *.whl
+# Make bazel version the same as the env that invokes this script
+rm -rf ~/bazel
+mkdir ~/bazel
+pushd ~/bazel
+wget https://github.com/bazelbuild/bazel/releases/download/"${BAZEL_VERSION}"/bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh
+chmod +x bazel-*.sh
+./bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh --user
+rm bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh
+PATH="/bazel_pip/bin:$PATH"
+popd
+bazel version
+
# Use default configuration
yes "" | python configure.py
diff --git a/tensorflow/tools/ci_build/builds/docker_test.sh b/tensorflow/tools/ci_build/builds/docker_test.sh
index 38891b6..39e119f 100755
--- a/tensorflow/tools/ci_build/builds/docker_test.sh
+++ b/tensorflow/tools/ci_build/builds/docker_test.sh
@@ -109,7 +109,8 @@
libs=$(\ls /usr/lib/x86_64-linux-gnu/libcuda.* | xargs -I{} echo '-v {}:{}')
GPU_EXTRA_PARAMS="${devices} ${libs}"
elif [ "${IMAGE_TYPE}" == "rocm" ]; then
- ROCM_EXTRA_PARAMS="--device=/dev/kfd --device=/dev/dri --group-add video"
+ ROCM_EXTRA_PARAMS="--device=/dev/kfd --device=/dev/dri --group-add video \
+ --cap-add=SYS_PTRACE --security-opt seccomp=unconfined --shm-size 16G"
else
GPU_EXTRA_PARAMS=""
ROCM_EXTRA_PARAMS=""
diff --git a/tensorflow/tools/ci_build/builds/pip_new.sh b/tensorflow/tools/ci_build/builds/pip_new.sh
index 932d3e8..2559dac 100755
--- a/tensorflow/tools/ci_build/builds/pip_new.sh
+++ b/tensorflow/tools/ci_build/builds/pip_new.sh
@@ -60,7 +60,13 @@
# and tensorflow-gpu pip package. Will
# automatically handle adding/removing of _gpu
# suffix depending on what project name was
-# passed.
+# passed. Only works for Ubuntu.
+# TF_BUILD_BOTH_CPU_PACKAGES: (1 | 0)
+# 1 will build both tensorflow (no gpu support)
+# and tensorflow-cpu pip package. Will
+# automatically handle adding/removing of _cpu
+# suffix depending on what project name was
+# passed. Only works for macOS.
#
# To-be-deprecated variable(s).
# GIT_TAG_OVERRIDE: Values for `--git_tag_override`. This flag gets passed
@@ -241,11 +247,13 @@
DEFAULT_PROJECT_NAME="tensorflow"
DEFAULT_PIP_TEST_ROOT="pip_test"
DEFAULT_BUILD_BOTH_GPU_PACKAGES=0
+DEFAULT_BUILD_BOTH_CPU_PACKAGES=0
# Take in optional global variables
PIP_TESTS=${TF_PIP_TESTS:-$DEFAULT_PIP_TESTS}
PROJECT_NAME=${TF_PROJECT_NAME:-$DEFAULT_PROJECT_NAME}
PIP_TEST_ROOT=${TF_PIP_TEST_ROOT:-$DEFAULT_PIP_TEST_ROOT}
BUILD_BOTH_GPU_PACKAGES=${TF_BUILD_BOTH_GPU_PACKAGES:-$DEFAULT_BUILD_BOTH_GPU_PACKAGES}
+BUILD_BOTH_CPU_PACKAGES=${TF_BUILD_BOTH_CPU_PACKAGES:-$DEFAULT_BUILD_BOTH_CPU_PACKAGES}
# Local variables
PIP_WHL_DIR="${KOKORO_ARTIFACTS_DIR}/tensorflow/${PIP_TEST_ROOT}/whl"
@@ -640,20 +648,38 @@
echo "Size of the PIP wheel file built: $(ls -l ${WHL_PATH} | awk '{print $5}')"
# Build the other GPU package.
-if [ "$BUILD_BOTH_GPU_PACKAGES" -eq "1" ]; then
- echo "====================================="\
- "Building the other GPU pip package."
+if [[ "$BUILD_BOTH_GPU_PACKAGES" -eq "1" ]] || [[ "$BUILD_BOTH_CPU_PACKAGES" -eq "1" ]]; then
+
+ if [[ "$BUILD_BOTH_GPU_PACKAGES" -eq "1" ]] && [[ "$BUILD_BOTH_CPU_PACKAGES" -eq "1" ]]; then
+    die "ERROR: TF_BUILD_BOTH_GPU_PACKAGES and TF_BUILD_BOTH_CPU_PACKAGES cannot both be set. No additional package will be built."
+ fi
+
+ echo "====================================="
+ if [[ "$BUILD_BOTH_GPU_PACKAGES" -eq "1" ]]; then
+ if ! [[ ${OS_TYPE} == "ubuntu" ]]; then
+      die "ERROR: pip_new.sh only supports building both GPU wheels on ubuntu."
+ fi
+ echo "Building the other GPU pip package."
+ PROJECT_SUFFIX="gpu"
+ else
+ if ! [[ ${OS_TYPE} == "macos" ]]; then
+      die "ERROR: pip_new.sh only supports building both CPU wheels on macos."
+ fi
+ echo "Building the other CPU pip package."
+ PROJECT_SUFFIX="cpu"
+ fi
+
# Check container type
- if ! [[ ${CONTAINER_TYPE} == "gpu" ]]; then
- die "Error: CONTAINER_TYPE needs to be `GPU` to build GPU packages. Got "\
+ if ! [[ ${CONTAINER_TYPE} == ${PROJECT_SUFFIX} ]]; then
+ die "Error: CONTAINER_TYPE needs to be \"${PROJECT_SUFFIX}\" to build ${PROJECT_SUFFIX} packages. Got"\
"\"${CONTAINER_TYPE}\" instead."
fi
- if [[ "$PROJECT_NAME" == *_gpu ]]; then
- NEW_PROJECT_NAME=${PROJECT_NAME%"_gpu"}
+ if [[ "$PROJECT_NAME" == *_${PROJECT_SUFFIX} ]]; then
+ NEW_PROJECT_NAME=${PROJECT_NAME%"_${PROJECT_SUFFIX}"}
else
- NEW_PROJECT_NAME="${PROJECT_NAME}_gpu"
+ NEW_PROJECT_NAME="${PROJECT_NAME}_${PROJECT_SUFFIX}"
fi
- echo "The given gpu \$PROJECT_NAME is ${PROJECT_NAME}. The additional GPU "\
+ echo "The given ${PROJECT_SUFFIX} \$PROJECT_NAME is ${PROJECT_NAME}. The additional ${PROJECT_SUFFIX}"\
"pip package will have project name ${NEW_PROJECT_NAME}."
./bazel-bin/tensorflow/tools/pip_package/build_pip_package ${PIP_WHL_DIR} ${GPU_FLAG} ${NIGHTLY_FLAG} "--project_name" ${NEW_PROJECT_NAME} || die "build_pip_package FAILED"
diff --git a/tensorflow/tools/ci_build/ci_build.sh b/tensorflow/tools/ci_build/ci_build.sh
index 079765b..d41972f 100755
--- a/tensorflow/tools/ci_build/ci_build.sh
+++ b/tensorflow/tools/ci_build/ci_build.sh
@@ -111,7 +111,8 @@
# Add extra params for rocm devices and libraries for ROCm container.
if [[ "${CONTAINER_TYPE}" == "rocm" ]]; then
- ROCM_EXTRA_PARAMS="--device=/dev/kfd --device=/dev/dri --group-add video"
+ ROCM_EXTRA_PARAMS="--device=/dev/kfd --device=/dev/dri --group-add video \
+ --cap-add=SYS_PTRACE --security-opt seccomp=unconfined --shm-size 16G"
else
ROCM_EXTRA_PARAMS=""
fi
diff --git a/tensorflow/tools/ci_build/release/macos/cpu_py2_full/pip.sh b/tensorflow/tools/ci_build/release/macos/cpu_py2_full/pip.sh
index 98c241f..3744559 100644
--- a/tensorflow/tools/ci_build/release/macos/cpu_py2_full/pip.sh
+++ b/tensorflow/tools/ci_build/release/macos/cpu_py2_full/pip.sh
@@ -30,10 +30,11 @@
# Install macos pip dependencies
install_macos_pip_deps sudo
-# Export required variables for running pip.sh
+# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python2'
+export TF_BUILD_BOTH_CPU_PACKAGES=1
# Run configure.
export TF_NEED_CUDA=0
diff --git a/tensorflow/tools/ci_build/release/macos/cpu_py35_full/pip.sh b/tensorflow/tools/ci_build/release/macos/cpu_py35_full/pip.sh
index a1cbfde..4559c18 100644
--- a/tensorflow/tools/ci_build/release/macos/cpu_py35_full/pip.sh
+++ b/tensorflow/tools/ci_build/release/macos/cpu_py35_full/pip.sh
@@ -30,10 +30,11 @@
# Install macos pip dependencies
install_macos_pip_deps sudo pip3.5
-# Export required variables for running pip.sh
+# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.5'
+export TF_BUILD_BOTH_CPU_PACKAGES=1
# Run configure.
export TF_NEED_CUDA=0
diff --git a/tensorflow/tools/ci_build/release/macos/cpu_py36_full/pip.sh b/tensorflow/tools/ci_build/release/macos/cpu_py36_full/pip.sh
index d97fbf8..0ae2c3b 100644
--- a/tensorflow/tools/ci_build/release/macos/cpu_py36_full/pip.sh
+++ b/tensorflow/tools/ci_build/release/macos/cpu_py36_full/pip.sh
@@ -30,10 +30,11 @@
# Install macos pip dependencies
install_macos_pip_deps sudo pip3.6
-# Export required variables for running pip.sh
+# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.6'
+export TF_BUILD_BOTH_CPU_PACKAGES=1
# Run configure.
export TF_NEED_CUDA=0
diff --git a/tensorflow/tools/ci_build/release/macos/cpu_py37_full/pip.sh b/tensorflow/tools/ci_build/release/macos/cpu_py37_full/pip.sh
index 84f8b05..2d5fb07 100644
--- a/tensorflow/tools/ci_build/release/macos/cpu_py37_full/pip.sh
+++ b/tensorflow/tools/ci_build/release/macos/cpu_py37_full/pip.sh
@@ -30,10 +30,11 @@
# Install macos pip dependencies
install_macos_pip_deps sudo pip3.7
-# Export required variables for running pip.sh
+# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.7'
+export TF_BUILD_BOTH_CPU_PACKAGES=1
# Run configure.
export TF_NEED_CUDA=0
diff --git a/tensorflow/tools/ci_build/release/ubuntu_16/gpu_pip_on_cpu/build.sh b/tensorflow/tools/ci_build/release/ubuntu_16/gpu_pip_on_cpu/build.sh
index 4b619aa..d6c2df7 100755
--- a/tensorflow/tools/ci_build/release/ubuntu_16/gpu_pip_on_cpu/build.sh
+++ b/tensorflow/tools/ci_build/release/ubuntu_16/gpu_pip_on_cpu/build.sh
@@ -53,4 +53,4 @@
cp "${WHL_PATH}" "$(pwd)"/.
chmod +x tensorflow/tools/ci_build/builds/docker_cpu_pip.sh
-docker run -e "CI_BUILD_USER=$(id -u -n)" -e "CI_BUILD_UID=$(id -u)" -e "CI_BUILD_GROUP=$(id -g -n)" -e "CI_BUILD_GID=$(id -g)" -e "CI_BUILD_HOME=/bazel_pip" -v "$(pwd)":/bazel_pip tensorflow/tensorflow:devel-py3 "./bazel_pip/tensorflow/tools/ci_build/builds/with_the_same_user" "./bazel_pip/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh"
+docker run -e "BAZEL_VERSION=${BAZEL_VERSION}" -e "CI_BUILD_USER=$(id -u -n)" -e "CI_BUILD_UID=$(id -u)" -e "CI_BUILD_GROUP=$(id -g -n)" -e "CI_BUILD_GID=$(id -g)" -e "CI_BUILD_HOME=/bazel_pip" -v "$(pwd)":/bazel_pip tensorflow/tensorflow:devel-py3 "./bazel_pip/tensorflow/tools/ci_build/builds/with_the_same_user" "./bazel_pip/tensorflow/tools/ci_build/builds/docker_cpu_pip.sh"
diff --git a/tensorflow/tools/dockerfiles/README.md b/tensorflow/tools/dockerfiles/README.md
index 927246a..744bb89 100644
--- a/tensorflow/tools/dockerfiles/README.md
+++ b/tensorflow/tools/dockerfiles/README.md
@@ -41,9 +41,13 @@
# CPU-based images
$ docker run -u $(id -u):$(id -g) -v $(pwd):/my-devel -it tf
-# GPU-based images (set up nvidia-docker2 first)
+# GPU-based images,
+# 1) On Docker versions earlier than 19.03 (set up nvidia-docker2 first)
$ docker run --runtime=nvidia -u $(id -u):$(id -g) -v $(pwd):/my-devel -it tf
+# 2) On Docker versions including and after 19.03 (with nvidia-container-toolkit)
+$ docker run --gpus all -u $(id -u):$(id -g) -v $(pwd):/my-devel -it tf
+
# Images with Jupyter run on port 8888 and need a volume for your notebooks
# You can change $(PWD) to the full path to a directory if your notebooks
# live outside the current directory.
diff --git a/tensorflow/tools/docs/BUILD b/tensorflow/tools/docs/BUILD
index 68f04f2..d844a27 100644
--- a/tensorflow/tools/docs/BUILD
+++ b/tensorflow/tools/docs/BUILD
@@ -151,7 +151,7 @@
name = "generate2_test",
size = "medium",
srcs = ["generate2_test.py"],
- python_version = "PY2",
+ python_version = "PY3",
srcs_version = "PY2AND3",
tags = [
"manual",
diff --git a/tensorflow/tools/test/performance.bzl b/tensorflow/tools/test/performance.bzl
index 8090722..538d3d8 100644
--- a/tensorflow/tools/test/performance.bzl
+++ b/tensorflow/tools/test/performance.bzl
@@ -7,7 +7,8 @@
benchmarks = "..",
tags = [],
test_log_output_prefix = "",
- benchmark_type = "cpp_microbenchmark"):
+ benchmark_type = "cpp_microbenchmark",
+ **kwargs):
if not name:
fail("Must provide a name")
if not target:
@@ -41,6 +42,7 @@
additional_deps = [
"//tensorflow/tools/test:run_and_gather_logs",
],
+ **kwargs
)
# Create a benchmark test target of a TensorFlow python test (*py_tests)
@@ -49,7 +51,8 @@
target = None,
benchmarks = "..",
tags = [],
- test_log_output_prefix = ""):
+ test_log_output_prefix = "",
+ **kwargs):
# For now generating a py benchmark is the same as generating a C++
# benchmark target. In the future this may change, so we have
# two macros just in case
@@ -60,4 +63,5 @@
tags = tags,
test_log_output_prefix = test_log_output_prefix,
benchmark_type = "python_benchmark",
+ **kwargs
)
diff --git a/third_party/mlir/BUILD b/third_party/mlir/BUILD
index 088572c..c6e8ae6 100644
--- a/third_party/mlir/BUILD
+++ b/third_party/mlir/BUILD
@@ -43,6 +43,26 @@
textual_hdrs = ["include/mlir/IR/DialectSymbolRegistry.def"],
)
+gentbl(
+ name = "OpAsmInterfacesIncGen",
+ strip_include_prefix = "include",
+ tbl_outs = [
+ (
+ "-gen-op-interface-decls",
+ "include/mlir/IR/OpAsmInterface.h.inc",
+ ),
+ (
+ "-gen-op-interface-defs",
+ "include/mlir/IR/OpAsmInterface.cpp.inc",
+ ),
+ ],
+ tblgen = ":mlir-tblgen",
+ td_file = "include/mlir/IR/OpAsmInterface.td",
+ td_srcs = [
+ ":OpBaseTdFiles",
+ ],
+)
+
cc_library(
name = "IR",
srcs = [
@@ -122,6 +142,7 @@
":CallOpInterfacesIncGen",
":DialectSymbolRegistry",
":InferTypeOpInterfaceIncGen",
+ ":OpAsmInterfacesIncGen",
":Support",
"@llvm//:support",
],
@@ -272,6 +293,7 @@
srcs = [
"include/mlir/Analysis/CallInterfaces.td",
"include/mlir/Dialect/StandardOps/Ops.td",
+ "include/mlir/IR/OpAsmInterface.td",
":OpBaseTdFiles",
],
)
@@ -1605,6 +1627,7 @@
":IR",
":LLVMDialect",
":LLVMTransforms",
+ ":LinalgToLLVM",
":NVVMDialect",
":Parser",
":Pass",
@@ -1783,6 +1806,7 @@
srcs = ["tools/mlir-cuda-runner/mlir-cuda-runner.cpp"],
data = [
":tools/libcuda-runtime-wrappers.so",
+ "@local_config_mlir//test/mlir-cpu-runner:libmlir_runner_utils.so",
],
deps = [
":GPUDialect",
@@ -2098,8 +2122,19 @@
],
)
+filegroup(
+ name = "LinalgTransformPatternsTdFiles",
+ srcs = [
+ "include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td",
+ ":AffineOpsTdFiles",
+ ":LinalgLibraryOpsTdFiles",
+ ":LinalgOpsTdFiles",
+ ":OpBaseTdFiles",
+ ],
+)
+
gentbl(
- name = "LinalgTransformPatterns",
+ name = "LinalgTransformPatternsIncGen",
tbl_outs = [
(
"-gen-rewriters",
@@ -2109,13 +2144,40 @@
tblgen = ":mlir-tblgen",
td_file = "include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td",
td_srcs = [
- "include/mlir/IR/OpBase.td",
- ":LinalgOpsTdFiles",
- ":LinalgLibraryOpsTdFiles",
+ ":LinalgTransformPatternsTdFiles",
],
)
cc_library(
+ name = "LinalgToLLVM",
+ srcs = [
+ "lib/Conversion/LinalgToLLVM/LinalgToLLVM.cpp",
+ ],
+ hdrs = [
+ "include/mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h",
+ ],
+ includes = ["include"],
+ deps = [
+ ":AffineToStandardTransforms",
+ ":Analysis",
+ ":CFGTransforms",
+ ":EDSC",
+ ":IR",
+ ":LLVMDialect",
+ ":LLVMTransforms",
+ ":Linalg",
+ ":Pass",
+ ":StandardOps",
+ ":Support",
+ ":Transforms",
+ ":VectorConversions",
+ "@llvm//:core",
+ "@llvm//:support",
+ ],
+ alwayslink = 1,
+)
+
+cc_library(
name = "Linalg",
srcs = [
"lib/Dialect/Linalg/Analysis/DependenceAnalysis.cpp",
@@ -2123,7 +2185,6 @@
"lib/Dialect/Linalg/IR/LinalgTypes.cpp",
"lib/Dialect/Linalg/Transforms/Fusion.cpp",
"lib/Dialect/Linalg/Transforms/LinalgTransforms.cpp",
- "lib/Dialect/Linalg/Transforms/LowerToLLVMDialect.cpp",
"lib/Dialect/Linalg/Transforms/LowerToLoops.cpp",
"lib/Dialect/Linalg/Transforms/Promotion.cpp",
"lib/Dialect/Linalg/Transforms/Tiling.cpp",
@@ -2135,12 +2196,13 @@
"include/mlir/Dialect/Linalg/IR/LinalgTraits.h",
"include/mlir/Dialect/Linalg/IR/LinalgTypes.h",
"include/mlir/Dialect/Linalg/Passes.h",
- "include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.h.inc",
+ "include/mlir/Dialect/Linalg/Transforms/LinalgTransforms.h",
"include/mlir/Dialect/Linalg/Utils/Intrinsics.h",
"include/mlir/Dialect/Linalg/Utils/Utils.h",
],
includes = ["include"],
deps = [
+ "LinalgTransformPatternsIncGen",
":AffineOps",
":AffineToStandardTransforms",
":Analysis",
@@ -2158,7 +2220,6 @@
":Support",
":TransformUtils",
":Transforms",
- ":VectorConversions",
"@llvm//:core",
"@llvm//:support",
],
@@ -2261,24 +2322,46 @@
],
)
+gentbl(
+ name = "VectorTransformPatterns",
+ tbl_outs = [
+ (
+ "-gen-rewriters",
+ "include/mlir/Dialect/VectorOps/VectorTransformPatterns.h.inc",
+ ),
+ ],
+ tblgen = ":mlir-tblgen",
+ td_file = "include/mlir/Dialect/VectorOps/VectorTransformPatterns.td",
+ td_srcs = [
+ ":OpBaseTdFiles",
+ ":StdOpsTdFiles",
+ ":VectorOpsTdFiles",
+ ],
+)
+
cc_library(
name = "VectorConversions",
srcs = [
"lib/Conversion/VectorConversions/VectorToLLVM.cpp",
"lib/Conversion/VectorConversions/VectorToLoops.cpp",
+ "lib/Conversion/VectorConversions/VectorToVector.cpp", # TODO(transforms?)
],
hdrs = [
"include/mlir/Conversion/VectorConversions/VectorConversions.h",
],
includes = ["include"],
deps = [
+ ":Analysis",
":EDSC",
":IR",
":LLVMDialect",
":LLVMTransforms",
":Pass",
+ ":StandardOps",
+ ":Support",
":Transforms",
":VectorOps",
+ ":VectorTransformPatterns",
"@llvm//:core",
"@llvm//:support",
],
@@ -2297,6 +2380,7 @@
"include/mlir/Analysis/CallInterfaces.td",
"include/mlir/Transforms/InliningUtils.h",
"include/mlir/IR/OpBase.td",
+ "include/mlir/IR/OpAsmInterface.td",
"include/mlir/Analysis/CallInterfaces.h",
],
visibility = ["@local_config_mlir//:friends"],
diff --git a/third_party/mlir/g3doc/LangRef.md b/third_party/mlir/g3doc/LangRef.md
index 974ea31..ba92ca7 100644
--- a/third_party/mlir/g3doc/LangRef.md
+++ b/third_party/mlir/g3doc/LangRef.md
@@ -796,7 +796,7 @@
#col_major = (d0, d1, d2) -> (d2, d1, d0)
// A 2-d tiled layout with tiles of size 128 x 256.
-#tiled_2d_128x256 = (d0, d1) -> (d0 div 128, d1 div 256, d0 mod 128, d0 mod 256)
+#tiled_2d_128x256 = (d0, d1) -> (d0 div 128, d1 div 256, d0 mod 128, d1 mod 256)
// A tiled data layout with non-constant tile sizes.
#tiled_dynamic = (d0, d1)[s0, s1] -> (d0 floordiv s0, d1 floordiv s1,
diff --git a/third_party/mlir/g3doc/Rationale.md b/third_party/mlir/g3doc/Rationale.md
index f127d8d..efccf07 100644
--- a/third_party/mlir/g3doc/Rationale.md
+++ b/third_party/mlir/g3doc/Rationale.md
@@ -609,12 +609,12 @@
%ni = dim %A, 0 : memref<?x?xi32>
// This loop can be parallelized
affine.for %i = 0 to %ni {
- call @search_body (%A, %S, %i) : (memref<?x?xi32>, memref<?xi32>, i32)
+ call @search_body (%A, %S, %key, %i) : (memref<?x?xi32>, memref<?xi32>, i32, i32)
}
return
}
-func @search_body(%A: memref<?x?xi32>, %S: memref<?xi32>, %key: i32) {
+func @search_body(%A: memref<?x?xi32>, %S: memref<?xi32>, %key: i32, %i : i32) {
%nj = dim %A, 1 : memref<?x?xi32>
br ^bb1(0)
diff --git a/third_party/mlir/g3doc/Traits.md b/third_party/mlir/g3doc/Traits.md
new file mode 100644
index 0000000..a3d91a7
--- /dev/null
+++ b/third_party/mlir/g3doc/Traits.md
@@ -0,0 +1,246 @@
+# Introduction to MLIR Operation Traits
+
+[TOC]
+
+MLIR allows for a truly open operation ecosystem, as any dialect may define
+operations that suit a specific level of abstraction. `Traits` are a mechanism
+in which to abstract implementation details and properties that are common
+across many different operations. `Traits` may be used to specify special
+properties and constraints of the operation, including whether the operation has
+side effects or whether its output has the same type as the input. Some examples
+of traits are `Commutative`, `SingleResult`, `Terminator`, etc. See the
+[comprehensive list](#traits) below for more examples of what is possible.
+
+## Defining a Trait
+
+Traits may be defined in C++ by inheriting from the
+`OpTrait::TraitBase<ConcreteType, TraitType>` class. This base class takes as
+template parameters:
+
+* ConcreteType
+ - The concrete operation type that this trait was attached to.
+* TraitType
+ - The type of the trait class that is being defined, for use with the
+ [`Curiously Recurring Template Pattern`](https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
+
+A derived trait class is expected to take a single template parameter that
+corresponds to the `ConcreteType`. An example trait definition is shown below:
+
+```c++
+template <typename ConcreteType>
+class MyTrait : public OpTrait::TraitBase<ConcreteType, MyTrait> {
+};
+```
+
+Derived traits may also provide a `verifyTrait` hook that is called when
+verifying the concrete operation. The trait verifiers will currently always be
+invoked before the main `Op::verify`.
+
+```c++
+template <typename ConcreteType>
+class MyTrait : public OpTrait::TraitBase<ConcreteType, MyTrait> {
+public:
+ /// Override the 'verifyTrait' hook to add additional verification on the
+ /// concrete operation.
+ static LogicalResult verifyTrait(Operation *op) {
+ // ...
+ }
+};
+```
+
+Note: It is generally good practice to define the implementation of the
+`verifyTrait` hook out-of-line as a free function when possible to avoid
+instantiating the implementation for every concrete operation type.
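+
+A minimal sketch of this pattern (the free-function name is illustrative):
+
+```c++
+/// Out-of-line verification logic; it is compiled once rather than
+/// instantiated for every concrete operation type that attaches the trait.
+LogicalResult verifyMyTrait(Operation *op) {
+  // ... check the shared invariants on `op` ...
+  return success();
+}
+
+template <typename ConcreteType>
+class MyTrait : public OpTrait::TraitBase<ConcreteType, MyTrait> {
+public:
+  static LogicalResult verifyTrait(Operation *op) {
+    return verifyMyTrait(op);
+  }
+};
+```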
+
+### Parametric Traits
+
+The above demonstrates the definition of a simple self-contained trait. It is
+also often useful to provide some static parameters to the trait to control its
+behavior. Given that the definition of the trait class is rigid, i.e. we must
+have a single template argument for the concrete operation, the templates for
+the parameters will need to be split out. An example is shown below:
+
+```c++
+template <int Parameter>
+class MyParametricTrait {
+public:
+ template <typename ConcreteType>
+ class Impl : public OpTrait::TraitBase<ConcreteType, Impl> {
+ // Inside of 'Impl' we have full access to the template parameters
+ // specified above.
+ };
+};
+```
+
+## Attaching a Trait
+
+Traits may be used when defining a derived operation type by simply adding the
+name of the trait class to the `Op` class after the concrete operation type:
+
+```c++
+/// Here we define 'MyOp' along with the 'MyTrait' and 'MyParametricTrait'
+/// classes we defined previously.
+class MyOp : public Op<MyOp, MyTrait, MyParametricTrait<10>::Impl> {};
+```
+
+To use a trait in the [ODS](OpDefinitions.md) framework, we need to provide a
+definition of the trait class. This can be done using the `NativeOpTrait` and
+`ParamNativeOpTrait` classes. `ParamNativeOpTrait` provides a mechanism for
+specifying arguments to a parametric trait class with an internal `Impl`.
+
+```td
+// The argument is the c++ trait class name.
+def MyTrait : NativeOpTrait<"MyTrait">;
+
+// The first argument is the parent c++ class name. The second argument is a
+// string containing the parameter list.
+class MyParametricTrait<int prop>
+ : NativeOpTrait<"MyParametricTrait", !cast<string>(!head(parameters))>;
+```
+
+These can then be used in the `traits` list of an op definition:
+
+```td
+def OpWithInferTypeInterfaceOp : Op<...[MyTrait, MyParametricTrait<10>]> { ... }
+```
+
+See the documentation on [operation definitions](OpDefinitions.md) for more
+details.
+
+## Using a Trait
+
+Traits may be used to provide additional methods, static fields, or other
+information directly on the concrete operation. `Traits` internally become
+`Base` classes of the concrete operation, so all of these are directly
+accessible. To expose this information opaquely to transformations and analyses,
+[`interfaces`](Interfaces.md) may be used.
+
+To query if a specific operation contains a specific trait, the `hasTrait<>`
+method may be used. This takes as a template parameter the trait class, which is
+the same as the one passed when attaching the trait to an operation.
+
+```c++
+Operation *op = ...;
+if (op->hasTrait<MyTrait>() || op->hasTrait<MyParametricTrait<10>::Impl>())
+ ...;
+```
+
+## Trait List
+
+MLIR provides a suite of traits covering functionality that is common across
+many different operations. Below is a list of some key traits that
+may be used directly by any dialect. The format of the header for each trait
+section goes as follows:
+
+* `Header`
+ - (`C++ class` -- `ODS class`(if applicable))
+
+### Broadcastable
+
+* `OpTrait::BroadcastableTwoOperandsOneResult` -- `Broadcastable`
+
+This trait provides the API for operations that are known to have
+[broadcast-compatible](https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html)
+operand and result types. Specifically, starting from the most varying
+dimension, each pair of corresponding dimensions in the two operands' types
+should either be equal, or one of them should be one. Also, each dimension of
+the result type should equal the larger of the pair, if known. Shapes are
+checked partially if
+ranks or dimensions are not known. For example, an op with `tensor<?x2xf32>` and
+`tensor<2xf32>` as operand types and `tensor<3x2xf32>` as the result type is
+broadcast-compatible.
+
+This trait assumes the op has two operands and one result, and it asserts if the
+pre-condition is not satisfied.
+
+### Commutative
+
+* `OpTrait::IsCommutative` -- `Commutative`
+
+This trait adds the property that the operation is commutative, i.e. `X op Y ==
+Y op X`.
+
+### Function-Like
+
+* `OpTrait::FunctionLike`
+
+This trait provides APIs for operations that behave like functions. In
+particular:
+
+- Ops must be symbols, i.e. also have the `Symbol` trait;
+- Ops have a single region with multiple blocks that corresponds to the body
+ of the function;
+- the absence of a region corresponds to an external function;
+- arguments of the first block of the region are treated as function
+ arguments;
+- they can have argument and result attributes that are stored in dictionary
+ attributes on the operation itself.
+
+This trait does *NOT* provide type support for the functions, meaning that
+concrete Ops must handle the type of the declared or defined function.
+`getTypeAttrName()` is a convenience function that returns the name of the
+attribute that can be used to store the function type, but the trait makes no
+assumption based on it.
+
+### HasParent
+
+* `OpTrait::HasParent<typename ParentOpType>` -- `HasParent<string op>`
+
+This trait provides APIs and verifiers for operations that can only be nested
+within regions that are attached to operations of `ParentOpType`.
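+
+For example, an op that may only be nested directly under a hypothetical
+`MyParentOp` could be declared as (sketch):
+
+```c++
+class MyNestedOp
+    : public Op<MyNestedOp, OpTrait::HasParent<MyParentOp>::Impl> {};
+```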
+
+### IsolatedFromAbove
+
+* `OpTrait::IsIsolatedFromAbove` -- `IsolatedFromAbove`
+
+This trait signals that the regions of an operation are known to be isolated
+from above. This trait asserts that the regions of an operation will not
+capture, or reference, SSA values defined above the region scope. This means
+that the following is invalid if `foo.region_op` is defined as
+`IsolatedFromAbove`:
+
+```mlir
+%result = constant 10 : i32
+foo.region_op {
+ foo.yield %result : i32
+}
+```
+
+This trait is an important structural property of the IR, and enables operations
+to have [passes](WritingAPass.md) scheduled under them.
+
+### NoSideEffect
+
+* `OpTrait::HasNoSideEffect` -- `NoSideEffect`
+
+This trait signifies that the operation is pure and has no visible side effects.
+
+### Single Block with Implicit Terminator
+
+*   `OpTrait::SingleBlockImplicitTerminator<typename TerminatorOpType>` --
+    `SingleBlockImplicitTerminator<string op>`
+
+This trait provides APIs and verifiers for operations with regions that have a
+single block that must terminate with `TerminatorOpType`.
+
+### Symbol
+
+* `OpTrait::Symbol` -- `Symbol`
+
+This trait is used for operations that define a `Symbol`.
+
+TODO(riverriddle) Link to the proper document detailing the design of symbols.
+
+### SymbolTable
+
+* `OpTrait::SymbolTable` -- `SymbolTable`
+
+This trait is used for operations that define a `SymbolTable`.
+
+TODO(riverriddle) Link to the proper document detailing the design of symbols.
+
+### Terminator
+
+* `OpTrait::IsTerminator` -- `Terminator`
+
+This trait provides verification and functionality for operations that are known
+to be [terminators](LangRef.md#terminator-operations).
diff --git a/third_party/mlir/g3doc/Tutorials/Toy/Ch-2.md b/third_party/mlir/g3doc/Tutorials/Toy/Ch-2.md
index 76482a6..34f25ab 100755
--- a/third_party/mlir/g3doc/Tutorials/Toy/Ch-2.md
+++ b/third_party/mlir/g3doc/Tutorials/Toy/Ch-2.md
@@ -41,7 +41,7 @@
Here is the MLIR assembly for the Toy `transpose` operations:
```mlir
-%t_tensor = "toy.transpose"(%tensor) {inplace = true} : (tensor<2x3xf64>) -> tensor<3x2xf64>
+%t_tensor = "toy.transpose"(%tensor) {inplace = true} : (tensor<2x3xf64>) -> tensor<3x2xf64> loc("example/file/path":12:1)
```
Let's break down the anatomy of this MLIR operation:
@@ -74,9 +74,14 @@
- `(tensor<2x3xf64>) -> tensor<3x2xf64>`
- * This trailing portion refers to the type of the operation in a
- functional form, spelling the types of the arguments in parentheses and
- the type of the return values afterward.
+ * This refers to the type of the operation in a functional form, spelling
+ the types of the arguments in parentheses and the type of the return
+ values afterward.
+
+- loc("example/file/path":12:1)
+
+ * This is the location in the source code from which this operation
+ originated.
Shown here is the general form of an operation. As described above, the set of
operations in MLIR is extensible. This means that the infrastructure must be
@@ -84,10 +89,11 @@
boiling down the composition of an operation into discrete pieces:
- A name for the operation.
-- A source location for debugging purposes.
- A list of SSA operand values.
-- A list of [types](../../LangRef.md#type-system) for result values.
- A list of [attributes](../../LangRef.md#attributes).
+- A list of [types](../../LangRef.md#type-system) for result values.
+- A [source location](../../Diagnostics.md#source-locations) for debugging
+ purposes.
- A list of successors [blocks](../../LangRef.md#blocks) (for branches,
mostly).
- A list of [regions](../../LangRef.md#regions) (for structural operations
@@ -98,6 +104,15 @@
MLIR, the location is a core requirement, and APIs depend on and manipulate it.
Dropping a location is thus an explicit choice which cannot happen by mistake.
+To provide an illustration: if a transformation replaces an operation with
+another, the new operation must still have a location attached. This makes it
+possible to track where that operation came from.
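+
+A sketch of what this looks like in a rewrite (names are illustrative):
+
+```c++
+/// Create the replacement operation with the location of the operation it
+/// replaces, so its provenance can still be traced.
+Operation *replaceWithNewOp(OpBuilder &builder, Operation *oldOp) {
+  OperationState state(oldOp->getLoc(), "toy.new_op");
+  return builder.createOperation(state);
+}
+```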
+
+It's worth noting that the mlir-opt tool, a tool for testing
+compiler passes, does not include locations in the output by default. The
+`-mlir-print-debuginfo` flag includes them. (Run `mlir-opt
+--help` for more options.)
+
### Opaque API
MLIR is designed to be a completely extensible system, and as such, the
@@ -182,8 +197,9 @@
`value`, and returns a single result of
[TensorType](../../LangRef.md#tensor-type). An operation inherits from the
[CRTP](https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern)
-`mlir::Op` class which also takes some optional *traits* to customize its
-behavior. These traits may provide additional accessors, verification, etc.
+`mlir::Op` class which also takes some optional [*traits*](../../Traits.md) to
+customize its behavior. These traits may provide additional accessors,
+verification, etc.
```c++
class ConstantOp : public mlir::Op<ConstantOp,
diff --git a/third_party/mlir/include/mlir/Analysis/VectorAnalysis.h b/third_party/mlir/include/mlir/Analysis/VectorAnalysis.h
index 8b9992d..350bdfd 100644
--- a/third_party/mlir/include/mlir/Analysis/VectorAnalysis.h
+++ b/third_party/mlir/include/mlir/Analysis/VectorAnalysis.h
@@ -46,14 +46,14 @@
/// - shapeRatio({3, 4, 5, 8}, {2, 5, 2}) returns {3, 2, 1, 4}
/// - shapeRatio({3, 4, 4, 8}, {2, 5, 2}) returns None
/// - shapeRatio({1, 2, 10, 32}, {2, 5, 2}) returns {1, 1, 2, 16}
-llvm::Optional<llvm::SmallVector<unsigned, 4>>
+llvm::Optional<llvm::SmallVector<int64_t, 4>>
shapeRatio(ArrayRef<int64_t> superShape, ArrayRef<int64_t> subShape);
/// Computes and returns the multi-dimensional ratio of the shapes of
/// `superVector` to `subVector`. If integral division is not possible, returns
/// None.
/// Assumes and enforces that the VectorTypes have the same elemental type.
-llvm::Optional<llvm::SmallVector<unsigned, 4>>
+llvm::Optional<llvm::SmallVector<int64_t, 4>>
shapeRatio(VectorType superVectorType, VectorType subVectorType);
/// Constructs a permutation map of invariant memref indices to vector
diff --git a/third_party/mlir/include/mlir/CMakeLists.txt b/third_party/mlir/include/mlir/CMakeLists.txt
index 1a5094d..43eacfc 100644
--- a/third_party/mlir/include/mlir/CMakeLists.txt
+++ b/third_party/mlir/include/mlir/CMakeLists.txt
@@ -1,4 +1,5 @@
add_subdirectory(Analysis)
add_subdirectory(Dialect)
add_subdirectory(EDSC)
+add_subdirectory(IR)
add_subdirectory(Transforms)
diff --git a/third_party/mlir/include/mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h b/third_party/mlir/include/mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h
new file mode 100644
index 0000000..6bae08e
--- /dev/null
+++ b/third_party/mlir/include/mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h
@@ -0,0 +1,39 @@
+//===- LinalgToLLVM.h - Utils to convert from the linalg dialect ----------===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+#ifndef MLIR_CONVERSION_LINALGTOLLVM_LINALGTOLLVM_H_
+#define MLIR_CONVERSION_LINALGTOLLVM_LINALGTOLLVM_H_
+
+#include "mlir/Conversion/StandardToLLVM/ConvertStandardToLLVM.h"
+#include "mlir/Transforms/DialectConversion.h"
+
+namespace mlir {
+class MLIRContext;
+
+class LinalgTypeConverter : public LLVMTypeConverter {
+public:
+ using LLVMTypeConverter::LLVMTypeConverter;
+ Type convertType(Type t) override;
+};
+
+/// Populate the given list with patterns that convert from Linalg to LLVM.
+void populateLinalgToLLVMConversionPatterns(LinalgTypeConverter &converter,
+ OwningRewritePatternList &patterns,
+ MLIRContext *ctx);
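+
+// Typical use from a conversion pass (illustrative sketch):
+//   LinalgTypeConverter converter(ctx);
+//   OwningRewritePatternList patterns;
+//   populateLinalgToLLVMConversionPatterns(converter, patterns, ctx);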
+
+} // namespace mlir
+
+#endif // MLIR_CONVERSION_LINALGTOLLVM_LINALGTOLLVM_H_
diff --git a/third_party/mlir/include/mlir/Conversion/VectorConversions/VectorConversions.h b/third_party/mlir/include/mlir/Conversion/VectorConversions/VectorConversions.h
index 33234b6..56862ca 100644
--- a/third_party/mlir/include/mlir/Conversion/VectorConversions/VectorConversions.h
+++ b/third_party/mlir/include/mlir/Conversion/VectorConversions/VectorConversions.h
@@ -14,15 +14,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
-#ifndef MLIR_CONVERSION_VECTORTOLLVM_VECTORTOLLVM_H_
-#define MLIR_CONVERSION_VECTORTOLLVM_VECTORTOLLVM_H_
+#ifndef MLIR_CONVERSION_VECTORCONVERSIONS_VECTORCONVERSIONS_H_
+#define MLIR_CONVERSION_VECTORCONVERSIONS_VECTORCONVERSIONS_H_
+
+#include "mlir/Transforms/DialectConversion.h"
namespace mlir {
class LLVMTypeConverter;
class MLIRContext;
class ModuleOp;
template <typename T> class OpPassBase;
-class OwningRewritePatternList;
/// Collect a set of patterns to convert from the Vector dialect to affine loops
/// surrounding ops in different dialects (vector, std etc).
@@ -31,6 +32,13 @@
void populateVectorToAffineLoopsConversionPatterns(
MLIRContext *context, OwningRewritePatternList &patterns);
+/// Collect a set of patterns to convert from the Vector dialect to itself.
+/// Should be merged with populateVectorToAffineLoopsConversionPatterns.
+void populateVectorToVectorConversionPatterns(
+ MLIRContext *context, OwningRewritePatternList &patterns,
+ ArrayRef<int64_t> coarseVectorShape = {},
+ ArrayRef<int64_t> fineVectorShape = {});
+
/// Collect a set of patterns to convert from the Vector dialect to LLVM.
void populateVectorToLLVMConversionPatterns(LLVMTypeConverter &converter,
OwningRewritePatternList &patterns);
@@ -40,4 +48,4 @@
} // namespace mlir
-#endif // MLIR_CONVERSION_VECTORTOLLVM_VECTORTOLLVM_H_
+#endif // MLIR_CONVERSION_VECTORCONVERSIONS_VECTORCONVERSIONS_H_
diff --git a/third_party/mlir/include/mlir/Dialect/Linalg/Passes.h b/third_party/mlir/include/mlir/Dialect/Linalg/Passes.h
index 8a01fe4..5ecd500 100644
--- a/third_party/mlir/include/mlir/Dialect/Linalg/Passes.h
+++ b/third_party/mlir/include/mlir/Dialect/Linalg/Passes.h
@@ -41,7 +41,8 @@
std::unique_ptr<OpPassBase<FuncOp>> createLowerLinalgToLoopsPass();
-std::unique_ptr<OpPassBase<ModuleOp>> createLowerLinalgToLLVMPass();
+/// Create a pass to convert Linalg operations to the LLVMIR dialect.
+std::unique_ptr<OpPassBase<ModuleOp>> createConvertLinalgToLLVMPass();
} // namespace linalg
} // namespace mlir
diff --git a/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td b/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td
index 9cc4ea3..66a3a16 100644
--- a/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td
+++ b/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td
@@ -22,82 +22,48 @@
#ifndef LINALG_TRANSFORMS
#define LINALG_TRANSFORMS
+#ifndef LINALG_OPS
include "mlir/Dialect/Linalg/IR/LinalgOps.td"
+#endif // LINALG_OPS
+#ifndef LINALG_LIBRARY_OPS
include "mlir/Dialect/Linalg/IR/LinalgLibraryOps.td"
+#endif // LINALG_LIBRARY_OPS
def HasNoLinalgTransformMarker : CPred<[{
- !$0.getAttrOfType<StringAttr>(kLinalgTransformMarker)
+ !$0.getAttrOfType<StringAttr>(LinalgTransforms::kLinalgTransformMarker)
}]>;
-class HasLinalgTransformMarker<string value> : CPred<[{
- $0.getAttrOfType<StringAttr>(kLinalgTransformMarker).getValue() == "}] #
- value # [{"}]>;
+class HasLinalgTransformMarker<string str> : CPred<[{
+ $0.getAttrOfType<StringAttr>(
+ LinalgTransforms::kLinalgTransformMarker).getValue() == "}] # str # [{"}]>;
-class IsProducedByOpOfType<string value> :
- CPred<"isProducedByOpOfType<" # value # ">($0, $1)">;
+class IsProducedByOpOfType<string str> :
+ CPred<"isProducedByOpOfType<" # str # ">($0, $1)">;
//===----------------------------------------------------------------------===//
// Linalg fusion patterns.
//===----------------------------------------------------------------------===//
//
// In the future, tile sizes should be derived from op properties + machine
-// model but we do not need to wait on this to start having useful patterns.
-class TileAndFuseLinalgOp<list<int> sizes, string value> : NativeCodeCall<
+// description but we do not need to wait on this to start having useful
+// patterns.
+class TileAndFuseLinalgOp<
+ list<int> sizes, list<int> operandIndices, string value> : NativeCodeCall<
"if (failed(tileAndFuseLinalgOpAndSetMarker($_builder, $0, {" #
- StrJoinInt<sizes>.result # "}, \"" # value # "\")))" #
+ StrJoinInt<sizes>.result # "}, {" # StrJoinInt<operandIndices>.result # "}," #
+ " \"" # value # "\")))" #
" return matchFailure();">;
-def : Pat<(MatmulOp:$consumer $A, $B, $C),
- (TileAndFuseLinalgOp<[100, 150], "L1"> $consumer),
- [
- (Constraint<HasNoLinalgTransformMarker> $consumer),
- (Constraint<IsProducedByOpOfType<"MatmulOp">> $consumer, $A),
- ],
- // In the buffer world there is no use-def chains or dags so benefits
- // cannot be computed automatically from the length of the matched
- // pattern. Instead we specify the benefit ourselves for now.
- // This is not expected to be a big challenge long-term because
- // pattern benefits are akin to feature engineering: features should
- // be learned.
- (addBenefit 1)>;
-
//===----------------------------------------------------------------------===//
// Linalg tiling patterns.
//===----------------------------------------------------------------------===//
//
// In the future, tile sizes should be derived from op properties + machine
-// model but we do not need to wait on this to start having useful patterns.
+// description but we do not need to wait on this to start having useful
+// patterns.
class TileLinalgOp<list<int> sizes, string value> : NativeCodeCall<
"if (failed(tileLinalgOpAndSetMarker($_builder, $0, {" #
StrJoinInt<sizes>.result # "}, \"" # value # "\")))" #
" return matchFailure();">;
-def : Pat<(MatmulOp:$op $A, $B, $C),
- (TileLinalgOp<[2000, 3000, 4000], "L3"> $op),
- [(Constraint<Or<[HasNoLinalgTransformMarker,
- HasLinalgTransformMarker<"MEM">]>> $op)]>;
-def : Pat<(MatmulOp:$op $A, $B, $C),
- (TileLinalgOp<[200, 300, 400], "L2"> $op),
- [(Constraint<HasLinalgTransformMarker<"L3">> $op)]>;
-def : Pat<(MatmulOp:$op $A, $B, $C),
- (TileLinalgOp<[20, 30, 40], "L1"> $op),
- [(Constraint<HasLinalgTransformMarker<"L2">> $op)]>;
-def : Pat<(MatmulOp:$op $A, $B, $C),
- (TileLinalgOp<[2, 3, 4], "REG"> $op),
- [(Constraint<HasLinalgTransformMarker<"L1">> $op)]>;
-
-def : Pattern<(MatvecOp:$op $A, $b, $c),
- [(TileLinalgOp<[5, 6], "L1"> $op)],
- [(Constraint<HasNoLinalgTransformMarker> $op)]>;
-
-def : Pattern<(DotOp:$op $a, $b, $c),
- [(TileLinalgOp<[8000], "L1"> $op)],
- [(Constraint<Or<[HasNoLinalgTransformMarker,
- HasLinalgTransformMarker<"MEM">,
- HasLinalgTransformMarker<"L3">,
- HasLinalgTransformMarker<"L2">]>> $op)]>;
-def : Pattern<(DotOp:$op $a, $b, $c),
- [(TileLinalgOp<[8], "REG"> $op)],
- [(Constraint<HasLinalgTransformMarker<"L1">> $op)]>;
-
#endif // LINALG_TRANSFORMS
diff --git a/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransforms.h b/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransforms.h
new file mode 100644
index 0000000..50c566f
--- /dev/null
+++ b/third_party/mlir/include/mlir/Dialect/Linalg/Transforms/LinalgTransforms.h
@@ -0,0 +1,72 @@
+//===- LinalgTransforms.h - Linalg transformations as patterns --*- C++ -*-===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#ifndef DIALECT_LINALG_TRANSFORMS_LINALGTRANSFORMS_H_
+#define DIALECT_LINALG_TRANSFORMS_LINALGTRANSFORMS_H_
+
+#include "mlir/Dialect/Linalg/Analysis/DependenceAnalysis.h"
+#include "mlir/Dialect/Linalg/IR/LinalgOps.h"
+#include "mlir/Dialect/Linalg/Passes.h"
+#include "mlir/Dialect/Linalg/Utils/Utils.h"
+#include "mlir/IR/PatternMatch.h"
+#include "mlir/Pass/Pass.h"
+
+#include "llvm/ADT/STLExtras.h"
+
+namespace mlir {
+namespace linalg {
+
+// Marker used as attribute name in generated Linalg rewriting transformations.
+struct LinalgTransforms {
+ static constexpr StringRef kLinalgTransformMarker =
+ "__internal_linalg_transform__";
+};
+
+// Declarative transformation used in tablegen patterns.
+// Tiles `op` by `sizes` and sets the attribute `kLinalgTransformMarker` to
+// `linalgMarker`.
+LogicalResult tileLinalgOpAndSetMarker(PatternRewriter &rewriter, Operation *op,
+ ArrayRef<int64_t> sizes,
+ StringRef linalgMarker);
+
+// Declarative transformation used in tablegen patterns.
+// Tiles `op` by `sizes`, fuses the producers of `operandIndicesToFuse` and sets
+// the attribute `kLinalgTransformMarker` to `linalgMarker`.
+LogicalResult tileAndFuseLinalgOpAndSetMarker(
+ PatternRewriter &rewriter, Operation *op, ArrayRef<int64_t> sizes,
+ ArrayRef<int64_t> operandIndicesToFuse, StringRef linalgMarker);
+
+namespace detail {
+// Implementation detail of isProducedByOpOfType that avoids the need for
+// explicit template instantiations.
+bool isProducedByOpOfTypeImpl(Operation *consumerOp, Value *consumedView,
+ llvm::function_ref<bool(Operation *)> isaOpType);
+} // namespace detail
+
+// Returns true if the `consumedView` value use in `consumerOp` is produced by
+// an op of type `OpTy`. This is used to implement use-def type information on
+// buffers.
+template <typename OpTy>
+bool isProducedByOpOfType(Operation *consumerOp, Value *consumedView) {
+ return detail::isProducedByOpOfTypeImpl(
+ consumerOp, consumedView, [](Operation *op) { return isa<OpTy>(op); });
+}
+
+} // namespace linalg
+} // namespace mlir
+
+#endif // DIALECT_LINALG_TRANSFORMS_LINALGTRANSFORMS_H_
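Editor's note: a minimal usage sketch for the `isProducedByOpOfType` helper declared above, assuming this change's API where `Value` is still a pointer type; `consumesMatmulResult`, `consumerOp` and `view` are hypothetical names.

```c++
#include "mlir/Dialect/Linalg/Transforms/LinalgTransforms.h"

using namespace mlir;

// Hypothetical helper (not part of this change): true when `view`, as
// consumed by `consumerOp`, was last produced by a linalg.matmul.
static bool consumesMatmulResult(Operation *consumerOp, Value *view) {
  return linalg::isProducedByOpOfType<linalg::MatmulOp>(consumerOp, view);
}
```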
diff --git a/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.h b/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.h
index 64e52ba..cd4ce2c 100644
--- a/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.h
+++ b/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.h
@@ -24,10 +24,9 @@
#define MLIR_DIALECT_STANDARDOPS_OPS_H
#include "mlir/Analysis/CallInterfaces.h"
-#include "mlir/IR/Attributes.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/Dialect.h"
-#include "mlir/IR/OpDefinition.h"
+#include "mlir/IR/OpImplementation.h"
#include "mlir/IR/StandardTypes.h"
// Pull in all enum type definitions and utility function declarations.
diff --git a/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.td b/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.td
index eb7ebbb..66613e0 100644
--- a/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.td
+++ b/third_party/mlir/include/mlir/Dialect/StandardOps/Ops.td
@@ -26,6 +26,7 @@
include "mlir/IR/OpBase.td"
#endif // OP_BASE
+include "mlir/IR/OpAsmInterface.td"
include "mlir/Analysis/CallInterfaces.td"
def Std_Dialect : Dialect {
@@ -580,7 +581,8 @@
let hasCanonicalizer = 1;
}
-def ConstantOp : Std_Op<"constant", [NoSideEffect]> {
+def ConstantOp : Std_Op<"constant",
+ [NoSideEffect, DeclareOpInterfaceMethods<OpAsmOpInterface>]> {
let summary = "constant";
let arguments = (ins AnyAttr:$value);
@@ -1237,7 +1239,7 @@
let hasCanonicalizer = 1;
}
-def SubViewOp : Std_Op<"subview", [SameVariadicOperandSize, NoSideEffect]> {
+def SubViewOp : Std_Op<"subview", [NoSideEffect]> {
let summary = "memref subview operation";
let description = [{
The "subview" operation converts a memref type to another memref type
@@ -1252,11 +1254,11 @@
dynamic sizes of the result "view" memref type.
*) Strides: zero or memref-rank number of dynamic strides which are applied
multiplicatively to the base memref strides in each dimension.
- Note on the number of operands for offsets, sizes and strides: either
- memref-rank number of operands must be set for each of offsets, sizes and
- strides, or zero operands must be specified for offsets, sizes and strides
- (in which case the base and subview memrefs must all have constant offset
- sizes and strides).
+
+  Note on the number of operands for offsets, sizes and strides: for
+  each of these, the number of operands must either match the rank of
+  the memref or be zero. In the latter case, the corresponding values
+  are treated as constants.
Example 1:
@@ -1292,11 +1294,62 @@
%1 = subview %0[][][]
: memref<8x16x4xf32, (d0, d1, d2) -> (d0 * 64 + d1 * 4 + d2)> to
memref<4x4x4xf32, (d0, d1, d2) -> (d0 * 16 + d1 * 4 + d2 + 8)>
+
+ Example 4:
+
+ %0 = alloc(%arg0, %arg1) : memref<?x?xf32>
+
+ // Subview with constant size, but dynamic offsets and
+ // strides. The resulting memref has a static shape, but if the
+ // base memref has an affine map to describe the layout, the result
+ // memref also uses an affine map to describe the layout. The
+  // strides of the result memref are computed as follows:
+  //
+  // Let #map1 represent the layout of the base memref, and #map2
+  // represent the layout of the result memref. A #mapsubview can be
+  // constructed to map an index from the result memref to the base
+  // memref (note that the description below uses more convenient
+  // naming for symbols, while in affine maps, symbols are
+  // represented as unsigned numbers that identify that symbol in the
+  // given affine map).
+ //
+ // #mapsubview = (d0, d1)[o0, o1, t0, t1] -> (d0 * t0 + o0, d1 * t1 + o1)
+ //
+  // where o0, o1, ... are offsets and t0, t1, ... are strides. Then,
+ //
+ // #map2 = #map1.compose(#mapsubview)
+ //
+ // If the layout map is represented as
+ //
+ // #map1 = (d0, d1)[s0, s1, s2] -> (d0 * s1 + d1 * s2 + s0)
+ //
+ // then,
+ //
+ // #map2 = (d0, d1)[s0, s1, s2, o0, o1, t0, t1] ->
+ // (d0 * s1 * t0 + d1 * s2 * t1 + o0 * s1 + o1 * s2 + s0)
+ //
+ // Representing this canonically
+ //
+ // #map2 = (d0, d1)[r0, r1, r2] -> (d0 * r1 + d1 * r2 + r0)
+ //
+  // where r0 = o0 * s1 + o1 * s2 + s0, r1 = s1 * t0, r2 = s2 * t1.
+  %1 = subview %0[%i, %j][][%x, %y]
+ : memref<?x?xf32, (d0, d1)[s0, s1, s2] -> (d0 * s1 + d1 * s2 + s0)> to
+ memref<4x4xf32, (d0, d1)[r0, r1, r2] -> (d0 * r1 + d1 * r2 + r0)>
+
+  // Note that the subview op does not guarantee that the result
+  // memref is "in-bounds" w.r.t. the base memref. It is up to the
+  // client to ensure that the subview is accessed in a manner that
+  // is in-bounds.
+
}
}];
- let arguments = (ins AnyMemRef:$source, Variadic<Index>:$offsets,
- Variadic<Index>:$sizes, Variadic<Index>:$strides);
+ // TODO(b/144779634, ravishankarm) : Use different arguments for
+ // offsets, sizes and strides.
+ let arguments = (ins AnyMemRef:$source, I32Attr:$num_offsets,
+ I32Attr:$num_sizes, I32Attr:$num_strides,
+ Variadic<Index>:$operands);
let results = (outs AnyMemRef);
let builders = [OpBuilder<
@@ -1305,11 +1358,12 @@
"ArrayRef<Value *> strides, Type resultType = Type(), "
"ArrayRef<NamedAttribute> attrs = {}">,
OpBuilder<
- "Builder *builder, OperationState &result, Type resultType, Value *source",
- [{
- result.addOperands(source);
- result.addTypes(resultType);
- }]>];
+ "Builder *builder, OperationState &result, Type resultType, Value *source">,
+ OpBuilder<
+ "Builder *builder, OperationState &result, Type resultType, Value *source, "
+ "unsigned num_offsets, unsigned num_sizes, unsigned num_strides, "
+ "ArrayRef<Value *> offsets, ArrayRef<Value *> sizes, "
+ "ArrayRef<Value *> strides">];
let extraClassDeclaration = [{
/// Returns the type of the base memref operand.
@@ -1320,6 +1374,21 @@
/// The result of a subview is always a memref.
MemRefType getType() { return getResult()->getType().cast<MemRefType>(); }
+ /// Returns as integer value the number of offset operands.
+ int64_t getNumOffsets() {
+ return num_offsets().getSExtValue();
+ }
+
+ /// Returns as integer value the number of size operands.
+ int64_t getNumSizes() {
+ return num_sizes().getSExtValue();
+ }
+
+ /// Returns as integer value the number of stride operands.
+ int64_t getNumStrides() {
+ return num_strides().getSExtValue();
+ }
+
/// Returns the dynamic offsets for this subview operation.
operand_range getDynamicOffsets();
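Editor's note: with offsets, sizes and strides folded into one variadic `$operands` list, consumers must re-partition that list via the three count attributes. A minimal sketch using the accessors declared above; `splitSubViewOperands` is a hypothetical name, mirroring the slicing done in the LLVM lowering later in this change.

```c++
// Partition SubViewOp's operand list (operand 0 is $source) into the three
// dynamic groups, in the order offsets, then sizes, then strides.
static void splitSubViewOperands(SubViewOp op,
                                 SmallVectorImpl<Value *> &offsets,
                                 SmallVectorImpl<Value *> &sizes,
                                 SmallVectorImpl<Value *> &strides) {
  auto it = std::next(op.getOperands().begin()); // skip $source
  for (int64_t i = 0, e = op.getNumOffsets(); i < e; ++i)
    offsets.push_back(*it++);
  for (int64_t i = 0, e = op.getNumSizes(); i < e; ++i)
    sizes.push_back(*it++);
  for (int64_t i = 0, e = op.getNumStrides(); i < e; ++i)
    strides.push_back(*it++);
}
```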
diff --git a/third_party/mlir/include/mlir/Dialect/VectorOps/CMakeLists.txt b/third_party/mlir/include/mlir/Dialect/VectorOps/CMakeLists.txt
index 6cc7e44..3849dd7 100644
--- a/third_party/mlir/include/mlir/Dialect/VectorOps/CMakeLists.txt
+++ b/third_party/mlir/include/mlir/Dialect/VectorOps/CMakeLists.txt
@@ -2,3 +2,7 @@
mlir_tablegen(VectorOps.h.inc -gen-op-decls)
mlir_tablegen(VectorOps.cpp.inc -gen-op-defs)
add_public_tablegen_target(MLIRVectorOpsIncGen)
+
+set(LLVM_TARGET_DEFINITIONS VectorTransformPatterns.td)
+mlir_tablegen(VectorTransformPatterns.h.inc -gen-rewriters)
+add_public_tablegen_target(MLIRVectorTransformPatternsIncGen)
diff --git a/third_party/mlir/include/mlir/Dialect/VectorOps/VectorOps.td b/third_party/mlir/include/mlir/Dialect/VectorOps/VectorOps.td
index 12c5612..3b71e59 100644
--- a/third_party/mlir/include/mlir/Dialect/VectorOps/VectorOps.td
+++ b/third_party/mlir/include/mlir/Dialect/VectorOps/VectorOps.td
@@ -49,7 +49,105 @@
let parser = [{ return ::parse$cppClass(parser, result); }];
}
-def VectorExtractElementOp :
+// TODO(andydavis, ntv) Add an attribute to specify a different algebra
+// with operators other than the current set: {*, +}.
+// TODO(andydavis) Consider using AffineMaps to express contracting, batch
+// and free dimension pairs.
+def Vector_ContractionOp :
+ Vector_Op<"contract", [NoSideEffect]>,
+ Arguments<(ins AnyVector:$lhs, AnyVector:$rhs, AnyVector:$acc,
+ Variadic<TupleOf<[Index]>>:$masks)>,
+ Results<(outs AnyVector)> {
+ let summary = "vector contraction operation";
+ let description = [{
+ Computes the sum of products of vector elements along contracting
+ dimension pairs from 2 vectors of rank M and N respectively, adds this
+ intermediate result to the accumulator argument of rank K, and returns a
+ vector result of rank K (where K = num_lhs_free_dims + num_rhs_free_dims +
+ num_batch_dims (see dimension type descriptions below)).
+
+ Optional vector mask arguments specify the dynamic dimension sizes of
+ valid data within the lhs/rhs vector arguments.
+
+ Dimensions for the arguments and result type fall into three categories:
+ *) Contracting: contracting dimensions are present in the lhs and rhs
+ arguments but not in the output (or optional accumulator
+ argument). These are the dimensions along which the vector
+ contraction op computes the sum of products, and contracting
+ dimension pair dimension sizes must match between lhs/rhs.
+ *) Batch: batch dimensions are non-contracting dimensions and so are
+ present in the output and in the accumulator argument. The lhs
+ and rhs co-iterate along the batch dimension and so dimension
+ sizes must match across all arguments and result.
+ *) Free: free dimensions are non-contraction, non-batch dimensions and
+ are present in the output and accumulator argument. The lhs and
+ rhs free dimensions are unrelated to each other and do not
+ co-iterate.
+
+ Contracting and batch dimensions are specified as dimension pairs
+ of logical dimension numbers: the first in the pair represents the lhs
+ logical dimension number and the second in the pair represents the
+ associated rhs logical dimension number. A dimension pair binds together
+ logical dimension numbers from the lhs/rhs which co-iterate together, either
+ as contracting or batch dimensions.
+
+ Examples:
+
+ // 2D vector contraction with one contracting dimension (matmul).
+ %3 = vector.contract %0, %1, %2
+ { contracting_dim_map = [[1, 0]] }
+ : vector<4x3xf32>, vector<3x7xf32> into vector<4x7xf32>
+
+ // 4D to 3D vector contraction with two contracting dimensions and
+ // one batch dimension.
+ %4 = vector.contract %0, %1, %2
+ { batch_dim_map = [[1, 0]], contracting_dim_map = [[0, 2], [2, 1]] }
+ : vector<7x8x16x15xf32>, vector<8x16x7x5xf32> into vector<8x15x5xf32>
+
+ // 4D vector contraction with two contracting dimensions and optional
+ // vector mask arguments.
+ %lhs_mask = vector.make_tuple %size0, %size1, %size2, %size3
+ : tuple<index, index, index, index>
+ %rhs_mask = vector.make_tuple %size4, %size5, %size6, %size7
+ : tuple<index, index, index, index>
+
+ %5 = vector.contract %0, %1, %2, %lhs_mask, %rhs_mask
+ { contracting_dim_map = [[0, 2], [2, 1]] }
+ : vector<7x8x16x15xf32>, vector<8x16x7x5xf32> into vector<8x15x8x5xf32>
+ }];
+ let extraClassDeclaration = [{
+ VectorType getLhsType() {
+ return lhs()->getType().cast<VectorType>();
+ }
+ VectorType getRhsType() {
+ return rhs()->getType().cast<VectorType>();
+ }
+ VectorType getAccType() {
+ return acc()->getType().cast<VectorType>();
+ }
+ TupleType getLHSVectorMaskType() {
+ if (llvm::size(masks()) != 2) return TupleType();
+ return getOperand(3)->getType().cast<TupleType>();
+ }
+ TupleType getRHSVectorMaskType() {
+ if (llvm::size(masks()) != 2) return TupleType();
+ return getOperand(4)->getType().cast<TupleType>();
+ }
+ VectorType getResultType() {
+ return getResult()->getType().cast<VectorType>();
+ }
+ static StringRef getContractingDimMapAttrName() {
+ return "contracting_dim_map";
+ }
+ static StringRef getBatchDimMapAttrName() {
+ return "batch_dim_map";
+ }
+ std::vector<std::pair<int64_t, int64_t>> getContractingDimMap();
+ std::vector<std::pair<int64_t, int64_t>> getBatchDimMap();
+ }];
+}
+
+def Vector_ExtractElementOp :
Vector_Op<"extractelement", [NoSideEffect,
PredOpTrait<"operand and result have same element type",
TCresVTEtIsSameAsOpBase<0, 0>>]>,
@@ -76,7 +174,7 @@
}];
}
-def VectorStridedSliceOp :
+def Vector_StridedSliceOp :
Vector_Op<"strided_slice", [NoSideEffect,
PredOpTrait<"operand and result have same element type",
TCresVTEtIsSameAsOpBase<0, 0>>]>,
@@ -118,7 +216,7 @@
}];
}
-def VectorOuterProductOp :
+def Vector_OuterProductOp :
Vector_Op<"outerproduct", [NoSideEffect, SameOperandsAndResultElementType]>,
Arguments<(ins AnyVector:$lhs, AnyVector:$rhs, Variadic<AnyVector>:$acc)>,
Results<(outs AnyVector)> {
@@ -157,7 +255,7 @@
}];
}
-def VectorTransferReadOp :
+def Vector_TransferReadOp :
Vector_Op<"transfer_read">,
Arguments<(ins AnyMemRef:$memref, Variadic<Index>:$indices,
AffineMapAttr:$permutation_map, AnyType:$padding)>,
@@ -288,7 +386,7 @@
}];
}
-def VectorTransferWriteOp :
+def Vector_TransferWriteOp :
Vector_Op<"transfer_write">,
Arguments<(ins AnyVector:$vector, AnyMemRef:$memref,
Variadic<Index>:$indices,
@@ -349,7 +447,7 @@
}];
}
-def VectorTypeCastOp :
+def Vector_TypeCastOp :
Vector_Op<"type_cast", [NoSideEffect]>,
Arguments<(ins StaticShapeMemRefOf<[AnyType]>:$memref)>,
Results<(outs AnyMemRef)> {
@@ -391,4 +489,21 @@
}
}];
}
+
+// TODO(andydavis) Morph this operation into a Vector_MaskOp.
+def Vector_IndexTupleOp :
+ Vector_Op<"make_index_tuple", [NoSideEffect]>,
+ Arguments<(ins Variadic<Index>:$operands)>,
+ Results<(outs TupleOf<[Index]>)> {
+ let summary = "creates a tuple of operand values";
+ let description = [{
+ Creates and returns a tuple of its operands which must be of index type.
+
+ Example:
+
+ %1 = vector.make_index_tuple %size0, %size1, %size2
+ : tuple<index, index, index>
+
+ }];
+}
#endif // VECTOR_OPS
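Editor's note: a worked sketch of the vector.contract shape rules described above, under the assumption (consistent with the examples) that result dimensions appear as batch dims, then lhs free dims, then rhs free dims; the function name and exact ordering are illustrative.

```c++
// For the batched example above: lhs = 7x8x16x15, rhs = 8x16x7x5,
// contracting {{0, 2}, {2, 1}}, batch {{1, 0}} => result 8x15x5.
static SmallVector<int64_t, 4>
expectedContractionShape(ArrayRef<int64_t> lhsShape, ArrayRef<int64_t> rhsShape,
                         ArrayRef<std::pair<int64_t, int64_t>> contractingDims,
                         ArrayRef<std::pair<int64_t, int64_t>> batchDims) {
  llvm::SmallDenseSet<int64_t> lhsUsed, rhsUsed;
  for (auto &p : contractingDims) {
    lhsUsed.insert(p.first);
    rhsUsed.insert(p.second);
  }
  SmallVector<int64_t, 4> result;
  for (auto &p : batchDims) { // batch dims co-iterate; take the lhs size.
    result.push_back(lhsShape[p.first]);
    lhsUsed.insert(p.first);
    rhsUsed.insert(p.second);
  }
  for (int64_t i = 0, e = lhsShape.size(); i < e; ++i)
    if (!lhsUsed.count(i))
      result.push_back(lhsShape[i]); // lhs free dims
  for (int64_t i = 0, e = rhsShape.size(); i < e; ++i)
    if (!rhsUsed.count(i))
      result.push_back(rhsShape[i]); // rhs free dims
  return result;
}
```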
diff --git a/third_party/mlir/include/mlir/Dialect/VectorOps/VectorTransformPatterns.td b/third_party/mlir/include/mlir/Dialect/VectorOps/VectorTransformPatterns.td
new file mode 100644
index 0000000..fe0940c
--- /dev/null
+++ b/third_party/mlir/include/mlir/Dialect/VectorOps/VectorTransformPatterns.td
@@ -0,0 +1,43 @@
+//===- VectorTransformPatterns.td - Vector-Vector patterns -*- tablegen -*-===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+//
+// This is the pattern definition file for declarative Vector transformations.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef VECTOR_TRANSFORMS
+#define VECTOR_TRANSFORMS
+
+include "mlir/Dialect/StandardOps/Ops.td"
+include "mlir/Dialect/VectorOps/VectorOps.td"
+
+class HasShape<list<int> shape> :
+ CPred<"hasShape($0, {" # StrJoinInt<shape>.result # "})">;
+
+class UnrollVectorOp<list<int> factors> : NativeCodeCall<
+ "unrollSingleResultOpMatchingType($_builder, $0->getDefiningOp(), " #
+ "{" # StrJoinInt<factors>.result # "})">;
+
+def : Pat<(AddFOp:$op_results $a, $b),
+ (UnrollVectorOp<[2, 2]> $op_results, $a, $b),
+ [(Constraint<HasShape<[4, 2]>> $a)]>;
+
+def : Pat<(AddFOp:$op_results $a, $b),
+ (UnrollVectorOp<[2, 2]> $op_results, $a, $b),
+ [(Constraint<HasShape<[4, 4]>> $a)]>;
+
+#endif // VECTOR_TRANSFORMS
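Editor's note: a minimal sketch of wiring these generated rewriters into a function pass; it assumes the tablegen-generated `populateWithGenerated` entry point and the `hasShape`/`unrollSingleResultOpMatchingType` helpers are visible at the include site, as arranged in VectorToVector.cpp later in this change.

```c++
// Inside a FunctionPass::runOnFunction (illustrative):
OwningRewritePatternList patterns;
populateWithGenerated(&getContext(), &patterns);
applyPatternsGreedily(getFunction(), patterns);
```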
diff --git a/third_party/mlir/include/mlir/EDSC/Intrinsics.h b/third_party/mlir/include/mlir/EDSC/Intrinsics.h
index e76bc2f..6e1c49f 100644
--- a/third_party/mlir/include/mlir/EDSC/Intrinsics.h
+++ b/third_party/mlir/include/mlir/EDSC/Intrinsics.h
@@ -215,7 +215,7 @@
using std_load = ValueBuilder<LoadOp>;
using std_store = OperationBuilder<StoreOp>;
using subi = ValueBuilder<SubIOp>;
-using vector_type_cast = ValueBuilder<vector::VectorTypeCastOp>;
+using vector_type_cast = ValueBuilder<vector::TypeCastOp>;
using view = ValueBuilder<ViewOp>;
/// Branches into the mlir::Block* captured by BlockHandle `b` with `operands`.
diff --git a/third_party/mlir/include/mlir/IR/Attributes.h b/third_party/mlir/include/mlir/IR/Attributes.h
index 8a5e3b5..b5b9a04 100644
--- a/third_party/mlir/include/mlir/IR/Attributes.h
+++ b/third_party/mlir/include/mlir/IR/Attributes.h
@@ -974,6 +974,20 @@
using DenseElementsAttr::DenseElementsAttr;
+ /// Get an instance of a DenseFPElementsAttr with the given arguments. This
+ /// simply wraps the DenseElementsAttr::get calls.
+ template <typename Arg>
+ static DenseFPElementsAttr get(const ShapedType &type, Arg &&arg) {
+ return DenseElementsAttr::get(type, llvm::makeArrayRef(arg))
+ .template cast<DenseFPElementsAttr>();
+ }
+ template <typename T>
+ static DenseFPElementsAttr get(const ShapedType &type,
+ const std::initializer_list<T> &list) {
+ return DenseElementsAttr::get(type, list)
+ .template cast<DenseFPElementsAttr>();
+ }
+
/// Generates a new DenseElementsAttr by mapping each value attribute, and
/// constructing the DenseElementsAttr given the new element type.
DenseElementsAttr
@@ -998,6 +1012,20 @@
using DenseElementsAttr::DenseElementsAttr;
+ /// Get an instance of a DenseIntElementsAttr with the given arguments. This
+ /// simply wraps the DenseElementsAttr::get calls.
+ template <typename Arg>
+ static DenseIntElementsAttr get(const ShapedType &type, Arg &&arg) {
+ return DenseElementsAttr::get(type, llvm::makeArrayRef(arg))
+ .template cast<DenseIntElementsAttr>();
+ }
+ template <typename T>
+ static DenseIntElementsAttr get(const ShapedType &type,
+ const std::initializer_list<T> &list) {
+ return DenseElementsAttr::get(type, list)
+ .template cast<DenseIntElementsAttr>();
+ }
+
/// Generates a new DenseElementsAttr by mapping each value attribute, and
/// constructing the DenseElementsAttr given the new element type.
DenseElementsAttr
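Editor's note: a small usage sketch for the typed `get` wrappers added above; the builder, tensor type and values are illustrative.

```c++
// With the new wrappers, no explicit cast from DenseElementsAttr is needed:
Builder builder(context); // `context` is an in-scope MLIRContext*.
auto type = RankedTensorType::get({2, 2}, builder.getIntegerType(32));
DenseIntElementsAttr attr = DenseIntElementsAttr::get(type, {1, 2, 3, 4});
```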
diff --git a/third_party/mlir/include/mlir/IR/CMakeLists.txt b/third_party/mlir/include/mlir/IR/CMakeLists.txt
new file mode 100644
index 0000000..555b16f
--- /dev/null
+++ b/third_party/mlir/include/mlir/IR/CMakeLists.txt
@@ -0,0 +1,4 @@
+set(LLVM_TARGET_DEFINITIONS OpAsmInterface.td)
+mlir_tablegen(OpAsmInterface.h.inc -gen-op-interface-decls)
+mlir_tablegen(OpAsmInterface.cpp.inc -gen-op-interface-defs)
+add_public_tablegen_target(MLIROpAsmInterfacesIncGen)
diff --git a/third_party/mlir/include/mlir/IR/OpAsmInterface.td b/third_party/mlir/include/mlir/IR/OpAsmInterface.td
new file mode 100644
index 0000000..974360e
--- /dev/null
+++ b/third_party/mlir/include/mlir/IR/OpAsmInterface.td
@@ -0,0 +1,65 @@
+//===- OpAsmInterface.td - Asm Interfaces for ops ----------*- tablegen -*-===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+//
+// This file contains Interfaces for interacting with the AsmParser and
+// AsmPrinter.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef MLIR_OPASMINTERFACE
+#define MLIR_OPASMINTERFACE
+
+#ifndef OP_BASE
+include "mlir/IR/OpBase.td"
+#endif // OP_BASE
+
+/// Interface for hooking into the OpAsmPrinter and OpAsmParser.
+def OpAsmOpInterface : OpInterface<"OpAsmOpInterface"> {
+ let description = [{
+ This interface provides hooks to interact with the AsmPrinter and AsmParser
+ classes.
+ }];
+
+ let methods = [
+ InterfaceMethod<[{
+ Get a special name to use when printing the results of this operation.
+ The given callback is invoked with a specific result value that starts a
+ result "pack", and the name to give this result pack. To signal that a
+ result pack should use the default naming scheme, a None can be passed
+ in instead of the name.
+
+ For example, if you have an operation that has four results and you want
+ to split these into three distinct groups you could do the following:
+
+ ```c++
+ setNameFn(getResult(0), "first_result");
+ setNameFn(getResult(1), "middle_results");
+ setNameFn(getResult(3), ""); // use the default numbering.
+ ```
+
+ This would print the operation as follows:
+
+ ```mlir
+ %first_result, %middle_results:2, %0 = "my.op" ...
+ ```
+ }],
+ "void", "getAsmResultNames", (ins "OpAsmSetValueNameFn":$setNameFn)
+ >,
+ ];
+}
+
+#endif // MLIR_OPASMINTERFACE
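Editor's note: what a C++ implementation of the hook might look like for an op declared with `DeclareOpInterfaceMethods<OpAsmOpInterface>`, as ConstantOp is earlier in this change; the naming logic is illustrative, not the actual implementation.

```c++
// Illustrative sketch: give constant results a friendlier asm name.
void ConstantOp::getAsmResultNames(OpAsmSetValueNameFn setNameFn) {
  if (auto boolAttr = getValue().dyn_cast<BoolAttr>())
    setNameFn(getResult(), boolAttr.getValue() ? "true" : "false");
  else
    setNameFn(getResult(), "cst");
}
```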
diff --git a/third_party/mlir/include/mlir/IR/OpImplementation.h b/third_party/mlir/include/mlir/IR/OpImplementation.h
index 4a970c0..666a90e 100644
--- a/third_party/mlir/include/mlir/IR/OpImplementation.h
+++ b/third_party/mlir/include/mlir/IR/OpImplementation.h
@@ -600,6 +600,10 @@
// Dialect OpAsm interface.
//===--------------------------------------------------------------------===//
+/// A functor used to set the name of the start of a result group of an
+/// operation. See 'getAsmResultNames' below for more details.
+using OpAsmSetValueNameFn = function_ref<void(Value *, StringRef)>;
+
class OpAsmDialectInterface
: public DialectInterface::Base<OpAsmDialectInterface> {
public:
@@ -621,11 +625,19 @@
virtual void
getTypeAliases(SmallVectorImpl<std::pair<Type, StringRef>> &aliases) const {}
- /// Get a special name to use when printing the given operation. The desired
- /// name should be streamed into 'os'.
- virtual void getOpResultName(Operation *op, raw_ostream &os) const {}
+ /// Get a special name to use when printing the given operation. See
+ /// OpAsmInterface.td#getAsmResultNames for usage details and documentation.
+ virtual void getAsmResultNames(Operation *op,
+ OpAsmSetValueNameFn setNameFn) const {}
};
+//===--------------------------------------------------------------------===//
+// Operation OpAsm interface.
+//===--------------------------------------------------------------------===//
+
+/// The OpAsmOpInterface, see OpAsmInterface.td for more details.
+#include "mlir/IR/OpAsmInterface.h.inc"
+
} // end namespace mlir
#endif
diff --git a/third_party/mlir/include/mlir/IR/Operation.h b/third_party/mlir/include/mlir/IR/Operation.h
index 92f7f9f..ab14e66 100644
--- a/third_party/mlir/include/mlir/IR/Operation.h
+++ b/third_party/mlir/include/mlir/IR/Operation.h
@@ -438,6 +438,23 @@
/// index.
unsigned getSuccessorOperandIndex(unsigned index);
+ /// Return a pair (successorIndex, successorArgIndex) containing the index
+ /// of the successor that `operandIndex` belongs to and the index of the
+ /// argument to that successor that `operandIndex` refers to.
+ ///
+ /// If `operandIndex` is not a successor operand, None is returned.
+ Optional<std::pair<unsigned, unsigned>>
+ decomposeSuccessorOperandIndex(unsigned operandIndex);
+
+ /// Returns the `BlockArgument*` corresponding to operand `operandIndex` in
+ /// some successor, or None if `operandIndex` isn't a successor operand index.
+ Optional<BlockArgument *> getSuccessorBlockArgument(unsigned operandIndex) {
+ auto decomposed = decomposeSuccessorOperandIndex(operandIndex);
+ if (!decomposed.hasValue())
+ return None;
+ return getSuccessor(decomposed->first)->getArgument(decomposed->second);
+ }
+
//===--------------------------------------------------------------------===//
// Accessors for various properties of operations
//===--------------------------------------------------------------------===//
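Editor's note: a brief usage sketch for the successor-operand helpers added above; `op` and `operandIndex` are hypothetical.

```c++
// Map a successor operand back to the block argument it is forwarded to.
if (Optional<BlockArgument *> arg = op->getSuccessorBlockArgument(operandIndex)) {
  BlockArgument *target = *arg; // operand `operandIndex` flows into `target`.
  (void)target;
}
```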
diff --git a/third_party/mlir/include/mlir/Support/Functional.h b/third_party/mlir/include/mlir/Support/Functional.h
index edc5e1d..eca7504 100644
--- a/third_party/mlir/include/mlir/Support/Functional.h
+++ b/third_party/mlir/include/mlir/Support/Functional.h
@@ -19,6 +19,7 @@
#define MLIR_SUPPORT_FUNCTIONAL_H_
#include "llvm/ADT/STLExtras.h"
+#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
/// This file provides some simple template functional-style sugar to operate
diff --git a/third_party/mlir/include/mlir/TableGen/OpInterfaces.h b/third_party/mlir/include/mlir/TableGen/OpInterfaces.h
index 46f43c6..4a87876 100644
--- a/third_party/mlir/include/mlir/TableGen/OpInterfaces.h
+++ b/third_party/mlir/include/mlir/TableGen/OpInterfaces.h
@@ -23,6 +23,7 @@
#define MLIR_TABLEGEN_OPINTERFACES_H_
#include "mlir/Support/LLVM.h"
+#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
namespace llvm {
diff --git a/third_party/mlir/include/mlir/Transforms/RegionUtils.h b/third_party/mlir/include/mlir/Transforms/RegionUtils.h
index 10e6dfb..944f601 100644
--- a/third_party/mlir/include/mlir/Transforms/RegionUtils.h
+++ b/third_party/mlir/include/mlir/Transforms/RegionUtils.h
@@ -60,6 +60,12 @@
void getUsedValuesDefinedAbove(llvm::MutableArrayRef<Region> regions,
llvm::SetVector<Value *> &values);
+/// Run a set of structural simplifications over the given regions. This
+/// includes transformations like unreachable block elimination, dead argument
+/// elimination, as well as some other DCE. This function returns success if any
+/// of the regions were simplified, failure otherwise.
+LogicalResult simplifyRegions(llvm::MutableArrayRef<Region> regions);
+
} // namespace mlir
#endif // MLIR_TRANSFORMS_REGIONUTILS_H_
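Editor's note: a usage sketch for the new `simplifyRegions` entry point; `op` is hypothetical.

```c++
// Run the structural simplifications and react only if something changed.
if (succeeded(simplifyRegions(op->getRegions()))) {
  // Unreachable blocks, dead block arguments, or dead ops were removed.
}
```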
diff --git a/third_party/mlir/lib/Analysis/LoopAnalysis.cpp b/third_party/mlir/lib/Analysis/LoopAnalysis.cpp
index b466764..f01e548 100644
--- a/third_party/mlir/lib/Analysis/LoopAnalysis.cpp
+++ b/third_party/mlir/lib/Analysis/LoopAnalysis.cpp
@@ -274,8 +274,7 @@
}
static bool isVectorTransferReadOrWrite(Operation &op) {
- return isa<vector::VectorTransferReadOp>(op) ||
- isa<vector::VectorTransferWriteOp>(op);
+ return isa<vector::TransferReadOp>(op) || isa<vector::TransferWriteOp>(op);
}
using VectorizableOpFun = std::function<bool(AffineForOp, Operation &)>;
diff --git a/third_party/mlir/lib/Analysis/VectorAnalysis.cpp b/third_party/mlir/lib/Analysis/VectorAnalysis.cpp
index 2dab348..666ee071 100644
--- a/third_party/mlir/lib/Analysis/VectorAnalysis.cpp
+++ b/third_party/mlir/lib/Analysis/VectorAnalysis.cpp
@@ -39,15 +39,15 @@
using llvm::SetVector;
-Optional<SmallVector<unsigned, 4>>
-mlir::shapeRatio(ArrayRef<int64_t> superShape, ArrayRef<int64_t> subShape) {
+Optional<SmallVector<int64_t, 4>> mlir::shapeRatio(ArrayRef<int64_t> superShape,
+ ArrayRef<int64_t> subShape) {
if (superShape.size() < subShape.size()) {
- return Optional<SmallVector<unsigned, 4>>();
+ return Optional<SmallVector<int64_t, 4>>();
}
// Starting from the end, compute the integer divisors.
// Set the boolean `divides` if integral division is not possible.
- std::vector<unsigned> result;
+ std::vector<int64_t> result;
result.reserve(superShape.size());
bool divides = true;
auto divide = [&divides, &result](int superSize, int subSize) {
@@ -76,11 +76,11 @@
"super to sub shape ratio is not of the same size as the super rank");
// Reverse again to get it back in the proper order and return.
- return SmallVector<unsigned, 4>{result.rbegin(), result.rend()};
+ return SmallVector<int64_t, 4>{result.rbegin(), result.rend()};
}
-Optional<SmallVector<unsigned, 4>> mlir::shapeRatio(VectorType superVectorType,
- VectorType subVectorType) {
+Optional<SmallVector<int64_t, 4>> mlir::shapeRatio(VectorType superVectorType,
+ VectorType subVectorType) {
assert(superVectorType.getElementType() == subVectorType.getElementType() &&
"vector types must be of the same elemental type");
return shapeRatio(superVectorType.getShape(), subVectorType.getShape());
@@ -194,10 +194,10 @@
bool mustDivide = false;
(void)mustDivide;
VectorType superVectorType;
- if (auto read = dyn_cast<vector::VectorTransferReadOp>(op)) {
+ if (auto read = dyn_cast<vector::TransferReadOp>(op)) {
superVectorType = read.getVectorType();
mustDivide = true;
- } else if (auto write = dyn_cast<vector::VectorTransferWriteOp>(op)) {
+ } else if (auto write = dyn_cast<vector::TransferWriteOp>(op)) {
superVectorType = write.getVectorType();
mustDivide = true;
} else if (op.getNumResults() == 0) {
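Editor's note: a worked example of the retyped `shapeRatio` (now returning int64_t ratios); values are illustrative.

```c++
// shapeRatio divides the super shape by the sub shape from the back:
// {4, 8} / {2, 2} -> {2, 4}; a non-divisible pair yields None.
Optional<SmallVector<int64_t, 4>> ratio = mlir::shapeRatio({4, 8}, {2, 2});
assert(ratio && (*ratio)[0] == 2 && (*ratio)[1] == 4);
```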
diff --git a/third_party/mlir/lib/Conversion/CMakeLists.txt b/third_party/mlir/lib/Conversion/CMakeLists.txt
index fe8496d..6d370f7 100644
--- a/third_party/mlir/lib/Conversion/CMakeLists.txt
+++ b/third_party/mlir/lib/Conversion/CMakeLists.txt
@@ -3,6 +3,7 @@
add_subdirectory(GPUToNVVM)
add_subdirectory(GPUToROCDL)
add_subdirectory(GPUToSPIRV)
+add_subdirectory(LinalgToLLVM)
add_subdirectory(LoopsToGPU)
add_subdirectory(LoopToStandard)
add_subdirectory(StandardToLLVM)
diff --git a/third_party/mlir/lib/Conversion/LinalgToLLVM/CMakeLists.txt b/third_party/mlir/lib/Conversion/LinalgToLLVM/CMakeLists.txt
new file mode 100644
index 0000000..9d2b5da
--- /dev/null
+++ b/third_party/mlir/lib/Conversion/LinalgToLLVM/CMakeLists.txt
@@ -0,0 +1,15 @@
+add_llvm_library(MLIRLinalgToLLVM
+ LinalgToLLVM.cpp
+
+ ADDITIONAL_HEADER_DIRS
+ ${MLIR_MAIN_INCLUDE_DIR}/mlir/Conversion/LinalgToLLVM
+)
+set(LIBS
+ MLIRLLVMIR
+ MLIRTransforms
+ LLVMCore
+ LLVMSupport
+ )
+
+add_dependencies(MLIRLinalgToLLVM ${LIBS})
+target_link_libraries(MLIRLinalgToLLVM ${LIBS})
diff --git a/third_party/mlir/lib/Dialect/Linalg/Transforms/LowerToLLVMDialect.cpp b/third_party/mlir/lib/Conversion/LinalgToLLVM/LinalgToLLVM.cpp
similarity index 94%
rename from third_party/mlir/lib/Dialect/Linalg/Transforms/LowerToLLVMDialect.cpp
rename to third_party/mlir/lib/Conversion/LinalgToLLVM/LinalgToLLVM.cpp
index 6e97a7a..ebb0fd7 100644
--- a/third_party/mlir/lib/Dialect/Linalg/Transforms/LowerToLLVMDialect.cpp
+++ b/third_party/mlir/lib/Conversion/LinalgToLLVM/LinalgToLLVM.cpp
@@ -1,4 +1,4 @@
-//===- LowerToLLVMDialect.cpp - conversion from Linalg to LLVM dialect ----===//
+//===- LinalgToLLVM.cpp - conversion from Linalg to LLVM dialect ----------===//
//
// Copyright 2019 The MLIR Authors.
//
@@ -15,6 +15,7 @@
// limitations under the License.
// =============================================================================
+#include "mlir/Conversion/LinalgToLLVM/LinalgToLLVM.h"
#include "mlir/Conversion/AffineToStandard/AffineToStandard.h"
#include "mlir/Conversion/LoopToStandard/ConvertLoopToStandard.h"
#include "mlir/Conversion/StandardToLLVM/ConvertStandardToLLVM.h"
@@ -373,19 +374,11 @@
return fnNameAttr;
}
-namespace {
-// The conversion class from Linalg to LLVMIR.
-class LinalgTypeConverter : public LLVMTypeConverter {
- using LLVMTypeConverter::LLVMTypeConverter;
-
-public:
- Type convertType(Type t) override {
- if (auto result = LLVMTypeConverter::convertType(t))
- return result;
- return convertLinalgType(t, *this);
- }
-};
-} // end anonymous namespace
+Type LinalgTypeConverter::convertType(Type t) {
+ if (auto result = LLVMTypeConverter::convertType(t))
+ return result;
+ return convertLinalgType(t, *this);
+}
// LinalgOpConversion<LinalgOp> creates a new call to the
// `LinalgOp::getLibraryCallName()` function.
@@ -483,21 +476,20 @@
}
/// Populate the given list with patterns that convert from Linalg to LLVM.
-static void
-populateLinalgToLLVMConversionPatterns(LinalgTypeConverter &converter,
- OwningRewritePatternList &patterns,
- MLIRContext *ctx) {
+void mlir::populateLinalgToLLVMConversionPatterns(
+ LinalgTypeConverter &converter, OwningRewritePatternList &patterns,
+ MLIRContext *ctx) {
patterns.insert<RangeOpConversion, SliceOpConversion, TransposeOpConversion,
YieldOpConversion>(ctx, converter);
}
namespace {
-struct LowerLinalgToLLVMPass : public ModulePass<LowerLinalgToLLVMPass> {
+struct ConvertLinalgToLLVMPass : public ModulePass<ConvertLinalgToLLVMPass> {
void runOnModule() override;
};
} // namespace
-void LowerLinalgToLLVMPass::runOnModule() {
+void ConvertLinalgToLLVMPass::runOnModule() {
auto module = getModule();
// Convert to the LLVM IR dialect using the converter defined above.
@@ -520,10 +512,10 @@
}
std::unique_ptr<OpPassBase<ModuleOp>>
-mlir::linalg::createLowerLinalgToLLVMPass() {
- return std::make_unique<LowerLinalgToLLVMPass>();
+mlir::linalg::createConvertLinalgToLLVMPass() {
+ return std::make_unique<ConvertLinalgToLLVMPass>();
}
-static PassRegistration<LowerLinalgToLLVMPass>
+static PassRegistration<ConvertLinalgToLLVMPass>
pass("convert-linalg-to-llvm",
- "Lower the operations from the linalg dialect into the LLVM dialect");
+ "Convert the operations from the linalg dialect into the LLVM dialect");
diff --git a/third_party/mlir/lib/Conversion/StandardToLLVM/ConvertStandardToLLVM.cpp b/third_party/mlir/lib/Conversion/StandardToLLVM/ConvertStandardToLLVM.cpp
index 33f5616..ae2b783 100644
--- a/third_party/mlir/lib/Conversion/StandardToLLVM/ConvertStandardToLLVM.cpp
+++ b/third_party/mlir/lib/Conversion/StandardToLLVM/ConvertStandardToLLVM.cpp
@@ -1477,6 +1477,21 @@
auto loc = op->getLoc();
auto viewOp = cast<SubViewOp>(op);
SubViewOpOperandAdaptor adaptor(operands);
+ // TODO(b/144779634, ravishankarm) : After Tblgen is adapted to support
+ // having multiple variadic operands where each operand can have different
+ // number of entries, clean all of this up.
+ SmallVector<Value *, 2> dynamicOffsets(
+ std::next(operands.begin()),
+ std::next(operands.begin(), 1 + viewOp.getNumOffsets()));
+ SmallVector<Value *, 2> dynamicSizes(
+ std::next(operands.begin(), 1 + viewOp.getNumOffsets()),
+ std::next(operands.begin(),
+ 1 + viewOp.getNumOffsets() + viewOp.getNumSizes()));
+ SmallVector<Value *, 2> dynamicStrides(
+ std::next(operands.begin(),
+ 1 + viewOp.getNumOffsets() + viewOp.getNumSizes()),
+ operands.end());
+
auto sourceMemRefType = viewOp.source()->getType().cast<MemRefType>();
auto sourceElementTy =
lowering.convertType(sourceMemRefType.getElementType())
@@ -1492,8 +1507,8 @@
// Early exit for 0-D and operands lesser than `rank` corner cases.
unsigned rank = sourceMemRefType.getRank();
- if (viewMemRefType.getRank() == 0 || rank != adaptor.offsets().size() ||
- rank != adaptor.sizes().size() || rank != adaptor.strides().size())
+ if (viewMemRefType.getRank() == 0 || rank != dynamicOffsets.size() ||
+ rank != dynamicSizes.size() || rank != dynamicStrides.size())
return matchFailure();
int64_t offset;
@@ -1526,7 +1541,7 @@
// Offset.
Value *baseOffset = sourceMemRef.offset(rewriter, loc);
for (int i = 0, e = viewMemRefType.getRank(); i < e; ++i) {
- Value *min = adaptor.offsets()[i];
+ Value *min = dynamicOffsets[i];
baseOffset = rewriter.create<LLVM::AddOp>(
loc, baseOffset,
rewriter.create<LLVM::MulOp>(loc, min, strideValues[i]));
@@ -1535,10 +1550,10 @@
// Update sizes and strides.
for (int i = viewMemRefType.getRank() - 1; i >= 0; --i) {
- targetMemRef.setSize(rewriter, loc, i, adaptor.sizes()[i]);
+ targetMemRef.setSize(rewriter, loc, i, dynamicSizes[i]);
targetMemRef.setStride(rewriter, loc, i,
rewriter.create<LLVM::MulOp>(
- loc, adaptor.strides()[i], strideValues[i]));
+ loc, dynamicStrides[i], strideValues[i]));
}
rewriter.replaceOp(op, {targetMemRef});
diff --git a/third_party/mlir/lib/Conversion/VectorConversions/CMakeLists.txt b/third_party/mlir/lib/Conversion/VectorConversions/CMakeLists.txt
index f76b413..c8d699e 100644
--- a/third_party/mlir/lib/Conversion/VectorConversions/CMakeLists.txt
+++ b/third_party/mlir/lib/Conversion/VectorConversions/CMakeLists.txt
@@ -1,6 +1,7 @@
-add_llvm_library(MLIRVectorToLLVM
+add_llvm_library(MLIRVectorConversions
VectorToLLVM.cpp
VectorToLoops.cpp
+ VectorToVector.cpp
ADDITIONAL_HEADER_DIRS
${MLIR_MAIN_INCLUDE_DIR}/mlir/Conversion/VectorConversions
@@ -12,5 +13,6 @@
LLVMSupport
)
-add_dependencies(MLIRVectorToLLVM ${LIBS})
-target_link_libraries(MLIRVectorToLLVM ${LIBS})
+add_dependencies(MLIRVectorConversions ${LIBS})
+add_dependencies(MLIRVectorConversions MLIRVectorTransformPatternsIncGen)
+target_link_libraries(MLIRVectorConversions ${LIBS})
diff --git a/third_party/mlir/lib/Conversion/VectorConversions/VectorToLLVM.cpp b/third_party/mlir/lib/Conversion/VectorConversions/VectorToLLVM.cpp
index cd01666..5420ad0 100644
--- a/third_party/mlir/lib/Conversion/VectorConversions/VectorToLLVM.cpp
+++ b/third_party/mlir/lib/Conversion/VectorConversions/VectorToLLVM.cpp
@@ -53,15 +53,15 @@
public:
explicit VectorExtractElementOpConversion(MLIRContext *context,
LLVMTypeConverter &typeConverter)
- : LLVMOpLowering(vector::VectorExtractElementOp::getOperationName(),
- context, typeConverter) {}
+ : LLVMOpLowering(vector::ExtractElementOp::getOperationName(), context,
+ typeConverter) {}
PatternMatchResult
matchAndRewrite(Operation *op, ArrayRef<Value *> operands,
ConversionPatternRewriter &rewriter) const override {
auto loc = op->getLoc();
- auto adaptor = vector::VectorExtractElementOpOperandAdaptor(operands);
- auto extractOp = cast<vector::VectorExtractElementOp>(op);
+ auto adaptor = vector::ExtractElementOpOperandAdaptor(operands);
+ auto extractOp = cast<vector::ExtractElementOp>(op);
auto vectorType = extractOp.vector()->getType().cast<VectorType>();
auto resultType = extractOp.getResult()->getType();
auto llvmResultType = lowering.convertType(resultType);
@@ -107,21 +107,21 @@
public:
explicit VectorOuterProductOpConversion(MLIRContext *context,
LLVMTypeConverter &typeConverter)
- : LLVMOpLowering(vector::VectorOuterProductOp::getOperationName(),
- context, typeConverter) {}
+ : LLVMOpLowering(vector::OuterProductOp::getOperationName(), context,
+ typeConverter) {}
PatternMatchResult
matchAndRewrite(Operation *op, ArrayRef<Value *> operands,
ConversionPatternRewriter &rewriter) const override {
auto loc = op->getLoc();
- auto adaptor = vector::VectorOuterProductOpOperandAdaptor(operands);
+ auto adaptor = vector::OuterProductOpOperandAdaptor(operands);
auto *ctx = op->getContext();
auto vLHS = adaptor.lhs()->getType().cast<LLVM::LLVMType>();
auto vRHS = adaptor.rhs()->getType().cast<LLVM::LLVMType>();
auto rankLHS = vLHS.getUnderlyingType()->getVectorNumElements();
auto rankRHS = vRHS.getUnderlyingType()->getVectorNumElements();
auto llvmArrayOfVectType = lowering.convertType(
- cast<vector::VectorOuterProductOp>(op).getResult()->getType());
+ cast<vector::OuterProductOp>(op).getResult()->getType());
Value *desc = rewriter.create<LLVM::UndefOp>(loc, llvmArrayOfVectType);
Value *a = adaptor.lhs(), *b = adaptor.rhs();
Value *acc = adaptor.acc().empty() ? nullptr : adaptor.acc().front();
@@ -159,14 +159,14 @@
public:
explicit VectorTypeCastOpConversion(MLIRContext *context,
LLVMTypeConverter &typeConverter)
- : LLVMOpLowering(vector::VectorTypeCastOp::getOperationName(), context,
+ : LLVMOpLowering(vector::TypeCastOp::getOperationName(), context,
typeConverter) {}
PatternMatchResult
matchAndRewrite(Operation *op, ArrayRef<Value *> operands,
ConversionPatternRewriter &rewriter) const override {
auto loc = op->getLoc();
- vector::VectorTypeCastOp castOp = cast<vector::VectorTypeCastOp>(op);
+ vector::TypeCastOp castOp = cast<vector::TypeCastOp>(op);
MemRefType sourceMemRefType =
castOp.getOperand()->getType().cast<MemRefType>();
MemRefType targetMemRefType =
diff --git a/third_party/mlir/lib/Conversion/VectorConversions/VectorToLoops.cpp b/third_party/mlir/lib/Conversion/VectorConversions/VectorToLoops.cpp
index 672b55c..74479b9 100644
--- a/third_party/mlir/lib/Conversion/VectorConversions/VectorToLoops.cpp
+++ b/third_party/mlir/lib/Conversion/VectorConversions/VectorToLoops.cpp
@@ -36,12 +36,12 @@
#include "mlir/IR/Types.h"
using namespace mlir;
-using vector::VectorTransferReadOp;
-using vector::VectorTransferWriteOp;
+using vector::TransferReadOp;
+using vector::TransferWriteOp;
namespace {
-/// Implements lowering of VectorTransferReadOp and VectorTransferWriteOp to a
+/// Implements lowering of TransferReadOp and TransferWriteOp to a
/// proper abstraction for the hardware.
///
/// For now, we only emit a simple loop nest that performs clipped pointwise
@@ -89,22 +89,22 @@
/// load vectors + mask them. Similarly on the write side, load/mask/store for
/// implementing RMW behavior.
///
-/// Lowers VectorTransferOp into a combination of:
+/// Lowers TransferOp into a combination of:
/// 1. local memory allocation;
/// 2. perfect loop nest over:
/// a. scalar load/stores from local buffers (viewed as a scalar memref);
/// a. scalar store/load to original memref (with clipping).
/// 3. vector_load/store
/// 4. local memory deallocation.
-/// Minor variations occur depending on whether a VectorTransferReadOp or
-/// a VectorTransferWriteOp is rewritten.
-template <typename VectorTransferOpTy>
+/// Minor variations occur depending on whether a TransferReadOp or
+/// a TransferWriteOp is rewritten.
+template <typename TransferOpTy>
struct VectorTransferRewriter : public RewritePattern {
explicit VectorTransferRewriter(MLIRContext *context)
- : RewritePattern(VectorTransferOpTy::getOperationName(), 1, context) {}
+ : RewritePattern(TransferOpTy::getOperationName(), 1, context) {}
/// Used for staging the transfer in a local scalar buffer.
- MemRefType tmpMemRefType(VectorTransferOpTy transfer) const {
+ MemRefType tmpMemRefType(TransferOpTy transfer) const {
auto vectorType = transfer.getVectorType();
return MemRefType::get(vectorType.getShape(), vectorType.getElementType(),
{}, 0);
@@ -119,8 +119,8 @@
/// MemRef dimension. If such a dimension with coalescing properties is found,
/// `pivs` and `vectorView` are swapped so that the invocation of
/// LoopNestBuilder captures it in the innermost loop.
-template <typename VectorTransferOpTy>
-void coalesceCopy(VectorTransferOpTy transfer,
+template <typename TransferOpTy>
+void coalesceCopy(TransferOpTy transfer,
SmallVectorImpl<edsc::ValueHandle *> *pivs,
edsc::VectorView *vectorView) {
// rank of the remote memory access, coalescing behavior occurs on the
@@ -152,8 +152,8 @@
/// Emits remote memory accesses that are clipped to the boundaries of the
/// MemRef.
-template <typename VectorTransferOpTy>
-llvm::SmallVector<edsc::ValueHandle, 8> clip(VectorTransferOpTy transfer,
+template <typename TransferOpTy>
+llvm::SmallVector<edsc::ValueHandle, 8> clip(TransferOpTy transfer,
edsc::MemRefView &view,
ArrayRef<edsc::IndexHandle> ivs) {
using namespace mlir::edsc;
@@ -207,7 +207,7 @@
return clippedScalarAccessExprs;
}
-/// Lowers VectorTransferReadOp into a combination of:
+/// Lowers TransferReadOp into a combination of:
/// 1. local memory allocation;
/// 2. perfect loop nest over:
/// a. scalar load from local buffers (viewed as a scalar memref);
@@ -215,7 +215,7 @@
/// 3. vector_load from local buffer (viewed as a memref<1 x vector>);
/// 4. local memory deallocation.
///
-/// Lowers the data transfer part of a VectorTransferReadOp while ensuring no
+/// Lowers the data transfer part of a TransferReadOp while ensuring no
/// out-of-bounds accesses are possible. Out-of-bounds behavior is handled by
/// clipping. This means that a given value in memory can be read multiple
/// times and concurrently.
@@ -251,8 +251,7 @@
/// Performs the rewrite.
template <>
-PatternMatchResult
-VectorTransferRewriter<VectorTransferReadOp>::matchAndRewrite(
+PatternMatchResult VectorTransferRewriter<TransferReadOp>::matchAndRewrite(
Operation *op, PatternRewriter &rewriter) const {
using namespace mlir::edsc;
using namespace mlir::edsc::op;
@@ -260,7 +259,7 @@
using IndexedValue =
TemplatedIndexedValue<intrinsics::std_load, intrinsics::std_store>;
- VectorTransferReadOp transfer = cast<VectorTransferReadOp>(op);
+ TransferReadOp transfer = cast<TransferReadOp>(op);
// 1. Setup all the captures.
ScopedContext scope(rewriter, transfer.getLoc());
@@ -295,7 +294,7 @@
return matchSuccess();
}
-/// Lowers VectorTransferWriteOp into a combination of:
+/// Lowers TransferWriteOp into a combination of:
/// 1. local memory allocation;
/// 2. vector_store to local buffer (viewed as a memref<1 x vector>);
/// 3. perfect loop nest over:
@@ -314,8 +313,7 @@
/// TODO(ntv): implement alternatives to clipping.
/// TODO(ntv): support non-data-parallel operations.
template <>
-PatternMatchResult
-VectorTransferRewriter<VectorTransferWriteOp>::matchAndRewrite(
+PatternMatchResult VectorTransferRewriter<TransferWriteOp>::matchAndRewrite(
Operation *op, PatternRewriter &rewriter) const {
using namespace mlir::edsc;
using namespace mlir::edsc::op;
@@ -323,7 +321,7 @@
using IndexedValue =
TemplatedIndexedValue<intrinsics::std_load, intrinsics::std_store>;
- VectorTransferWriteOp transfer = cast<VectorTransferWriteOp>(op);
+ TransferWriteOp transfer = cast<TransferWriteOp>(op);
// 1. Setup all the captures.
ScopedContext scope(rewriter, transfer.getLoc());
@@ -361,7 +359,6 @@
/// Populate the given list with patterns that convert from Vector to LLVM.
void mlir::populateVectorToAffineLoopsConversionPatterns(
MLIRContext *context, OwningRewritePatternList &patterns) {
- patterns.insert<VectorTransferRewriter<vector::VectorTransferReadOp>,
- VectorTransferRewriter<vector::VectorTransferWriteOp>>(
- context);
+ patterns.insert<VectorTransferRewriter<vector::TransferReadOp>,
+ VectorTransferRewriter<vector::TransferWriteOp>>(context);
}
diff --git a/third_party/mlir/lib/Conversion/VectorConversions/VectorToVector.cpp b/third_party/mlir/lib/Conversion/VectorConversions/VectorToVector.cpp
new file mode 100644
index 0000000..967946e
--- /dev/null
+++ b/third_party/mlir/lib/Conversion/VectorConversions/VectorToVector.cpp
@@ -0,0 +1,397 @@
+//===- VectorToVector.cpp - Conversion within the Vector dialect ----------===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+//
+// This file implements target-independent rewrites as 1->N patterns.
+//
+//===----------------------------------------------------------------------===//
+
+#include <type_traits>
+
+#include "mlir/Analysis/VectorAnalysis.h"
+#include "mlir/Conversion/VectorConversions/VectorConversions.h"
+#include "mlir/Dialect/StandardOps/Ops.h"
+#include "mlir/Dialect/VectorOps/VectorOps.h"
+#include "mlir/EDSC/Builders.h"
+#include "mlir/EDSC/Helpers.h"
+#include "mlir/IR/AffineExpr.h"
+#include "mlir/IR/AffineMap.h"
+#include "mlir/IR/Attributes.h"
+#include "mlir/IR/Builders.h"
+#include "mlir/IR/Function.h"
+#include "mlir/IR/Location.h"
+#include "mlir/IR/Matchers.h"
+#include "mlir/IR/Module.h"
+#include "mlir/IR/OperationSupport.h"
+#include "mlir/IR/PatternMatch.h"
+#include "mlir/IR/Types.h"
+#include "mlir/Support/Functional.h"
+#include "mlir/Support/STLExtras.h"
+
+#include "llvm/Support/CommandLine.h"
+#include "llvm/Support/Debug.h"
+#include "llvm/Support/raw_ostream.h"
+
+#define DEBUG_TYPE "vector-to-vector"
+
+using namespace mlir;
+using llvm::dbgs;
+using mlir::functional::zipMap;
+
+/// Given a shape with sizes greater than 0 along all dimensions,
+/// returns the distance, in number of elements, between a slice in a dimension
+/// and the next slice in the same dimension.
+/// e.g. shape[3, 4, 5] -> linearization_basis[20, 5, 1]
+static SmallVector<int64_t, 8> computeStrides(ArrayRef<int64_t> shape) {
+ if (shape.empty())
+ return {};
+ SmallVector<int64_t, 8> tmp;
+ tmp.reserve(shape.size());
+ int64_t running = 1;
+ for (auto size : llvm::reverse(shape)) {
+    assert(size > 0 && "size must be positive");
+ tmp.push_back(running);
+ running *= size;
+ }
+ return SmallVector<int64_t, 8>(tmp.rbegin(), tmp.rend());
+}
+
+static int64_t computeMaxLinearIndex(ArrayRef<int64_t> basis) {
+ if (basis.empty())
+ return 0;
+ int64_t res = 1;
+ for (auto b : basis)
+ res *= b;
+ return res;
+}
+
+/// Given a shape with sizes greater than 0 along all dimensions, returns the
+/// delinearized components of linearIndex along shape.
+static SmallVector<int64_t, 8> delinearize(int64_t linearIndex,
+ ArrayRef<int64_t> basis) {
+ SmallVector<int64_t, 8> res;
+ res.reserve(basis.size());
+ for (unsigned idx = 0, e = basis.size(); idx < e; ++idx) {
+ assert(basis[idx] > 0);
+ res.push_back(linearIndex / basis[idx]);
+ linearIndex %= basis[idx];
+ }
+ // Sanity check.
+ assert(linearIndex == 0 && "linear index remainder must be 0");
+ return res;
+}
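+
+// Worked example (editorial note): for shape [3, 4, 5], computeStrides
+// returns [20, 5, 1]; delinearize(37, {20, 5, 1}) then yields [1, 3, 2],
+// since 37 = 1*20 + 3*5 + 2*1.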
+
+static constexpr auto kFakeForkOp = "__fake_fork__";
+static constexpr auto kFakeJoinOp = "__fake_join__";
+static constexpr auto kUnrollAttrName = "__unroll__";
+static constexpr auto kBaseCoordAttrName = "__base_coord__";
+
+// Reads the IntegerArray attribute named `kUnrollAttrName` from `op` and
+// returns its representation as a vector of integers.
+static SmallVector<int64_t, 8> extractUnrollFactors(Operation *op) {
+ SmallVector<int64_t, 8> res;
+ auto unrollAttr = op->getAttr(kUnrollAttrName);
+ if (!unrollAttr)
+ return res;
+ auto unrollArrayAttr = unrollAttr.cast<ArrayAttr>();
+ res.reserve(unrollArrayAttr.size());
+ for (auto attr : unrollArrayAttr) {
+ auto unroll = attr.cast<IntegerAttr>().getValue().getSExtValue();
+ assert(unroll > 0);
+ res.push_back(unroll);
+ }
+ return res;
+}
+
+// Creates a custom `kFakeForkOp` used in progressive lowering to other vector
+// operations.
+static Operation *createFakeForkOp(PatternRewriter &builder, Location loc,
+ Value *operand, ArrayRef<Type> resultTypes,
+ ArrayRef<int64_t> unrollFactors = {}) {
+ OperationState *forkOp =
+ new OperationState(loc, kFakeForkOp, operand, resultTypes, {});
+ if (!unrollFactors.empty())
+ forkOp->addAttribute(kUnrollAttrName,
+ builder.getI64ArrayAttr(unrollFactors));
+ return builder.createOperation(*forkOp);
+}
+
+// Creates a custom `kFakeJoinOp` used in progressive lowering to other vector
+// operations.
+static Operation *createFakeJoinOp(PatternRewriter &builder, Location loc,
+ ArrayRef<Value *> operands, Type resultType,
+ ArrayRef<int64_t> unrollFactors = {},
+ ArrayRef<int64_t> baseCoords = {}) {
+ OperationState *joinOp =
+ new OperationState(loc, kFakeJoinOp, operands, resultType, {});
+ if (!unrollFactors.empty())
+ joinOp->addAttribute(kUnrollAttrName,
+ builder.getI64ArrayAttr(unrollFactors));
+ if (!baseCoords.empty())
+ joinOp->addAttribute(kBaseCoordAttrName,
+ builder.getI64ArrayAttr(baseCoords));
+ return builder.createOperation(*joinOp);
+}
+
+// Clones `op` into a new operation that takes `operands` and returns
+// `resultTypes`.
+static Operation *cloneOpWithOperandsAndTypes(PatternRewriter &builder,
+ Location loc, Operation *op,
+ ArrayRef<Value *> operands,
+ ArrayRef<Type> resultTypes) {
+ OperationState *res = new OperationState(loc, op->getName().getStringRef(),
+ operands, resultTypes, {});
+ return builder.createOperation(*res);
+}
+
+// Helper function for Tablegen.
+static bool hasShape(Value *v, ArrayRef<int64_t> shape) {
+ auto t = v->getType().dyn_cast<ShapedType>();
+ if (!t)
+ return false;
+ return std::equal(t.getShape().begin(), t.getShape().end(), shape.begin());
+}
+
+// Entry point for unrolling declarative pattern rewrites.
+// `op` is unrolled to the `targetShape` as follows, for each of its operands:
+// 1. the unrolled type `unrolledVectorType` and number of unrolled instances
+// `numUnrolledInstances` are computed from the `targetShape`. For now it is
+// assumed the unrolling factors divide the vector sizes.
+// 2. a fakeFork cast op is inserted that takes the operand and returns
+// `numUnrolledInstances` results of type `unrolledVectorType`.
+// 3. the original op is cloned `numUnrolledInstances` times, once for each
+// result of the fakeFork cast op.
+// 4. a fakeJoin cast op takes all these results and merges them into a single
+//    aggregate vector result whose type matches the original non-unrolled
+//    op's result type.
+//
+// Example:
+//
+// opA(operand0, operand1) // numUnrolledInstances = 3
+//
+// operand0 operand1
+// | |
+// fork fork
+// <----------gather all fork ops --------->
+// /|\ /|\
+// f00 f01 f02 f10 f11 f12
+// <---------- clone op 3 times --------->
+// opA0(f00, f10), opA1(f01, f11), opA2(f02, f12)
+// \ | /
+// <-------------------- join ------------------------->
+//
+// Other local patterns then kick in iteratively (including DCE) and compose
+// until all the fakeFork and fakeJoin ops are removed.
+//
+// This will be extended in the future to support more advanced use cases than
+// simple pointwise ops.
+static Value *unrollSingleResultOpMatchingType(PatternRewriter &builder,
+ Operation *op,
+ ArrayRef<int64_t> targetShape) {
+ LLVM_DEBUG(dbgs() << "\n[" DEBUG_TYPE
+ "]: unrollSingleResultOpMatchingType on func:\n");
+ LLVM_DEBUG(op->getParentOfType<FuncOp>().print(dbgs()));
+ if (!op->getNumResults())
+ assert(false && "Use precondition till RewriterGen can act on nullptr");
+
+ auto shapedType = op->getResult(0)->getType().dyn_cast_or_null<ShapedType>();
+ if (!shapedType || !shapedType.hasStaticShape())
+ assert(false && "Use precondition till RewriterGen can act on nullptr");
+
+ auto shape = shapedType.getShape();
+ auto maybeUnrollFactors = shapeRatio(shape, targetShape);
+ if (!maybeUnrollFactors.hasValue())
+ assert(false && "Use precondition till RewriterGen can act on nullptr");
+ auto unrollFactors = *maybeUnrollFactors;
+
+ auto loc = op->getLoc();
+ auto numUnrolledInstances = computeMaxLinearIndex(unrollFactors);
+ auto unrolledVectorType =
+ VectorType::get(targetShape, shapedType.getElementType());
+ SmallVector<Type, 4> forkedType(numUnrolledInstances, unrolledVectorType);
+ SmallVector<Operation *, 4> forkeds;
+ forkeds.reserve(numUnrolledInstances);
+ // Create a new forkOp for each operand.
+ for (auto *operand : op->getOperands())
+ forkeds.push_back(
+ createFakeForkOp(builder, loc, operand, forkedType, unrollFactors));
+
+ SmallVector<Operation *, 4> newOps;
+ newOps.reserve(numUnrolledInstances);
+ for (int64_t idx = 0; idx < numUnrolledInstances; ++idx) {
+ SmallVector<Value *, 4> operands;
+ operands.reserve(forkeds.size());
+ for (auto *fork : forkeds) {
+ operands.push_back(fork->getResult(idx));
+ }
+ newOps.push_back(cloneOpWithOperandsAndTypes(builder, loc, op, operands,
+ unrolledVectorType));
+ }
+
+ SmallVector<Value *, 4> newOpResults;
+ newOpResults.reserve(newOps.size());
+ for (auto *newOp : newOps)
+ newOpResults.push_back(newOp->getResult(0));
+
+ return createFakeJoinOp(builder, loc, newOpResults, shapedType, unrollFactors,
+ {0})
+ ->getResult(0);
+}
+
+// Patterns with this benefit just forward arguments to clean up fake forks
+// and fake joins. This is a nicer and more direct cleanup when applicable,
+// so it kicks in with higher precedence.
+static constexpr int64_t kMatchingFakeForkFakeJoinBenefit = 2;
+// Patterns with this benefit extract subvectors with StridedSliceOp so that
+// fake forks fed by block arguments can be rewritten into actual subvectors.
+static constexpr int64_t kFakeForkFromBlockArgBenefit = 1;
+
+namespace mlir {
+namespace vector {
+namespace {
+#include "mlir/Dialect/VectorOps/VectorTransformPatterns.h.inc"
+} // end namespace
+} // end namespace vector
+} // end namespace mlir
+
+// Match a fakeFork fed by a fakeJoin and just forward its operands.
+// This is akin to calling `replaceAllUsesOf` but made to play nice with all the
+// other RewritePatterns.
+struct ConvertMatchingFakeForkFakeJoinOp : public RewritePattern {
+ ConvertMatchingFakeForkFakeJoinOp(MLIRContext *context)
+ // low-benefit to kick-in late
+ : RewritePattern(kFakeForkOp, kMatchingFakeForkFakeJoinBenefit, context) {
+ }
+
+ PatternMatchResult matchAndRewrite(Operation *op,
+ PatternRewriter &rewriter) const override {
+ if (op->getNumOperands() != 1)
+ return matchFailure();
+
+ auto *definingOp = op->getOperand(0)->getDefiningOp();
+ if (!definingOp || definingOp->getName().getStringRef() != kFakeJoinOp)
+ return matchFailure();
+
+ if (definingOp->getNumOperands() != op->getNumResults())
+ return matchFailure();
+
+ for (auto it : llvm::zip(definingOp->getOperands(), op->getResults())) {
+ if (std::get<0>(it)->getType() != std::get<1>(it)->getType())
+ return matchFailure();
+ }
+
+ LLVM_DEBUG(dbgs() << "\n[" DEBUG_TYPE
+ "]: ConvertMatchingFakeForkFakeJoinOp on op: "
+ << *op << " in func:\n");
+ LLVM_DEBUG(op->getParentOfType<FuncOp>().print(dbgs()));
+ SmallVector<Value *, 4> forwardedOperands;
+ forwardedOperands.append(definingOp->getOperands().begin(),
+ definingOp->getOperands().end());
+ rewriter.replaceOp(op, forwardedOperands);
+ return matchSuccess();
+ }
+};
+
+// Rewrites a fakeFork, whose (unique) operand is a blockArgument, into multiple
+// vector.strided_slice ops.
+struct ConvertFakeForkFromBlockArgsOp : public RewritePattern {
+ ConvertFakeForkFromBlockArgsOp(MLIRContext *context)
+ // low benefit to kick in late
+ : RewritePattern(kFakeForkOp, kFakeForkFromBlockArgBenefit, context) {}
+
+ PatternMatchResult matchAndRewrite(Operation *op,
+ PatternRewriter &rewriter) const override {
+ if (op->getNumOperands() != 1)
+ return matchFailure();
+
+ auto *blockArg = op->getOperand(0);
+ if (!isa<BlockArgument>(blockArg))
+ return matchFailure();
+
+ LLVM_DEBUG(dbgs() << "\n[" DEBUG_TYPE
+ "]: ConvertFakeForkFromBlockArgsOp on op: "
+ << *op << " in func:\n");
+ LLVM_DEBUG(op->getParentOfType<FuncOp>().print(dbgs()));
+
+ // Look at the unroll factors remaining on this op and act on the first one.
+ auto unrollFactorsStorage = extractUnrollFactors(op);
+ ArrayRef<int64_t> unrollFactors{unrollFactorsStorage};
+ if (unrollFactors.empty()) {
+ // No more unrollFactors; just sanity-check and forward the unique operand.
+ assert(op->getNumResults() == 1);
+ assert(op->getOperand(0)->getType() == op->getResult(0)->getType());
+ rewriter.replaceOp(op, op->getOperand(0));
+ return matchSuccess();
+ }
+
+ // Strides are always 1 for now.
+ // TODO(b/144845578) support non-1 strides.
+ auto forkedVectorType = op->getOperand(0)->getType().cast<VectorType>();
+ SmallVector<int64_t, 4> strides(unrollFactors.size(), 1);
+ auto nUnrolled = computeMaxLinearIndex(unrollFactors);
+ SmallVector<Value *, 4> extractedVectors;
+ extractedVectors.reserve(op->getNumResults());
+ auto linearizationBasis = computeStrides(unrollFactors);
+ for (unsigned idx = 0; idx < nUnrolled; ++idx) {
+ auto offsets = delinearize(idx, linearizationBasis);
+ offsets = zipMap([](int64_t v1, int64_t v2) { return v1 * v2; }, offsets,
+ unrollFactors);
+ auto leadingSize =
+ forkedVectorType.getShape().take_front(unrollFactors.size());
+ auto sizes = zipMap([](int64_t v1, int64_t v2) { return v1 / v2; },
+ leadingSize, unrollFactors);
+ extractedVectors.push_back(
+ rewriter
+ .create<vector::StridedSliceOp>(op->getLoc(), blockArg, offsets,
+ sizes, strides)
+ .getResult());
+ }
+ rewriter.replaceOp(op, extractedVectors);
+ return matchSuccess();
+ }
+};
+
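Each unrolled instance becomes one strided slice: the per-dimension offsets come from delinearizing the instance index over the unroll factors, and the sizes are shape/unrollFactors. A standalone sketch of the index arithmetic, using shape 4x4 with unroll factors [2, 2] (a case where the factors and the slice sizes coincide):

#include <cassert>
#include <cstdint>
#include <vector>

// Row-major strides of `shape`, e.g. [2, 2] -> [2, 1]; mirrors computeStrides.
std::vector<int64_t> rowMajorStrides(const std::vector<int64_t> &shape) {
  std::vector<int64_t> s(shape.size());
  int64_t running = 1;
  for (int i = static_cast<int>(shape.size()) - 1; i >= 0; --i) {
    s[i] = running;
    running *= shape[i];
  }
  return s;
}

// Decompose `linear` into per-dimension coordinates along `basis`.
std::vector<int64_t> delinearize(int64_t linear,
                                 const std::vector<int64_t> &basis) {
  std::vector<int64_t> coords;
  for (int64_t b : basis) {
    coords.push_back(linear / b);
    linear %= b;
  }
  return coords;
}

int main() {
  std::vector<int64_t> shape = {4, 4}, factors = {2, 2};
  auto basis = rowMajorStrides(factors);  // [2, 1]
  auto coords = delinearize(3, basis);    // instance 3 -> coordinates [1, 1]
  std::vector<int64_t> offsets, sizes;
  for (size_t i = 0; i < shape.size(); ++i) {
    sizes.push_back(shape[i] / factors[i]);    // [2, 2]
    offsets.push_back(coords[i] * factors[i]); // [2, 2]
  }
  assert(offsets[0] == 2 && offsets[1] == 2);
  assert(sizes[0] == 2 && sizes[1] == 2);
}
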
+// Simple DCE for fakeForkOps/fakeJoinOps; we do not want them to escape a
+// transformation (otherwise the transformation is considered incorrect).
+struct FakeForkTrait {
+ static constexpr char const *name = kFakeForkOp;
+};
+struct FakeJoinTrait {
+ static constexpr char const *name = kFakeJoinOp;
+};
+
+template <typename OpNameTrait> struct DCEPattern : public RewritePattern {
+ DCEPattern(MLIRContext *context)
+ // low benefit to kick in late
+ : RewritePattern(OpNameTrait::name, 0, context) {}
+
+ PatternMatchResult matchAndRewrite(Operation *op,
+ PatternRewriter &rewriter) const override {
+ if (!op->use_empty())
+ return matchFailure();
+ rewriter.eraseOp(op);
+ return matchSuccess();
+ }
+};
+
+void mlir::populateVectorToVectorConversionPatterns(
+ MLIRContext *context, OwningRewritePatternList &patterns,
+ ArrayRef<int64_t> coarseVectorShape, ArrayRef<int64_t> fineVectorShape) {
+ vector::populateWithGenerated(context, &patterns);
+ patterns
+ .insert<ConvertMatchingFakeForkFakeJoinOp, ConvertFakeForkFromBlockArgsOp,
+ DCEPattern<FakeForkTrait>, DCEPattern<FakeJoinTrait>>(context);
+}
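A minimal sketch of a driver for this entry point, mirroring the greedy-rewrite pass structure used elsewhere in this change; the pass name and the test shapes are illustrative, not part of this patch:

namespace {
struct TestVectorToVectorConversion
    : public FunctionPass<TestVectorToVectorConversion> {
  void runOnFunction() override {
    OwningRewritePatternList patterns;
    // Coarse and fine vector shapes are placeholders for a real target.
    populateVectorToVectorConversionPatterns(
        &getContext(), patterns, /*coarseVectorShape=*/{4, 4},
        /*fineVectorShape=*/{2, 2});
    applyPatternsGreedily(getFunction(), patterns);
  }
};
} // end anonymous namespace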
diff --git a/third_party/mlir/lib/Dialect/Linalg/CMakeLists.txt b/third_party/mlir/lib/Dialect/Linalg/CMakeLists.txt
index 884e4d2..4b7cd81 100644
--- a/third_party/mlir/lib/Dialect/Linalg/CMakeLists.txt
+++ b/third_party/mlir/lib/Dialect/Linalg/CMakeLists.txt
@@ -5,7 +5,6 @@
IR/LinalgTypes.cpp
Transforms/Fusion.cpp
Transforms/LinalgTransforms.cpp
- Transforms/LowerToLLVMDialect.cpp
Transforms/LowerToLoops.cpp
Transforms/Promotion.cpp
Transforms/Tiling.cpp
diff --git a/third_party/mlir/lib/Dialect/Linalg/Transforms/LinalgTransforms.cpp b/third_party/mlir/lib/Dialect/Linalg/Transforms/LinalgTransforms.cpp
index 0b6aae6..b027d55 100644
--- a/third_party/mlir/lib/Dialect/Linalg/Transforms/LinalgTransforms.cpp
+++ b/third_party/mlir/lib/Dialect/Linalg/Transforms/LinalgTransforms.cpp
@@ -19,9 +19,9 @@
//
//===----------------------------------------------------------------------===//
+#include "mlir/Dialect/Linalg/Transforms/LinalgTransforms.h"
#include "mlir/Dialect/Linalg/Analysis/DependenceAnalysis.h"
#include "mlir/Dialect/Linalg/IR/LinalgOps.h"
-#include "mlir/Dialect/Linalg/Passes.h"
#include "mlir/Dialect/Linalg/Utils/Utils.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/Pass/Pass.h"
@@ -30,54 +30,60 @@
using namespace mlir::linalg;
// Marker used as attribute name in generated Linalg rewriting transformations.
-static constexpr auto kLinalgTransformMarker = "__internal_linalg_transform__";
+constexpr StringRef mlir::linalg::LinalgTransforms::kLinalgTransformMarker;
-static LogicalResult tileLinalgOpAndSetMarker(PatternRewriter &rewriter,
- Operation *op,
- ArrayRef<int64_t> sizes,
- StringRef linalgMarker) {
- auto tileRes = tileLinalgOperation(rewriter, op, sizes);
- if (!tileRes)
- return failure();
- tileRes->op.setAttr(kLinalgTransformMarker,
- rewriter.getStringAttr(linalgMarker));
- return success();
-}
-
-static LogicalResult tileAndFuseLinalgOpAndSetMarker(PatternRewriter &rewriter,
+LogicalResult mlir::linalg::tileLinalgOpAndSetMarker(PatternRewriter &rewriter,
Operation *op,
ArrayRef<int64_t> sizes,
StringRef linalgMarker) {
auto tileRes = tileLinalgOperation(rewriter, op, sizes);
if (!tileRes)
return failure();
- tileRes->op.setAttr(kLinalgTransformMarker,
+ tileRes->op.setAttr(LinalgTransforms::kLinalgTransformMarker,
+ rewriter.getStringAttr(linalgMarker));
+ return success();
+}
+
+LogicalResult mlir::linalg::tileAndFuseLinalgOpAndSetMarker(
+ PatternRewriter &rewriter, Operation *op, ArrayRef<int64_t> sizes,
+ ArrayRef<int64_t> operandIndicesToFuse, StringRef linalgMarker) {
+ auto tileRes = tileLinalgOperation(rewriter, op, sizes);
+ if (!tileRes)
+ return failure();
+ tileRes->op.setAttr(LinalgTransforms::kLinalgTransformMarker,
rewriter.getStringAttr(linalgMarker));
Aliases aliases;
auto G = LinalgDependenceGraph::buildDependenceGraph(
aliases, op->getParentOfType<FuncOp>());
- auto fusionRes = fuseProducerOf(rewriter, tileRes->op, 0, G);
- if (!fusionRes) {
- // Linalg fusion requires tiled loops to even determine whether it is
- // possible to fuse. As a consequence, the pattern may fail even though a
- // tiled version of op has already been introduced.
- // So we need to remove the tiled version ourselves in case of failure.
- // Another possibility is to ensure the constraints on the pattern guarantee
- // that fusion will occur and just assert here.
- // As we develop more complex patterns we can choose what is best.
- rewriter.eraseOp(tileRes->loops[0]);
- return failure();
+ SmallVector<Operation *, 4> originalProducers;
+ for (auto operandIdx : operandIndicesToFuse) {
+ auto fusionRes = fuseProducerOf(rewriter, tileRes->op, operandIdx, G);
+ if (!fusionRes) {
+ // Linalg fusion requires tiled loops to even determine whether it is
+ // possible to fuse. As a consequence, the pattern may fail even though a
+ // tiled version of op has already been introduced.
+ // So we need to remove the tiled version ourselves in case of failure.
+ // Another possibility is to ensure the constraints on the pattern
+ // guarantee that fusion will occur and just assert here. As we develop
+ // more complex patterns we can choose what is best.
+ rewriter.eraseOp(tileRes->loops[0]);
+ return failure();
+ }
+ fusionRes->fusedProducer.setAttr(LinalgTransforms::kLinalgTransformMarker,
+ rewriter.getStringAttr(linalgMarker));
+ originalProducers.push_back(fusionRes->originalProducer);
}
- fusionRes->fusedProducer.setAttr(kLinalgTransformMarker,
- rewriter.getStringAttr(linalgMarker));
- // The originalProducer can now be safely erased. This is similar to SSA-value
- // use-def but in the world of buffer + structured ops.
- rewriter.eraseOp(fusionRes->originalProducer);
+
+ // The originalProducers can now be safely erased. This is similar to
+ // SSA-value use-def but in the world of buffer + structured ops.
+ for (auto *originalProducer : originalProducers)
+ rewriter.eraseOp(originalProducer);
return success();
}
-template <typename OpTy>
-bool isProducedByOpOfType(Operation *consumerOp, Value *consumedView) {
+bool mlir::linalg::detail::isProducedByOpOfTypeImpl(
+ Operation *consumerOp, Value *consumedView,
+ llvm::function_ref<bool(Operation *)> isaOpType) {
LinalgOp consumer = dyn_cast<LinalgOp>(consumerOp);
if (!consumer)
return false;
@@ -94,39 +100,8 @@
auto producer = cast<LinalgOp>(dependence.dependentOpView.op);
if (!isProducerLastWriteOfView(G, consumer, consumedView, producer))
continue;
- if (isa<OpTy>(dependence.dependentOpView.op))
+ if (isaOpType(dependence.dependentOpView.op))
return true;
}
return false;
}
-
-namespace mlir {
-namespace linalg {
-namespace {
-#include "mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.h.inc"
-} // end namespace
-} // end namespace linalg
-} // end namespace mlir
-
-namespace {
-struct LinalgTransforms : public FunctionPass<LinalgTransforms> {
- void runOnFunction() override;
-};
-} // end anonymous namespace
-
-/// Apply transformations specified as patterns.
-void LinalgTransforms::runOnFunction() {
- OwningRewritePatternList patterns;
- auto funcOp = getFunction();
-
- // Add the generated patterns to the list.
- linalg::populateWithGenerated(&getContext(), &patterns);
- applyPatternsGreedily(funcOp, patterns);
-
- // Drop the marker.
- funcOp.walk([](LinalgOp op) { op.removeAttr(kLinalgTransformMarker); });
-}
-
-static PassRegistration<LinalgTransforms>
- pass("test-linalg-transform-patterns",
- "Test Linalg transformation patterns by applying them greedily.");
diff --git a/third_party/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp b/third_party/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
index 3c6e139..32b7034 100644
--- a/third_party/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
+++ b/third_party/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
@@ -55,11 +55,6 @@
llvm::cl::desc("Test generation of dynamic promoted buffers"),
llvm::cl::cat(clOptionsCategory), llvm::cl::init(false));
-static AffineMap getAffineDifferenceMap(MLIRContext *context) {
- AffineExpr d0(getAffineDimExpr(0, context)), d1(getAffineDimExpr(1, context));
- return AffineMap::get(2, 0, {d0 - d1});
-}
-
static Value *allocBuffer(Type elementType, Value *size, bool dynamicBuffers) {
auto *ctx = size->getContext();
auto width = llvm::divideCeil(elementType.getIntOrFloatBitWidth(), 8);
diff --git a/third_party/mlir/lib/Dialect/StandardOps/Ops.cpp b/third_party/mlir/lib/Dialect/StandardOps/Ops.cpp
index 83c0867..c01bccc 100644
--- a/third_party/mlir/lib/Dialect/StandardOps/Ops.cpp
+++ b/third_party/mlir/lib/Dialect/StandardOps/Ops.cpp
@@ -44,37 +44,6 @@
// StandardOpsDialect Interfaces
//===----------------------------------------------------------------------===//
namespace {
-struct StdOpAsmInterface : public OpAsmDialectInterface {
- using OpAsmDialectInterface::OpAsmDialectInterface;
-
- /// Get a special name to use when printing the given operation. The desired
- /// name should be streamed into 'os'.
- void getOpResultName(Operation *op, raw_ostream &os) const final {
- if (ConstantOp constant = dyn_cast<ConstantOp>(op))
- return getConstantOpResultName(constant, os);
- }
-
- /// Get a special name to use when printing the given constant.
- static void getConstantOpResultName(ConstantOp op, raw_ostream &os) {
- Type type = op.getType();
- Attribute value = op.getValue();
- if (auto intCst = value.dyn_cast<IntegerAttr>()) {
- if (type.isIndex()) {
- os << 'c' << intCst.getInt();
- } else if (type.cast<IntegerType>().isInteger(1)) {
- // i1 constants get special names.
- os << (intCst.getInt() ? "true" : "false");
- } else {
- os << 'c' << intCst.getInt() << '_' << type;
- }
- } else if (type.isa<FunctionType>()) {
- os << 'f';
- } else {
- os << "cst";
- }
- }
-};
-
/// This class defines the interface for handling inlining with standard
/// operations.
struct StdInlinerInterface : public DialectInlinerInterface {
@@ -191,7 +160,7 @@
#define GET_OP_LIST
#include "mlir/Dialect/StandardOps/Ops.cpp.inc"
>();
- addInterfaces<StdInlinerInterface, StdOpAsmInterface>();
+ addInterfaces<StdInlinerInterface>();
}
void mlir::printDimAndSymbolList(Operation::operand_iterator begin,
@@ -1183,6 +1152,31 @@
return getValue();
}
+void ConstantOp::getAsmResultNames(
+ function_ref<void(Value *, StringRef)> setNameFn) {
+ Type type = getType();
+ if (auto intCst = getValue().dyn_cast<IntegerAttr>()) {
+ IntegerType intTy = type.dyn_cast<IntegerType>();
+
+ // Sugar i1 constants with 'true' and 'false'.
+ if (intTy && intTy.getWidth() == 1)
+ return setNameFn(getResult(), (intCst.getInt() ? "true" : "false"));
+
+ // Otherwise, build a complex name with the value and type.
+ SmallString<32> specialNameBuffer;
+ llvm::raw_svector_ostream specialName(specialNameBuffer);
+ specialName << 'c' << intCst.getInt();
+ if (intTy)
+ specialName << '_' << type;
+ setNameFn(getResult(), specialName.str());
+
+ } else if (type.isa<FunctionType>()) {
+ setNameFn(getResult(), "f");
+ } else {
+ setNameFn(getResult(), "cst");
+ }
+}
+
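The hook above yields result names such as %true, %false, %c42_i32, %f, and %cst. A standalone sketch of the string-building branch for integer constants (the type is represented as a plain string here, an obvious simplification):

#include <cstdint>
#include <iostream>
#include <string>

// i1 constants become true/false; other integers become 'c<value>_<type>'.
std::string constantName(int64_t value, unsigned width,
                         const std::string &type) {
  if (width == 1)
    return value ? "true" : "false";
  return "c" + std::to_string(value) + "_" + type;
}

int main() {
  std::cout << constantName(1, 1, "i1") << "\n";    // true
  std::cout << constantName(42, 32, "i32") << "\n"; // c42_i32
}
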
/// Returns true if a constant operation can be built with the given value and
/// result type.
bool ConstantOp::isBuildableWith(Attribute value, Type type) {
@@ -2537,16 +2531,37 @@
memRefType.getMemorySpace());
}
+void mlir::SubViewOp::build(Builder *b, OperationState &result, Type resultType,
+ Value *source, unsigned num_offsets,
+ unsigned num_sizes, unsigned num_strides,
+ ArrayRef<Value *> offsets, ArrayRef<Value *> sizes,
+ ArrayRef<Value *> strides) {
+ SmallVector<Value *, 8> operands;
+ operands.reserve(num_offsets + num_sizes + num_strides);
+ operands.append(offsets.begin(), offsets.end());
+ operands.append(sizes.begin(), sizes.end());
+ operands.append(strides.begin(), strides.end());
+ build(b, result, resultType, source, b->getI32IntegerAttr(num_offsets),
+ b->getI32IntegerAttr(num_sizes), b->getI32IntegerAttr(num_strides),
+ operands);
+}
+
void mlir::SubViewOp::build(Builder *b, OperationState &result, Value *source,
ArrayRef<Value *> offsets, ArrayRef<Value *> sizes,
ArrayRef<Value *> strides, Type resultType,
ArrayRef<NamedAttribute> attrs) {
if (!resultType)
resultType = inferSubViewResultType(source->getType().cast<MemRefType>());
- build(b, result, resultType, source, offsets, sizes, strides);
+ build(b, result, resultType, source, offsets.size(), sizes.size(),
+ strides.size(), offsets, sizes, strides);
result.addAttributes(attrs);
}
+void mlir::SubViewOp::build(Builder *b, OperationState &result, Type resultType,
+ Value *source) {
+ build(b, result, resultType, source, 0, 0, 0, {}, {}, {});
+}
+
static ParseResult parseSubViewOp(OpAsmParser &parser, OperationState &result) {
OpAsmParser::OperandType srcInfo;
SmallVector<OpAsmParser::OperandType, 4> offsetsInfo;
@@ -2554,11 +2569,20 @@
SmallVector<OpAsmParser::OperandType, 4> stridesInfo;
auto indexType = parser.getBuilder().getIndexType();
Type srcType, dstType;
- return failure(
- parser.parseOperand(srcInfo) ||
+ if (parser.parseOperand(srcInfo) ||
parser.parseOperandList(offsetsInfo, OpAsmParser::Delimiter::Square) ||
parser.parseOperandList(sizesInfo, OpAsmParser::Delimiter::Square) ||
- parser.parseOperandList(stridesInfo, OpAsmParser::Delimiter::Square) ||
+ parser.parseOperandList(stridesInfo, OpAsmParser::Delimiter::Square)) {
+ return failure();
+ }
+ auto builder = parser.getBuilder();
+ result.addAttribute("num_offsets",
+ builder.getI32IntegerAttr(offsetsInfo.size()));
+ result.addAttribute("num_sizes", builder.getI32IntegerAttr(sizesInfo.size()));
+ result.addAttribute("num_strides",
+ builder.getI32IntegerAttr(stridesInfo.size()));
+
+ return failure(
parser.parseOptionalAttrDict(result.attributes) ||
parser.parseColonType(srcType) ||
parser.resolveOperand(srcInfo, srcType, result.operands) ||
@@ -2577,13 +2601,21 @@
p << "][";
p.printOperands(op.getDynamicStrides());
p << ']';
- p.printOptionalAttrDict(op.getAttrs());
+ SmallVector<StringRef, 3> elidedAttrs = {"num_offsets", "num_sizes",
+ "num_strides"};
+ p.printOptionalAttrDict(op.getAttrs(), elidedAttrs);
p << " : " << op.getOperand(0)->getType() << " to " << op.getType();
}
static LogicalResult verify(SubViewOp op) {
- auto baseType = op.getOperand(0)->getType().cast<MemRefType>();
- auto subViewType = op.getResult()->getType().cast<MemRefType>();
+ auto baseType = op.getBaseMemRefType().cast<MemRefType>();
+ auto subViewType = op.getType();
+
+ // The ranks of the base memref and the result subview must match.
+ if (baseType.getRank() != subViewType.getRank()) {
+ return op.emitError(
+ "expected rank of result type to match rank of base type");
+ }
// The base memref and the view memref should be in the same memory space.
if (baseType.getMemorySpace() != subViewType.getMemorySpace())
@@ -2603,23 +2635,83 @@
if (failed(getStridesAndOffset(subViewType, subViewStrides, subViewOffset)))
return op.emitError("result type ") << subViewType << " is not strided";
- unsigned memrefOperandCount = 1;
- unsigned numDynamicOffsets = llvm::size(op.getDynamicOffsets());
- unsigned numDynamicSizes = llvm::size(op.getDynamicSizes());
- unsigned numDynamicStrides = llvm::size(op.getDynamicStrides());
-
- // Verify that we have the correct number of operands for the result type.
- if (op.getNumOperands() != memrefOperandCount + numDynamicOffsets +
- numDynamicSizes + numDynamicStrides)
- return op.emitError("incorrect number of operands for type ")
+ // The number of offsets should be either zero or the rank of the memref.
+ if (op.getNumOffsets() != 0 && op.getNumOffsets() != subViewType.getRank()) {
+ return op.emitError("expected number of dynamic offsets specified to match "
+ "the rank of the result type ")
<< subViewType;
+ }
- // Verify that the subview layout map has a dynamic offset.
- if (op.getNumOperands() > 1 &&
- subViewOffset != MemRefType::getDynamicStrideOrOffset())
- return op.emitError("subview memref layout map must specify a dynamic "
- "offset for type ")
+ // The number of sizes should be either zero or the rank of the memref.
+ if (op.getNumSizes() != 0 && op.getNumSizes() != subViewType.getRank()) {
+ return op.emitError("expected number of dynamic sizes specified to match "
+ "the rank of the result type ")
<< subViewType;
+ }
+
+ // The number of strides should be either zero or the rank of the memref.
+ if (op.getNumStrides() != 0 && op.getNumStrides() != subViewType.getRank()) {
+ return op.emitError("expected number of dynamic strides specified to match "
+ "the rank of the result type ")
+ << subViewType;
+ }
+
+ // Verify that if the shape of the subview type is static, then sizes are not
+ // dynamic values, and vice versa.
+ if ((subViewType.hasStaticShape() && op.getNumSizes() != 0) ||
+ (op.getNumSizes() == 0 && !subViewType.hasStaticShape())) {
+ return op.emitError("invalid to specify dynamic sizes when subview result "
+ "type is statically shaped and viceversa");
+ }
+ if (op.getNumSizes() > 0) {
+ // Verify that none of the shape values of the result type are static.
+ if (llvm::any_of(subViewType.getShape(), [](int64_t dim) {
+ return dim != ShapedType::kDynamicSize;
+ })) {
+ // TODO: This is based on the assumption that the number of size arguments
+ // is either 0 or the rank of the result type. It is possible to have more
+ // fine-grained verification where only particular dimensions are
+ // dynamic. That probably needs further changes to the shape op
+ // specification.
+ return op.emitError("expected shape of result type to be fully dynamic "
+ "when sizes are specified");
+ }
+ }
+
+ // Verify that the subview offset is dynamic whenever dynamic offsets are
+ // specified, or the base memref has a dynamic offset or dynamic strides.
+ if ((op.getNumOffsets() > 0 ||
+ baseOffset == MemRefType::getDynamicStrideOrOffset() ||
+ llvm::is_contained(baseStrides,
+ MemRefType::getDynamicStrideOrOffset())) &&
+ subViewOffset != MemRefType::getDynamicStrideOrOffset()) {
+ return op.emitError(
+ "expected result memref layout map to have dynamic offset");
+ }
+
+ // For now, verify that if dynamic strides are specified, then all strides of
+ // the result memref type are dynamic.
+ if (op.getNumStrides() > 0) {
+ if (llvm::any_of(subViewStrides, [](int64_t stride) {
+ return stride != MemRefType::getDynamicStrideOrOffset();
+ })) {
+ return op.emitError("expected result type to have dynamic strides");
+ }
+ }
+
+ // If any stride of the base memref is dynamic, then the corresponding
+ // stride of the subview must also be dynamic.
+ assert(baseStrides.size() == subViewStrides.size());
+ for (auto stride : enumerate(baseStrides)) {
+ if (stride.value() == MemRefType::getDynamicStrideOrOffset() &&
+ subViewStrides[stride.index()] !=
+ MemRefType::getDynamicStrideOrOffset()) {
+ return op.emitError(
+ "expected result type to have dynamic stride along a dimension if "
+ "the base memref type has dynamic stride along that dimension");
+ }
+ }
// Verify dynamic strides symbols were added to correct dimensions based
// on dynamic sizes.
@@ -2773,30 +2865,27 @@
};
} // end anonymous namespace
-
SubViewOp::operand_range SubViewOp::getDynamicOffsets() {
- if (hasConstantOffsetSizesAndStrides(getBaseMemRefType()) &&
- hasConstantOffsetSizesAndStrides(getType()))
- return {operand_end(), operand_end()};
- return {operand_begin() + 1, operand_begin() + 1 + getType().getRank()};
+ auto numOffsets = getNumOffsets();
+ assert(getNumOperands() >= numOffsets + 1);
+ return {operand_begin() + 1, operand_begin() + 1 + numOffsets};
}
SubViewOp::operand_range SubViewOp::getDynamicSizes() {
- if (hasConstantOffsetSizesAndStrides(getBaseMemRefType()) &&
- hasConstantOffsetSizesAndStrides(getType()))
- return {operand_end(), operand_end()};
- unsigned sizesOperandsStart = 1 + getType().getRank();
- return {operand_begin() + sizesOperandsStart,
- operand_begin() + sizesOperandsStart + getType().getRank()};
+ auto numSizes = getNumSizes();
+ auto numOffsets = getNumOffsets();
+ assert(getNumOperands() >= numSizes + numOffsets + 1);
+ return {operand_begin() + 1 + numOffsets,
+ operand_begin() + 1 + numOffsets + numSizes};
}
SubViewOp::operand_range SubViewOp::getDynamicStrides() {
- if (hasConstantOffsetSizesAndStrides(getBaseMemRefType()) &&
- hasConstantOffsetSizesAndStrides(getType()))
- return {operand_end(), operand_end()};
- unsigned stridesOperandsStart = 1 + 2 * getType().getRank();
- return {operand_begin() + stridesOperandsStart,
- operand_begin() + stridesOperandsStart + getType().getRank()};
+ auto numSizes = getNumSizes();
+ auto numOffsets = getNumOffsets();
+ auto numStrides = getNumStrides();
+ assert(getNumOperands() >= numSizes + numOffsets + numStrides + 1);
+ return {operand_begin() + (1 + numOffsets + numSizes),
+ operand_begin() + (1 + numOffsets + numSizes + numStrides)};
}
void SubViewOp::getCanonicalizationPatterns(OwningRewritePatternList &results,
diff --git a/third_party/mlir/lib/Dialect/VectorOps/VectorOps.cpp b/third_party/mlir/lib/Dialect/VectorOps/VectorOps.cpp
index ed0ed43..684616f 100644
--- a/third_party/mlir/lib/Dialect/VectorOps/VectorOps.cpp
+++ b/third_party/mlir/lib/Dialect/VectorOps/VectorOps.cpp
@@ -44,7 +44,185 @@
}
//===----------------------------------------------------------------------===//
-// VectorExtractElementOp
+// ContractionOp
+//===----------------------------------------------------------------------===//
+
+static ParseResult parseContractionOp(OpAsmParser &parser,
+ OperationState &result) {
+ OpAsmParser::OperandType lhsInfo;
+ OpAsmParser::OperandType rhsInfo;
+ OpAsmParser::OperandType accInfo;
+ SmallVector<OpAsmParser::OperandType, 2> masksInfo;
+ SmallVector<Type, 2> types;
+ Type resultVectorType;
+ auto loc = parser.getCurrentLocation();
+ if (parser.parseOperand(lhsInfo) || parser.parseComma() ||
+ parser.parseOperand(rhsInfo) || parser.parseComma() ||
+ parser.parseOperand(accInfo) ||
+ parser.parseTrailingOperandList(masksInfo) ||
+ parser.parseOptionalAttrDict(result.attributes) ||
+ parser.parseColonTypeList(types) ||
+ parser.parseKeywordType("into", resultVectorType) ||
+ parser.resolveOperand(lhsInfo, types[0], result.operands) ||
+ parser.resolveOperand(rhsInfo, types[1], result.operands) ||
+ parser.resolveOperand(accInfo, resultVectorType, result.operands) ||
+ parser.addTypeToList(resultVectorType, result.types))
+ return failure();
+
+ if (masksInfo.empty())
+ return success();
+ if (masksInfo.size() != 2)
+ return parser.emitError(parser.getNameLoc(),
+ "expected zero or exactly 2 vector mask operands");
+ auto indexType = parser.getBuilder().getIndexType();
+ auto lhsType = types[0].cast<VectorType>();
+ auto rhsType = types[1].cast<VectorType>();
+ SmallVector<Type, 2> maskTypes;
+ SmallVector<Type, 4> lhsMaskElementTypes(lhsType.getRank(), indexType);
+ maskTypes.push_back(
+ TupleType::get(lhsMaskElementTypes, parser.getBuilder().getContext()));
+ SmallVector<Type, 4> rhsMaskElementTypes(rhsType.getRank(), indexType);
+ maskTypes.push_back(
+ TupleType::get(rhsMaskElementTypes, parser.getBuilder().getContext()));
+ if (parser.resolveOperands(masksInfo, maskTypes, loc, result.operands))
+ return failure();
+ return success();
+}
+
+static void print(OpAsmPrinter &p, ContractionOp op) {
+ p << op.getOperationName() << " " << *op.lhs() << ", " << *op.rhs();
+ p << ", " << *op.acc();
+ if (llvm::size(op.masks()) == 2) {
+ p << ", " << **op.masks().begin();
+ p << ", " << **(op.masks().begin() + 1);
+ }
+ p.printOptionalAttrDict(op.getAttrs());
+ p << " : " << op.lhs()->getType() << ", " << op.rhs()->getType() << " into "
+ << op.getResultType();
+}
+
+static bool verifyDimMap(VectorType lhsType, VectorType rhsType,
+ const std::vector<std::pair<int64_t, int64_t>> &map) {
+ for (auto &dimPair : map) {
+ if (dimPair.first < 0 || dimPair.first >= lhsType.getRank() ||
+ dimPair.second < 0 || dimPair.second >= rhsType.getRank() ||
+ lhsType.getDimSize(dimPair.first) != rhsType.getDimSize(dimPair.second))
+ return false;
+ }
+ return true;
+}
+
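A dim-map pair is valid only when both indices are in range for their operand and the mapped extents agree. For lhs 2x3 and rhs 3x4, the pair (1, 0) is valid (3 == 3) while (0, 0) is not (2 != 3). A minimal standalone check mirroring that rule:

#include <cassert>
#include <cstdint>
#include <vector>

bool dimPairValid(const std::vector<int64_t> &lhs,
                  const std::vector<int64_t> &rhs, int64_t l, int64_t r) {
  return l >= 0 && l < static_cast<int64_t>(lhs.size()) && r >= 0 &&
         r < static_cast<int64_t>(rhs.size()) && lhs[l] == rhs[r];
}

int main() {
  std::vector<int64_t> lhs = {2, 3}, rhs = {3, 4};
  assert(dimPairValid(lhs, rhs, 1, 0));  // 3 == 3
  assert(!dimPairValid(lhs, rhs, 0, 0)); // 2 != 3
}
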
+static bool verifyOutputShape(
+ VectorType lhsType, VectorType rhsType, VectorType accType,
+ VectorType resType,
+ const std::vector<std::pair<int64_t, int64_t>> &contractingDimMap,
+ const std::vector<std::pair<int64_t, int64_t>> &batchDimMap) {
+ DenseSet<int64_t> lhsContractingDimSet;
+ DenseSet<int64_t> rhsContractingDimSet;
+ for (auto &dimPair : contractingDimMap) {
+ lhsContractingDimSet.insert(dimPair.first);
+ rhsContractingDimSet.insert(dimPair.second);
+ }
+ DenseSet<int64_t> rhsBatchDimSet;
+ for (auto &dimPair : batchDimMap)
+ rhsBatchDimSet.insert(dimPair.second);
+
+ // Add free and batch dimensions from 'lhsType' to 'expectedResultDims'.
+ SmallVector<int64_t, 4> expectedResultDims;
+ for (int64_t i = 0, e = lhsType.getRank(); i < e; ++i) {
+ if (lhsContractingDimSet.count(i) > 0)
+ continue;
+ expectedResultDims.push_back(lhsType.getDimSize(i));
+ }
+
+ // Add free dimensions from 'rhsType' to 'expectedResultDims'.
+ for (int64_t i = 0, e = rhsType.getRank(); i < e; ++i) {
+ if (rhsContractingDimSet.count(i) > 0 || rhsBatchDimSet.count(i) > 0)
+ continue;
+ expectedResultDims.push_back(rhsType.getDimSize(i));
+ }
+
+ // Verify each dimension of 'resType' against 'expectedResultDims'.
+ if (resType.getShape().size() != expectedResultDims.size() ||
+ accType.getShape().size() != expectedResultDims.size())
+ return false;
+ for (int64_t i = 0, e = resType.getRank(); i < e; ++i) {
+ if (resType.getDimSize(i) != expectedResultDims[i] ||
+ accType.getDimSize(i) != expectedResultDims[i])
+ return false;
+ }
+ return true;
+}
+
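Concretely, for a plain matrix multiply with lhs vector<2x3xf32>, rhs vector<3x4xf32>, contracting pair (1, 0), and no batch dimensions, the free dimensions are 2 (from lhs) and 4 (from rhs), so acc and the result must both be vector<2x4xf32>. A standalone sketch of the expected-shape computation:

#include <cassert>
#include <cstdint>
#include <set>
#include <utility>
#include <vector>

std::vector<int64_t> expectedResultShape(
    const std::vector<int64_t> &lhs, const std::vector<int64_t> &rhs,
    const std::vector<std::pair<int64_t, int64_t>> &contracting,
    const std::vector<std::pair<int64_t, int64_t>> &batch) {
  std::set<int64_t> lhsContract, rhsContract, rhsBatch;
  for (auto &p : contracting) {
    lhsContract.insert(p.first);
    rhsContract.insert(p.second);
  }
  for (auto &p : batch)
    rhsBatch.insert(p.second);

  std::vector<int64_t> result;
  // Free and batch dimensions of lhs, in order.
  for (int64_t i = 0, e = lhs.size(); i < e; ++i)
    if (!lhsContract.count(i))
      result.push_back(lhs[i]);
  // Free dimensions of rhs (its batch dims were already contributed by lhs).
  for (int64_t i = 0, e = rhs.size(); i < e; ++i)
    if (!rhsContract.count(i) && !rhsBatch.count(i))
      result.push_back(rhs[i]);
  return result;
}

int main() {
  auto shape = expectedResultShape({2, 3}, {3, 4}, {{1, 0}}, {});
  assert((shape == std::vector<int64_t>{2, 4}));
}
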
+static LogicalResult verify(ContractionOp op) {
+ auto lhsType = op.getLhsType();
+ auto rhsType = op.getRhsType();
+ auto accType = op.getAccType();
+ auto resType = op.getResultType();
+ auto contractingDimMap = op.getContractingDimMap();
+ auto batchDimMap = op.getBatchDimMap();
+
+ // Verify at least one contracting dimension pair was specified.
+ if (contractingDimMap.empty())
+ return op.emitOpError("expected at least one contracting dimension pair");
+
+ // Verify contracting dimension map was properly constructed.
+ if (!verifyDimMap(lhsType, rhsType, contractingDimMap))
+ return op.emitOpError("invalid contracting dimension map");
+
+ // Verify batch dimension map was properly constructed.
+ if (!verifyDimMap(lhsType, rhsType, batchDimMap))
+ return op.emitOpError("invalid batch dimension map");
+
+ // Verify 'accType' and 'resType' shape.
+ if (!verifyOutputShape(lhsType, rhsType, accType, resType, contractingDimMap,
+ batchDimMap))
+ return op.emitOpError("invalid accumulator/result vector shape");
+
+ // Verify that either two vector masks are set or none are set.
+ auto lhsMaskType = op.getLHSVectorMaskType();
+ auto rhsMaskType = op.getRHSVectorMaskType();
+ if ((lhsMaskType && !rhsMaskType) || (!lhsMaskType && rhsMaskType))
+ return op.emitOpError("invalid number of vector masks specified");
+ if (lhsMaskType && rhsMaskType) {
+ // Verify that the number of tuple elements matches the rank.
+ if (lhsMaskType.getTypes().size() != lhsType.getShape().size() ||
+ rhsMaskType.getTypes().size() != rhsType.getShape().size())
+ return op.emitOpError("invalid number of vector mask elements");
+ // Verify all tuple elements are index type.
+ for (auto eltType : lhsMaskType.getTypes()) {
+ if (!eltType.isa<IndexType>())
+ return op.emitOpError("vector mask element must have index type");
+ }
+ }
+ return success();
+}
+
+static std::vector<std::pair<int64_t, int64_t>> getDimMap(Attribute attr) {
+ std::vector<std::pair<int64_t, int64_t>> dimMap;
+ auto dimPairs = attr.dyn_cast_or_null<ArrayAttr>();
+ if (!dimPairs)
+ return dimMap;
+ for (auto dimPairAttr : dimPairs) {
+ auto dimPair = dimPairAttr.cast<ArrayAttr>();
+ assert(dimPair.size() == 2);
+ auto lhsDim = dimPair.begin()->cast<IntegerAttr>().getInt();
+ auto rhsDim = std::prev(dimPair.end())->cast<IntegerAttr>().getInt();
+ dimMap.push_back({lhsDim, rhsDim});
+ }
+ return dimMap;
+}
+
+std::vector<std::pair<int64_t, int64_t>> ContractionOp::getContractingDimMap() {
+ return getDimMap(getAttr(getContractingDimMapAttrName()));
+}
+
+std::vector<std::pair<int64_t, int64_t>> ContractionOp::getBatchDimMap() {
+ return getDimMap(getAttr(getBatchDimMapAttrName()));
+}
+
+//===----------------------------------------------------------------------===//
+// ExtractElementOp
//===----------------------------------------------------------------------===//
static Type inferExtractOpResultType(VectorType vectorType,
@@ -55,8 +233,8 @@
vectorType.getElementType());
}
-void VectorExtractElementOp::build(Builder *builder, OperationState &result,
- Value *source, ArrayRef<int32_t> position) {
+void ExtractElementOp::build(Builder *builder, OperationState &result,
+ Value *source, ArrayRef<int32_t> position) {
result.addOperands(source);
auto positionAttr = builder->getI32ArrayAttr(position);
result.addTypes(inferExtractOpResultType(source->getType().cast<VectorType>(),
@@ -64,14 +242,14 @@
result.addAttribute(getPositionAttrName(), positionAttr);
}
-static void print(OpAsmPrinter &p, VectorExtractElementOp op) {
+static void print(OpAsmPrinter &p, ExtractElementOp op) {
p << op.getOperationName() << " " << *op.vector() << op.position();
p.printOptionalAttrDict(op.getAttrs(), {"position"});
p << " : " << op.vector()->getType();
}
-static ParseResult parseVectorExtractElementOp(OpAsmParser &parser,
- OperationState &result) {
+static ParseResult parseExtractElementOp(OpAsmParser &parser,
+ OperationState &result) {
llvm::SMLoc attributeLoc, typeLoc;
SmallVector<NamedAttribute, 4> attrs;
OpAsmParser::OperandType vector;
@@ -100,7 +278,7 @@
parser.addTypeToList(resType, result.types));
}
-static LogicalResult verify(VectorExtractElementOp op) {
+static LogicalResult verify(ExtractElementOp op) {
auto positionAttr = op.position().getValue();
if (positionAttr.empty())
return op.emitOpError("expected non-empty position attribute");
@@ -120,13 +298,12 @@
}
//===----------------------------------------------------------------------===//
-// VectorStridedSliceOp
+// StridedSliceOp
//===----------------------------------------------------------------------===//
-static Type inferVectorExtractRangeOpResultType(VectorType vectorType,
- ArrayAttr offsets,
- ArrayAttr sizes,
- ArrayAttr strides) {
+static Type inferExtractRangeOpResultType(VectorType vectorType,
+ ArrayAttr offsets, ArrayAttr sizes,
+ ArrayAttr strides) {
assert(offsets.size() == sizes.size() && offsets.size() == strides.size());
SmallVector<int64_t, 4> shape;
shape.reserve(vectorType.getRank());
@@ -139,30 +316,29 @@
return VectorType::get(shape, vectorType.getElementType());
}
-void VectorStridedSliceOp::build(Builder *builder, OperationState &result,
- Value *source, ArrayRef<int64_t> offsets,
- ArrayRef<int64_t> sizes,
- ArrayRef<int64_t> strides) {
+void StridedSliceOp::build(Builder *builder, OperationState &result,
+ Value *source, ArrayRef<int64_t> offsets,
+ ArrayRef<int64_t> sizes, ArrayRef<int64_t> strides) {
result.addOperands(source);
auto offsetsAttr = builder->getI64ArrayAttr(offsets);
auto sizesAttr = builder->getI64ArrayAttr(sizes);
auto stridesAttr = builder->getI64ArrayAttr(strides);
result.addTypes(
- inferVectorExtractRangeOpResultType(source->getType().cast<VectorType>(),
- offsetsAttr, sizesAttr, stridesAttr));
+ inferExtractRangeOpResultType(source->getType().cast<VectorType>(),
+ offsetsAttr, sizesAttr, stridesAttr));
result.addAttribute(getOffsetsAttrName(), offsetsAttr);
result.addAttribute(getSizesAttrName(), sizesAttr);
result.addAttribute(getStridesAttrName(), stridesAttr);
}
-static void print(OpAsmPrinter &p, VectorStridedSliceOp op) {
+static void print(OpAsmPrinter &p, StridedSliceOp op) {
p << op.getOperationName() << " " << *op.vector();
p.printOptionalAttrDict(op.getAttrs());
p << " : " << op.vector()->getType() << " to " << op.getResult()->getType();
}
-static ParseResult parseVectorStridedSliceOp(OpAsmParser &parser,
- OperationState &result) {
+static ParseResult parseStridedSliceOp(OpAsmParser &parser,
+ OperationState &result) {
llvm::SMLoc attributeLoc, typeLoc;
OpAsmParser::OperandType vector;
VectorType vectorType, resultVectorType;
@@ -177,7 +353,7 @@
}
// TODO(ntv) Should be moved to Tablegen Confined attributes.
-static bool isIntegerArrayAttrSmallerThanShape(VectorStridedSliceOp op,
+static bool isIntegerArrayAttrSmallerThanShape(StridedSliceOp op,
ArrayAttr arrayAttr,
ShapedType shape,
StringRef attrName) {
@@ -192,7 +368,7 @@
// Returns true if all integers in `arrayAttr` are in the half-open [min, max)
// interval. If `halfOpen` is true then the admissible interval is [min, max).
// Otherwise, the admissible interval is [min, max].
-static bool isIntegerArrayAttrConfinedToRange(VectorStridedSliceOp op,
+static bool isIntegerArrayAttrConfinedToRange(StridedSliceOp op,
ArrayAttr arrayAttr, int64_t min,
int64_t max, StringRef attrName,
bool halfOpen = true) {
@@ -214,7 +390,7 @@
// interval. If `halfOpen` is true then the admissible interval is [min, max).
// Otherwise, the admissible interval is [min, max].
static bool
-isIntegerArrayAttrConfinedToShape(VectorStridedSliceOp op, ArrayAttr arrayAttr,
+isIntegerArrayAttrConfinedToShape(StridedSliceOp op, ArrayAttr arrayAttr,
ShapedType shape, StringRef attrName,
bool halfOpen = true, int64_t min = 0) {
assert(arrayAttr.size() <= static_cast<unsigned>(shape.getRank()));
@@ -235,10 +411,11 @@
// Returns true if the sum of corresponding integers in `arrayAttr1` and
// `arrayAttr2` is within the admissible interval with respect to `shape`.
// If `halfOpen` is true then the admissible interval is [min, max).
// Otherwise, the admissible interval is [min, max].
-static bool isSumOfIntegerArrayAttrConfinedToShape(
- VectorStridedSliceOp op, ArrayAttr arrayAttr1, ArrayAttr arrayAttr2,
- ShapedType shape, StringRef attrName1, StringRef attrName2,
- bool halfOpen = true, int64_t min = 1) {
+static bool
+isSumOfIntegerArrayAttrConfinedToShape(StridedSliceOp op, ArrayAttr arrayAttr1,
+ ArrayAttr arrayAttr2, ShapedType shape,
+ StringRef attrName1, StringRef attrName2,
+ bool halfOpen = true, int64_t min = 1) {
assert(arrayAttr1.size() <= static_cast<unsigned>(shape.getRank()));
assert(arrayAttr2.size() <= static_cast<unsigned>(shape.getRank()));
for (auto it : llvm::zip(arrayAttr1, arrayAttr2, shape.getShape())) {
@@ -257,7 +434,7 @@
return true;
}
-static LogicalResult verify(VectorStridedSliceOp op) {
+static LogicalResult verify(StridedSliceOp op) {
auto type = op.getVectorType();
auto offsets = op.offsets();
auto sizes = op.sizes();
@@ -268,9 +445,9 @@
return failure();
}
- auto offName = VectorStridedSliceOp::getOffsetsAttrName();
- auto sizesName = VectorStridedSliceOp::getSizesAttrName();
- auto stridesName = VectorStridedSliceOp::getStridesAttrName();
+ auto offName = StridedSliceOp::getOffsetsAttrName();
+ auto sizesName = StridedSliceOp::getSizesAttrName();
+ auto stridesName = StridedSliceOp::getStridesAttrName();
if (!isIntegerArrayAttrSmallerThanShape(op, offsets, type, offName) ||
!isIntegerArrayAttrSmallerThanShape(op, sizes, type, sizesName) ||
!isIntegerArrayAttrSmallerThanShape(op, strides, type, stridesName) ||
@@ -283,7 +460,7 @@
sizesName, /*halfOpen=*/false))
return failure();
- auto resultType = inferVectorExtractRangeOpResultType(
+ auto resultType = inferExtractRangeOpResultType(
op.getVectorType(), op.offsets(), op.sizes(), op.strides());
if (op.getResult()->getType() != resultType) {
op.emitOpError("expected result type to be ") << resultType;
@@ -294,18 +471,18 @@
}
//===----------------------------------------------------------------------===//
-// VectorOuterProductOp
+// OuterProductOp
//===----------------------------------------------------------------------===//
-static void print(OpAsmPrinter &p, VectorOuterProductOp op) {
+static void print(OpAsmPrinter &p, OuterProductOp op) {
p << op.getOperationName() << " " << *op.lhs() << ", " << *op.rhs();
if (llvm::size(op.acc()) > 0)
p << ", " << **op.acc().begin();
p << " : " << op.lhs()->getType() << ", " << op.rhs()->getType();
}
-static ParseResult parseVectorOuterProductOp(OpAsmParser &parser,
- OperationState &result) {
+static ParseResult parseOuterProductOp(OpAsmParser &parser,
+ OperationState &result) {
SmallVector<OpAsmParser::OperandType, 3> operandsInfo;
Type tLHS, tRHS;
if (parser.parseOperandList(operandsInfo) || parser.parseColonType(tLHS) ||
@@ -328,7 +505,7 @@
parser.addTypeToList(resType, result.types));
}
-static LogicalResult verify(VectorOuterProductOp op) {
+static LogicalResult verify(OuterProductOp op) {
VectorType vLHS = op.getOperandVectorTypeLHS(),
vRHS = op.getOperandVectorTypeRHS(),
vACC = op.getOperandVectorTypeACC(), vRES = op.getVectorType();
@@ -348,7 +525,7 @@
}
//===----------------------------------------------------------------------===//
-// VectorTransferReadOp
+// TransferReadOp
//===----------------------------------------------------------------------===//
template <typename EmitFun>
static LogicalResult verifyPermutationMap(AffineMap permutationMap,
@@ -379,7 +556,7 @@
return success();
}
-static void print(OpAsmPrinter &p, VectorTransferReadOp op) {
+static void print(OpAsmPrinter &p, TransferReadOp op) {
p << op.getOperationName() << " ";
p.printOperand(op.memref());
p << "[";
@@ -392,8 +569,7 @@
p << ", " << op.getVectorType();
}
-ParseResult parseVectorTransferReadOp(OpAsmParser &parser,
- OperationState &result) {
+ParseResult parseTransferReadOp(OpAsmParser &parser, OperationState &result) {
llvm::SMLoc typesLoc;
OpAsmParser::OperandType memrefInfo;
SmallVector<OpAsmParser::OperandType, 8> indexInfo;
@@ -421,7 +597,7 @@
parser.addTypeToList(vectorType, result.types));
}
-static LogicalResult verify(VectorTransferReadOp op) {
+static LogicalResult verify(TransferReadOp op) {
// Consistency of elemental types in memref and vector.
MemRefType memrefType = op.getMemRefType();
VectorType vectorType = op.getVectorType();
@@ -450,9 +626,9 @@
}
//===----------------------------------------------------------------------===//
-// VectorTransferWriteOp
+// TransferWriteOp
//===----------------------------------------------------------------------===//
-static void print(OpAsmPrinter &p, VectorTransferWriteOp op) {
+static void print(OpAsmPrinter &p, TransferWriteOp op) {
p << op.getOperationName() << " " << *op.vector() << ", " << *op.memref();
p << "[";
p.printOperands(op.indices());
@@ -464,8 +640,7 @@
p.printType(op.getMemRefType());
}
-ParseResult parseVectorTransferWriteOp(OpAsmParser &parser,
- OperationState &result) {
+ParseResult parseTransferWriteOp(OpAsmParser &parser, OperationState &result) {
llvm::SMLoc typesLoc;
OpAsmParser::OperandType storeValueInfo;
OpAsmParser::OperandType memRefInfo;
@@ -487,7 +662,7 @@
parser.resolveOperands(indexInfo, indexType, result.operands));
}
-static LogicalResult verify(VectorTransferWriteOp op) {
+static LogicalResult verify(TransferWriteOp op) {
// Consistency of elemental types in memref and vector.
MemRefType memrefType = op.getMemRefType();
VectorType vectorType = op.getVectorType();
@@ -514,36 +689,67 @@
}
//===----------------------------------------------------------------------===//
-// VectorTypeCastOp
+// TypeCastOp
//===----------------------------------------------------------------------===//
static MemRefType inferVectorTypeCastResultType(MemRefType t) {
return MemRefType::get({}, VectorType::get(t.getShape(), t.getElementType()));
}
-void VectorTypeCastOp::build(Builder *builder, OperationState &result,
- Value *source) {
+void TypeCastOp::build(Builder *builder, OperationState &result,
+ Value *source) {
result.addOperands(source);
result.addTypes(
inferVectorTypeCastResultType(source->getType().cast<MemRefType>()));
}
-static void print(OpAsmPrinter &p, VectorTypeCastOp &op) {
+static void print(OpAsmPrinter &p, TypeCastOp &op) {
auto type = op.getOperand()->getType().cast<MemRefType>();
p << op.getOperationName() << ' ' << *op.memref() << " : " << type << " to "
<< inferVectorTypeCastResultType(type);
}
-static LogicalResult verify(VectorTypeCastOp &op) {
+static LogicalResult verify(TypeCastOp &op) {
auto resultType = inferVectorTypeCastResultType(op.getMemRefType());
if (op.getResultMemRefType() != resultType)
return op.emitOpError("expects result type to be: ") << resultType;
return success();
}
+//===----------------------------------------------------------------------===//
+// IndexTupleOp
+//===----------------------------------------------------------------------===//
+
+ParseResult parseIndexTupleOp(OpAsmParser &parser, OperationState &result) {
+ auto indexType = parser.getBuilder().getIndexType();
+ Type resultType;
+ SmallVector<OpAsmParser::OperandType, 4> operandInfo;
+ return failure(
+ parser.parseOperandList(operandInfo) ||
+ parser.parseOptionalAttrDict(result.attributes) ||
+ parser.parseColonType(resultType) ||
+ parser.resolveOperands(operandInfo, indexType, result.operands) ||
+ parser.addTypeToList(resultType, result.types));
+}
+
+static void print(OpAsmPrinter &p, IndexTupleOp &op) {
+ p << op.getOperationName() << ' ';
+ p.printOperands(op.operands());
+ p << " : " << op.getResult()->getType();
+}
+
+static LogicalResult verify(IndexTupleOp &op) {
+ for (auto operand : op.getOperands())
+ if (!operand->getType().isa<IndexType>())
+ return op.emitOpError("all operands must be of index type");
+ return success();
+}
+
namespace mlir {
+namespace vector {
#define GET_OP_CLASSES
#include "mlir/Dialect/VectorOps/VectorOps.cpp.inc"
+} // namespace vector
} // namespace mlir
diff --git a/third_party/mlir/lib/IR/AsmPrinter.cpp b/third_party/mlir/lib/IR/AsmPrinter.cpp
index 8ffe9c5..655a776 100644
--- a/third_party/mlir/lib/IR/AsmPrinter.cpp
+++ b/third_party/mlir/lib/IR/AsmPrinter.cpp
@@ -58,6 +58,13 @@
OpAsmPrinter::~OpAsmPrinter() {}
+//===--------------------------------------------------------------------===//
+// Operation OpAsm interface.
+//===--------------------------------------------------------------------===//
+
+/// The OpAsmOpInterface, see OpAsmInterface.td for more details.
+#include "mlir/IR/OpAsmInterface.cpp.inc"
+
//===----------------------------------------------------------------------===//
// OpPrintingFlags
//===----------------------------------------------------------------------===//
@@ -1490,17 +1497,26 @@
const static unsigned indentWidth = 2;
protected:
- void numberValueID(Value *value);
void numberValuesInRegion(Region ®ion);
void numberValuesInBlock(Block &block);
+ void numberValuesInOp(Operation &op);
void printValueID(Value *value, bool printResultNo = true) const {
printValueIDImpl(value, printResultNo, os);
}
private:
+ /// Given a result of an operation 'result', find the result group head
+ /// 'lookupValue' and the result number of 'result' within that group in
+ /// 'lookupResultNo'. 'lookupResultNo' is only filled in if the result group
+ /// has more than 1 result.
+ void getResultIDAndNumber(OpResult *result, Value *&lookupValue,
+ int &lookupResultNo) const;
void printValueIDImpl(Value *value, bool printResultNo,
raw_ostream &stream) const;
+ /// Set a special value name for the given value.
+ void setValueName(Value *value, StringRef name);
+
/// Uniques the given value name within the printer. If the given name
/// conflicts, it is automatically renamed.
StringRef uniqueValueName(StringRef name);
@@ -1510,6 +1526,11 @@
DenseMap<Value *, unsigned> valueIDs;
DenseMap<Value *, StringRef> valueNames;
+ /// This is a map of operations that contain multiple named result groups,
+ /// i.e. there may be multiple names for the results of the operation. The
+ /// mapped values are the result numbers that start each result group.
+ DenseMap<Operation *, SmallVector<int, 1>> opResultGroups;
+
/// This is the block ID for each block in the current region.
DenseMap<Block *, unsigned> blockIDs;
@@ -1534,8 +1555,7 @@
OperationPrinter::OperationPrinter(Operation *op, ModulePrinter &other)
: ModulePrinter(other) {
llvm::ScopedHashTable<StringRef, char>::ScopeTy usedNamesScope(usedNames);
- if (op->getNumResults() != 0)
- numberValueID(op->getResult(0));
+ numberValuesInOp(*op);
for (auto ®ion : op->getRegions())
numberValuesInRegion(region);
@@ -1546,7 +1566,6 @@
numberValuesInRegion(*region);
}
-/// Number all of the SSA values in the specified region.
void OperationPrinter::numberValuesInRegion(Region ®ion) {
// Save the current value ids to allow for numbering values in sibling regions
// the same.
@@ -1580,59 +1599,72 @@
nextConflictID = curConflictID;
}
-/// Number all of the SSA values in the specified block, without traversing
-/// nested regions.
void OperationPrinter::numberValuesInBlock(Block &block) {
- // Number the block arguments.
- for (auto *arg : block.getArguments())
- numberValueID(arg);
+ bool isEntryBlock = block.isEntryBlock();
- // We number operation that have results, and we only number the first result.
+ // Number the block arguments. We give entry block arguments a special name
+ // 'arg'.
+ SmallString<32> specialNameBuffer(isEntryBlock ? "arg" : "");
+ llvm::raw_svector_ostream specialName(specialNameBuffer);
+ for (auto *arg : block.getArguments()) {
+ if (isEntryBlock) {
+ specialNameBuffer.resize(strlen("arg"));
+ specialName << nextArgumentID++;
+ }
+ setValueName(arg, specialName.str());
+ }
+
+ // Number the operations in this block.
for (auto &op : block)
- if (op.getNumResults() != 0)
- numberValueID(op.getResult(0));
+ numberValuesInOp(op);
}
-void OperationPrinter::numberValueID(Value *value) {
- assert(!valueIDs.count(value) && "Value numbered multiple times");
+void OperationPrinter::numberValuesInOp(Operation &op) {
+ unsigned numResults = op.getNumResults();
+ if (numResults == 0)
+ return;
+ Value *resultBegin = op.getResult(0);
- SmallString<32> specialNameBuffer;
- llvm::raw_svector_ostream specialName(specialNameBuffer);
+ // Function used to set the special result names for the operation.
+ SmallVector<int, 2> resultGroups(/*Size=*/1, /*Value=*/0);
+ auto setResultNameFn = [&](Value *result, StringRef name) {
+ assert(!valueIDs.count(result) && "result numbered multiple times");
+ assert(result->getDefiningOp() == &op && "result not defined by 'op'");
+ setValueName(result, name);
- // Check to see if this value requested a special name.
- auto *op = value->getDefiningOp();
- if (state && op) {
- if (auto *interface = state->getOpAsmInterface(op->getDialect()))
- interface->getOpResultName(op, specialName);
+ // Record the result number for groups not anchored at 0.
+ if (int resultNo = cast<OpResult>(result)->getResultNumber())
+ resultGroups.push_back(resultNo);
+ };
+
+ if (OpAsmOpInterface asmInterface = dyn_cast<OpAsmOpInterface>(&op)) {
+ asmInterface.getAsmResultNames(setResultNameFn);
+ } else if (auto *dialectAsmInterface =
+ state ? state->getOpAsmInterface(op.getDialect()) : nullptr) {
+ dialectAsmInterface->getAsmResultNames(&op, setResultNameFn);
}
- if (specialNameBuffer.empty()) {
- auto *blockArg = dyn_cast<BlockArgument>(value);
- if (!blockArg) {
- // This is an uninteresting operation result, give it a boring number and
- // be done with it.
- valueIDs[value] = nextValueID++;
- return;
- }
+ // If the first result wasn't numbered, give it a default number.
+ if (valueIDs.try_emplace(resultBegin, nextValueID).second)
+ ++nextValueID;
- // Otherwise, if this is an argument to the entry block of a region, give it
- // an 'arg' name.
- if (auto *block = blockArg->getOwner()) {
- auto *parentRegion = block->getParent();
- if (parentRegion && block == &parentRegion->front())
- specialName << "arg" << nextArgumentID++;
- }
+ // If this operation has multiple result groups, mark it.
+ if (resultGroups.size() != 1) {
+ llvm::array_pod_sort(resultGroups.begin(), resultGroups.end());
+ opResultGroups.try_emplace(&op, std::move(resultGroups));
+ }
+}
- // Otherwise number it normally.
- if (specialNameBuffer.empty()) {
- valueIDs[value] = nextValueID++;
- return;
- }
+/// Set a special value name for the given value.
+void OperationPrinter::setValueName(Value *value, StringRef name) {
+ // If the name is empty, the value uses the default numbering.
+ if (name.empty()) {
+ valueIDs[value] = nextValueID++;
+ return;
}
- // Ok, this value had an interesting name. Remember it with a sentinel.
valueIDs[value] = nameSentinel;
- valueNames[value] = uniqueValueName(specialName.str());
+ valueNames[value] = uniqueValueName(name);
}
/// Uniques the given value name within the printer. If the given name
@@ -1722,6 +1754,45 @@
printTrailingLocation(op->getLoc());
}
+void OperationPrinter::getResultIDAndNumber(OpResult *result,
+ Value *&lookupValue,
+ int &lookupResultNo) const {
+ Operation *owner = result->getOwner();
+ if (owner->getNumResults() == 1)
+ return;
+ int resultNo = result->getResultNumber();
+
+ // If this operation has multiple result groups, we will need to find the
+ // one corresponding to this result.
+ auto resultGroupIt = opResultGroups.find(owner);
+ if (resultGroupIt == opResultGroups.end()) {
+ // If not, just use the first result.
+ lookupResultNo = resultNo;
+ lookupValue = owner->getResult(0);
+ return;
+ }
+
+ // Find the correct index using a binary search, as the groups are ordered.
+ ArrayRef<int> resultGroups = resultGroupIt->second;
+ auto it = llvm::upper_bound(resultGroups, resultNo);
+ int groupResultNo = 0, groupSize = 0;
+
+ // If there are no larger elements, the last result group is the lookup.
+ if (it == resultGroups.end()) {
+ groupResultNo = resultGroups.back();
+ groupSize = static_cast<int>(owner->getNumResults()) - resultGroups.back();
+ } else {
+ // Otherwise, the previous element is the lookup.
+ groupResultNo = *std::prev(it);
+ groupSize = *it - groupResultNo;
+ }
+
+ // We only record the result number for a group of size greater than 1.
+ if (groupSize != 1)
+ lookupResultNo = resultNo - groupResultNo;
+ lookupValue = owner->getResult(groupResultNo);
+}
+
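The group lookup is a binary search over the sorted list of group-leading result numbers. With groups starting at [0, 2, 5] on a 7-result operation, result #3 resolves to the group headed by result #2 with offset 1, and result #6 to the group headed by #5 with offset 1. A standalone sketch:

#include <algorithm>
#include <cassert>
#include <utility>
#include <vector>

// Returns {groupStart, offsetInGroup} for `resultNo`, given the sorted result
// numbers that begin each group (the first group always starts at 0).
std::pair<int, int> findGroup(const std::vector<int> &groups, int resultNo) {
  auto it = std::upper_bound(groups.begin(), groups.end(), resultNo);
  int start = *std::prev(it); // Safe: groups.front() == 0 <= resultNo.
  return {start, resultNo - start};
}

int main() {
  std::vector<int> groups = {0, 2, 5};
  assert(findGroup(groups, 3) == std::make_pair(2, 1));
  assert(findGroup(groups, 6) == std::make_pair(5, 1));
  assert(findGroup(groups, 0) == std::make_pair(0, 0));
}
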
void OperationPrinter::printValueIDImpl(Value *value, bool printResultNo,
raw_ostream &stream) const {
if (!value) {
@@ -1735,12 +1806,8 @@
// If this is a reference to the result of a multi-result operation, print
// out the # identifier and make sure to map our lookup to the first result
// of the operation.
- if (auto *result = dyn_cast<OpResult>(value)) {
- if (result->getOwner()->getNumResults() != 1) {
- resultNo = result->getResultNumber();
- lookupValue = result->getOwner()->getResult(0);
- }
- }
+ if (OpResult *result = dyn_cast<OpResult>(value))
+ getResultIDAndNumber(result, lookupValue, resultNo);
auto it = valueIDs.find(lookupValue);
if (it == valueIDs.end()) {
@@ -1798,9 +1865,29 @@
void OperationPrinter::printOperation(Operation *op) {
if (size_t numResults = op->getNumResults()) {
- printValueID(op->getResult(0), /*printResultNo=*/false);
- if (numResults > 1)
- os << ':' << numResults;
+ auto printResultGroup = [&](size_t resultNo, size_t resultCount) {
+ printValueID(op->getResult(resultNo), /*printResultNo=*/false);
+ if (resultCount > 1)
+ os << ':' << resultCount;
+ };
+
+ // Check to see if this operation has multiple result groups.
+ auto resultGroupIt = opResultGroups.find(op);
+ if (resultGroupIt != opResultGroups.end()) {
+ ArrayRef<int> resultGroups = resultGroupIt->second;
+ // Interleave the groups excluding the last one; it will be handled
+ // separately.
+ interleaveComma(llvm::seq<int>(0, resultGroups.size() - 1), [&](int i) {
+ printResultGroup(resultGroups[i],
+ resultGroups[i + 1] - resultGroups[i]);
+ });
+ os << ", ";
+ printResultGroup(resultGroups.back(), numResults - resultGroups.back());
+
+ } else {
+ printResultGroup(/*resultNo=*/0, /*resultCount=*/numResults);
+ }
+
os << " = ";
}
diff --git a/third_party/mlir/lib/IR/CMakeLists.txt b/third_party/mlir/lib/IR/CMakeLists.txt
index 2519b83..415d9d6 100644
--- a/third_party/mlir/lib/IR/CMakeLists.txt
+++ b/third_party/mlir/lib/IR/CMakeLists.txt
@@ -5,5 +5,5 @@
ADDITIONAL_HEADER_DIRS
${MLIR_MAIN_INCLUDE_DIR}/mlir/IR
)
-add_dependencies(MLIRIR MLIRCallOpInterfacesIncGen MLIRSupport LLVMSupport)
+add_dependencies(MLIRIR MLIRCallOpInterfacesIncGen MLIROpAsmInterfacesIncGen MLIRSupport LLVMSupport)
target_link_libraries(MLIRIR MLIRSupport LLVMSupport)
diff --git a/third_party/mlir/lib/IR/FunctionSupport.cpp b/third_party/mlir/lib/IR/FunctionSupport.cpp
index 6b27eb8..1f39575 100644
--- a/third_party/mlir/lib/IR/FunctionSupport.cpp
+++ b/third_party/mlir/lib/IR/FunctionSupport.cpp
@@ -299,11 +299,7 @@
resultAttrStorage.emplace_back(attrNameBuf);
ignoredAttrs.append(resultAttrStorage.begin(), resultAttrStorage.end());
- auto attrs = op->getAttrs();
- if (attrs.size() > ignoredAttrs.size()) {
- p << "\n attributes ";
- p.printOptionalAttrDict(attrs, ignoredAttrs);
- }
+ p.printOptionalAttrDictWithKeyword(op->getAttrs(), ignoredAttrs);
// Print the body if this is not an external function.
Region &body = op->getRegion(0);
diff --git a/third_party/mlir/lib/IR/Operation.cpp b/third_party/mlir/lib/IR/Operation.cpp
index aa13a71..f53f38d 100644
--- a/third_party/mlir/lib/IR/Operation.cpp
+++ b/third_party/mlir/lib/IR/Operation.cpp
@@ -531,6 +531,21 @@
return getNumOperands() - postSuccessorOpCount;
}
+Optional<std::pair<unsigned, unsigned>>
+Operation::decomposeSuccessorOperandIndex(unsigned operandIndex) {
+ assert(!isKnownNonTerminator() && "only terminators may have successors");
+ assert(operandIndex < getNumOperands());
+ unsigned currentOperandIndex = getNumOperands();
+ auto *successorOperandCounts = getTrailingObjects<unsigned>();
+ for (unsigned i = 0, e = getNumSuccessors(); i < e; i++) {
+ unsigned successorIndex = e - i - 1;
+ currentOperandIndex -= successorOperandCounts[successorIndex];
+ if (currentOperandIndex <= operandIndex)
+ return std::make_pair(successorIndex, operandIndex - currentOperandIndex);
+ }
+ return None;
+}
+
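Successor operands live at the tail of the operand list, so the decomposition walks the successor groups from the back. For an op with 6 operands and successor operand counts [2, 3], operand 0 belongs to the op itself, operand 2 decomposes to (successor 0, index 1), and operand 4 to (successor 1, index 1). A standalone sketch, with std::optional standing in for llvm's Optional:

#include <cassert>
#include <optional>
#include <utility>
#include <vector>

std::optional<std::pair<unsigned, unsigned>>
decompose(unsigned numOperands, const std::vector<unsigned> &succCounts,
          unsigned operandIndex) {
  unsigned current = numOperands;
  for (unsigned i = 0, e = succCounts.size(); i < e; ++i) {
    unsigned successorIndex = e - i - 1;
    current -= succCounts[successorIndex];
    if (current <= operandIndex)
      return std::make_pair(successorIndex, operandIndex - current);
  }
  return std::nullopt; // A leading non-successor operand.
}

int main() {
  std::vector<unsigned> counts = {2, 3};
  assert(decompose(6, counts, 4) == std::make_pair(1u, 1u));
  assert(decompose(6, counts, 2) == std::make_pair(0u, 1u));
  assert(!decompose(6, counts, 0).has_value());
}
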
auto Operation::getSuccessorOperands(unsigned index) -> operand_range {
unsigned succOperandIndex = getSuccessorOperandIndex(index);
return {operand_iterator(this, succOperandIndex),
diff --git a/third_party/mlir/lib/Parser/Parser.cpp b/third_party/mlir/lib/Parser/Parser.cpp
index e7842d3..ccd5e17 100644
--- a/third_party/mlir/lib/Parser/Parser.cpp
+++ b/third_party/mlir/lib/Parser/Parser.cpp
@@ -2901,7 +2901,7 @@
return failure();
// Parsed a valid affine map.
if (exprs.empty())
- map = AffineMap();
+ map = AffineMap::get(getContext());
else
map = AffineMap::get(numDimOperands, dimsAndSymbols.size() - numDimOperands,
exprs);
@@ -2912,7 +2912,8 @@
///
/// affine-map ::= dim-and-symbol-id-lists `->` multi-dim-affine-expr
///
-/// multi-dim-affine-expr ::= `(` affine-expr (`,` affine-expr)* `)
+/// multi-dim-affine-expr ::= `(` `)`
+/// multi-dim-affine-expr ::= `(` affine-expr (`,` affine-expr)* `)`
AffineMap AffineParser::parseAffineMapRange(unsigned numDims,
unsigned numSymbols) {
parseToken(Token::l_paren, "expected '(' at start of affine map range");
@@ -2928,9 +2929,12 @@
// Parse a multi-dimensional affine expression (a comma-separated list of
// 1-d affine expressions); the list may be empty. Grammar:
// multi-dim-affine-expr ::= `(` `)`
// multi-dim-affine-expr ::= `(` affine-expr (`,` affine-expr)* `)`
- if (parseCommaSeparatedListUntil(Token::r_paren, parseElt, false))
+ if (parseCommaSeparatedListUntil(Token::r_paren, parseElt, true))
return AffineMap();
+ if (exprs.empty())
+ return AffineMap::get(getContext());
+
// Parsed a valid affine map.
return AffineMap::get(numDims, numSymbols, exprs);
}
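
A small hedged sketch of what this change enables (illustrative only; it assumes nothing beyond the AffineMap::get(MLIRContext *) overload used above): an empty range such as `() -> ()` now parses to a valid, non-null zero-result map instead of a null AffineMap.

#include <cassert>
#include "mlir/IR/AffineMap.h"
#include "mlir/IR/MLIRContext.h"

// Hypothetical check, not part of the patch.
void checkEmptyRangeMap(mlir::MLIRContext *ctx) {
  mlir::AffineMap map = mlir::AffineMap::get(ctx); // 0 dims, 0 symbols, 0 results
  assert(map && map.getNumResults() == 0 && "empty-range map is still non-null");
}
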
diff --git a/third_party/mlir/lib/TableGen/Operator.cpp b/third_party/mlir/lib/TableGen/Operator.cpp
index 8afffd0..927f275 100644
--- a/third_party/mlir/lib/TableGen/Operator.cpp
+++ b/third_party/mlir/lib/TableGen/Operator.cpp
@@ -153,6 +153,9 @@
} else if (auto opTrait = dyn_cast<tblgen::InternalOpTrait>(&t)) {
if (opTrait->getTrait() == trait)
return true;
+ } else if (auto opTrait = dyn_cast<tblgen::InterfaceOpTrait>(&t)) {
+ if (opTrait->getTrait() == trait)
+ return true;
}
}
return false;
diff --git a/third_party/mlir/lib/Transforms/MaterializeVectors.cpp b/third_party/mlir/lib/Transforms/MaterializeVectors.cpp
index 06016da..874eac6 100644
--- a/third_party/mlir/lib/Transforms/MaterializeVectors.cpp
+++ b/third_party/mlir/lib/Transforms/MaterializeVectors.cpp
@@ -146,8 +146,8 @@
using llvm::SetVector;
using namespace mlir;
-using vector::VectorTransferReadOp;
-using vector::VectorTransferWriteOp;
+using vector::TransferReadOp;
+using vector::TransferWriteOp;
using functional::makePtrDynCaster;
using functional::map;
@@ -181,7 +181,7 @@
SmallVector<int64_t, 8> hwVectorSize;
VectorType superVectorType;
VectorType hwVectorType;
- SmallVector<unsigned, 8> hwVectorInstance;
+ SmallVector<int64_t, 8> hwVectorInstance;
DenseMap<Value *, Value *> *substitutionsMap;
};
@@ -206,24 +206,24 @@
/// returns the distance, in number of elements, between a slice in a dimension
/// and the next slice in the same dimension.
/// e.g. shape[3, 4, 5] -> strides[20, 5, 1]
-static SmallVector<unsigned, 8> makeStrides(ArrayRef<unsigned> shape) {
- SmallVector<unsigned, 8> tmp;
+static SmallVector<int64_t, 8> makeStrides(ArrayRef<int64_t> shape) {
+ SmallVector<int64_t, 8> tmp;
tmp.reserve(shape.size());
- unsigned running = 1;
+ int64_t running = 1;
for (auto rit = shape.rbegin(), reit = shape.rend(); rit != reit; ++rit) {
assert(*rit > 0 && "size must be greater than 0 along all dimensions of "
"shape");
tmp.push_back(running);
running *= *rit;
}
- return SmallVector<unsigned, 8>(tmp.rbegin(), tmp.rend());
+ return SmallVector<int64_t, 8>(tmp.rbegin(), tmp.rend());
}
/// Given a shape with sizes greater than 0 along all dimensions, returns the
/// delinearized components of linearIndex along shape.
-static SmallVector<unsigned, 8> delinearize(unsigned linearIndex,
- ArrayRef<unsigned> shape) {
- SmallVector<unsigned, 8> res;
+static SmallVector<int64_t, 8> delinearize(int64_t linearIndex,
+ ArrayRef<int64_t> shape) {
+ SmallVector<int64_t, 8> res;
res.reserve(shape.size());
auto strides = makeStrides(shape);
for (unsigned idx = 0; idx < strides.size(); ++idx) {
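
A standalone sketch (plain C++) checking the stride and delinearization arithmetic above: shape [3, 4, 5] yields strides [20, 5, 1], and linear index 37 delinearizes to [1, 3, 2] since 37 = 1*20 + 3*5 + 2*1.

#include <cstdint>
#include <cstdio>
#include <vector>

static std::vector<int64_t> makeStrides(const std::vector<int64_t> &shape) {
  std::vector<int64_t> strides(shape.size());
  int64_t running = 1;
  for (int i = shape.size() - 1; i >= 0; --i) {
    strides[i] = running; // innermost dimension has stride 1
    running *= shape[i];
  }
  return strides;
}

int main() {
  std::vector<int64_t> shape = {3, 4, 5};
  auto strides = makeStrides(shape);
  int64_t linear = 37;
  for (size_t i = 0; i < strides.size(); ++i) {
    std::printf("%lld ", (long long)(linear / strides[i]));
    linear %= strides[i]; // peel off this dimension's contribution
  }
  std::printf("\n"); // prints: 1 3 2
}
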
@@ -333,7 +333,7 @@
/// vectorization trait at the op level directly.
static SmallVector<mlir::Value *, 8>
reindexAffineIndices(OpBuilder b, VectorType hwVectorType,
- ArrayRef<unsigned> hwVectorInstance,
+ ArrayRef<int64_t> hwVectorInstance,
ArrayRef<Value *> memrefIndices) {
auto vectorShape = hwVectorType.getShape();
assert(hwVectorInstance.size() >= vectorShape.size());
@@ -408,9 +408,9 @@
static Operation *instantiate(OpBuilder b, Operation *opInst,
VectorType hwVectorType,
DenseMap<Value *, Value *> *substitutionsMap) {
- assert(!isa<VectorTransferReadOp>(opInst) &&
+ assert(!isa<TransferReadOp>(opInst) &&
"Should call the function specialized for VectorTransferReadOp");
- assert(!isa<VectorTransferWriteOp>(opInst) &&
+ assert(!isa<TransferWriteOp>(opInst) &&
"Should call the function specialized for VectorTransferWriteOp");
if (opInst->getNumRegions() != 0)
return nullptr;
@@ -443,10 +443,9 @@
template <typename VectorTransferOpTy>
static AffineMap projectedPermutationMap(VectorTransferOpTy transfer,
VectorType hwVectorType) {
- static_assert(
- std::is_same<VectorTransferOpTy, VectorTransferReadOp>::value ||
- std::is_same<VectorTransferOpTy, VectorTransferWriteOp>::value,
- "Must be called on a VectorTransferOp");
+ static_assert(std::is_same<VectorTransferOpTy, TransferReadOp>::value ||
+ std::is_same<VectorTransferOpTy, TransferWriteOp>::value,
+ "Must be called on a VectorTransferOp");
auto superVectorType = transfer.getVectorType();
auto optionalRatio = shapeRatio(superVectorType, hwVectorType);
assert(optionalRatio &&
@@ -481,9 +480,9 @@
/// `hwVectorType` in the covering of the super-vector type. For a more
/// detailed description of the problem, see the description of
/// reindexAffineIndices.
-static Operation *instantiate(OpBuilder b, VectorTransferReadOp read,
+static Operation *instantiate(OpBuilder b, TransferReadOp read,
VectorType hwVectorType,
- ArrayRef<unsigned> hwVectorInstance,
+ ArrayRef<int64_t> hwVectorInstance,
DenseMap<Value *, Value *> *substitutionsMap) {
SmallVector<Value *, 8> indices =
map(makePtrDynCaster<Value>(), read.indices());
@@ -493,7 +492,7 @@
if (!map) {
return nullptr;
}
- auto cloned = b.create<VectorTransferReadOp>(
+ auto cloned = b.create<TransferReadOp>(
read.getLoc(), hwVectorType, read.memref(), affineIndices,
AffineMapAttr::get(map), read.padding());
return cloned.getOperation();
@@ -505,15 +504,15 @@
/// `hwVectorType` in the covering of the super-vector type. For a more
/// detailed description of the problem, see the description of
/// reindexAffineIndices.
-static Operation *instantiate(OpBuilder b, VectorTransferWriteOp write,
+static Operation *instantiate(OpBuilder b, TransferWriteOp write,
VectorType hwVectorType,
- ArrayRef<unsigned> hwVectorInstance,
+ ArrayRef<int64_t> hwVectorInstance,
DenseMap<Value *, Value *> *substitutionsMap) {
SmallVector<Value *, 8> indices =
map(makePtrDynCaster<Value>(), write.indices());
auto affineIndices =
reindexAffineIndices(b, hwVectorType, hwVectorInstance, indices);
- auto cloned = b.create<VectorTransferWriteOp>(
+ auto cloned = b.create<TransferWriteOp>(
write.getLoc(),
substitute(write.vector(), hwVectorType, substitutionsMap),
write.memref(), affineIndices,
@@ -556,12 +555,12 @@
if (op->getNumRegions() != 0)
return op->emitError("NYI path Op with region"), true;
- if (auto write = dyn_cast<VectorTransferWriteOp>(op)) {
+ if (auto write = dyn_cast<TransferWriteOp>(op)) {
auto *clone = instantiate(b, write, state->hwVectorType,
state->hwVectorInstance, state->substitutionsMap);
return clone == nullptr;
}
- if (auto read = dyn_cast<VectorTransferReadOp>(op)) {
+ if (auto read = dyn_cast<TransferReadOp>(op)) {
auto *clone = instantiate(b, read, state->hwVectorType,
state->hwVectorInstance, state->substitutionsMap);
if (!clone) {
@@ -679,7 +678,7 @@
continue;
}
- auto terminator = cast<VectorTransferWriteOp>(term);
+ auto terminator = cast<TransferWriteOp>(term);
LLVM_DEBUG(dbgs() << "\nFrom terminator:" << *term);
// Get the transitive use-defs starting from terminator, limited to the
@@ -749,7 +748,7 @@
// Capture terminators; i.e. vector.transfer_write ops involving a strict
// super-vector of subVectorType.
auto filter = [subVectorType](Operation &op) {
- if (!isa<VectorTransferWriteOp>(op)) {
+ if (!isa<TransferWriteOp>(op)) {
return false;
}
return matcher::operatesOnSuperVectorsOf(op, subVectorType);
diff --git a/third_party/mlir/lib/Transforms/Utils/GreedyPatternRewriteDriver.cpp b/third_party/mlir/lib/Transforms/Utils/GreedyPatternRewriteDriver.cpp
index 8c1fddf..aa4563c 100644
--- a/third_party/mlir/lib/Transforms/Utils/GreedyPatternRewriteDriver.cpp
+++ b/third_party/mlir/lib/Transforms/Utils/GreedyPatternRewriteDriver.cpp
@@ -22,10 +22,9 @@
#include "mlir/Dialect/StandardOps/Ops.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/PatternMatch.h"
-#include "mlir/IR/RegionGraphTraits.h"
#include "mlir/Transforms/FoldUtils.h"
+#include "mlir/Transforms/RegionUtils.h"
#include "llvm/ADT/DenseMap.h"
-#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
@@ -115,56 +114,6 @@
}
private:
- /// Erase the unreachable blocks within the provided regions. Returns success
- /// if any blocks were erased, failure otherwise.
- LogicalResult eraseUnreachableBlocks(MutableArrayRef<Region> regions) {
- // Set of blocks found to be reachable within a given region.
- llvm::df_iterator_default_set<Block *, 16> reachable;
- // If any blocks were found to be dead.
- bool erasedDeadBlocks = false;
-
- SmallVector<Region *, 1> worklist;
- worklist.reserve(regions.size());
- for (Region ®ion : regions)
- worklist.push_back(®ion);
- while (!worklist.empty()) {
- Region *region = worklist.pop_back_val();
- if (region->empty())
- continue;
-
- // If this is a single block region, just collect the nested regions.
- if (std::next(region->begin()) == region->end()) {
- for (Operation &op : region->front())
- for (Region ®ion : op.getRegions())
- worklist.push_back(®ion);
- continue;
- }
-
- // Mark all reachable blocks.
- reachable.clear();
- for (Block *block : depth_first_ext(®ion->front(), reachable))
- (void)block /* Mark all reachable blocks */;
-
- // Collect all of the dead blocks and push the live regions onto the
- // worklist.
- for (Block &block : llvm::make_early_inc_range(*region)) {
- if (!reachable.count(&block)) {
- block.dropAllDefinedValueUses();
- block.erase();
- erasedDeadBlocks = true;
- continue;
- }
-
- // Walk any regions within this block.
- for (Operation &op : block)
- for (Region ®ion : op.getRegions())
- worklist.push_back(®ion);
- }
- }
-
- return success(erasedDeadBlocks);
- }
-
// Look over the provided operands for any defining operations that should
// be re-added to the worklist. This function should be called when an
// operation is modified or removed, as it may trigger further
@@ -264,7 +213,7 @@
// After applying patterns, make sure that the CFG of each of the regions is
// kept up to date.
- changed |= succeeded(eraseUnreachableBlocks(regions));
+ changed |= succeeded(simplifyRegions(regions));
} while (changed && ++i < maxIterations);
// Whether the rewrite converges, i.e. wasn't changed in the last iteration.
return !changed;
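
The control flow of this driver reduces to a small fixed-point loop. The sketch below (plain C++ with stubbed helpers, not the real API) shows the shape: alternate one greedy rewrite sweep with the structural simplification now provided by simplifyRegions, until nothing changes or an iteration cap is reached.

// Stubs standing in for one rewrite sweep and one region cleanup pass.
static bool applyPatternsOnce() { return false; }
static bool simplifyRegionsOnce() { return false; }

// Returns true iff the rewrite converged (no change in the last iteration).
bool converge(int maxIterations) {
  bool changed = false;
  int i = 0;
  do {
    changed = applyPatternsOnce();
    changed |= simplifyRegionsOnce();
  } while (changed && ++i < maxIterations);
  return !changed;
}
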
diff --git a/third_party/mlir/lib/Transforms/Utils/RegionUtils.cpp b/third_party/mlir/lib/Transforms/Utils/RegionUtils.cpp
index 24c38c4..ba77cea 100644
--- a/third_party/mlir/lib/Transforms/Utils/RegionUtils.cpp
+++ b/third_party/mlir/lib/Transforms/Utils/RegionUtils.cpp
@@ -18,8 +18,11 @@
#include "mlir/Transforms/RegionUtils.h"
#include "mlir/IR/Block.h"
#include "mlir/IR/Operation.h"
+#include "mlir/IR/RegionGraphTraits.h"
#include "mlir/IR/Value.h"
+#include "llvm/ADT/DepthFirstIterator.h"
+#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallSet.h"
using namespace mlir;
@@ -73,3 +76,284 @@
for (Region ®ion : regions)
getUsedValuesDefinedAbove(region, region, values);
}
+
+//===----------------------------------------------------------------------===//
+// Unreachable Block Elimination
+//===----------------------------------------------------------------------===//
+
+/// Erase the unreachable blocks within the provided regions. Returns success
+/// if any blocks were erased, failure otherwise.
+// TODO: We could likely merge this with the DCE algorithm below.
+static LogicalResult eraseUnreachableBlocks(MutableArrayRef<Region> regions) {
+ // Set of blocks found to be reachable within a given region.
+ llvm::df_iterator_default_set<Block *, 16> reachable;
+ // If any blocks were found to be dead.
+ bool erasedDeadBlocks = false;
+
+ SmallVector<Region *, 1> worklist;
+ worklist.reserve(regions.size());
+ for (Region ®ion : regions)
+ worklist.push_back(®ion);
+ while (!worklist.empty()) {
+ Region *region = worklist.pop_back_val();
+ if (region->empty())
+ continue;
+
+ // If this is a single block region, just collect the nested regions.
+ if (std::next(region->begin()) == region->end()) {
+ for (Operation &op : region->front())
+ for (Region ®ion : op.getRegions())
+ worklist.push_back(®ion);
+ continue;
+ }
+
+ // Mark all reachable blocks.
+ reachable.clear();
+ for (Block *block : depth_first_ext(®ion->front(), reachable))
+ (void)block /* Mark all reachable blocks */;
+
+ // Collect all of the dead blocks and push the live regions onto the
+ // worklist.
+ for (Block &block : llvm::make_early_inc_range(*region)) {
+ if (!reachable.count(&block)) {
+ block.dropAllDefinedValueUses();
+ block.erase();
+ erasedDeadBlocks = true;
+ continue;
+ }
+
+ // Walk any regions within this block.
+ for (Operation &op : block)
+ for (Region ®ion : op.getRegions())
+ worklist.push_back(®ion);
+ }
+ }
+
+ return success(erasedDeadBlocks);
+}
+
+//===----------------------------------------------------------------------===//
+// Dead Code Elimination
+//===----------------------------------------------------------------------===//
+
+namespace {
+/// Data structure used to track which values have already been proved live.
+///
+/// Because Operations can have multiple results, this data structure tracks
+/// liveness for both Values and Operations to avoid having to look through
+/// all Operation results when analyzing a use.
+///
+/// This data structure essentially tracks the dataflow lattice.
+/// The set of values/ops proved live increases monotonically to a fixed-point.
+class LiveMap {
+public:
+ /// Value methods.
+ bool wasProvenLive(Value *value) { return liveValues.count(value); }
+ void setProvedLive(Value *value) {
+ changed |= liveValues.insert(value).second;
+ }
+
+ /// Operation methods.
+ bool wasProvenLive(Operation *op) { return liveOps.count(op); }
+ void setProvedLive(Operation *op) { changed |= liveOps.insert(op).second; }
+
+ /// Methods for tracking if we have reached a fixed-point.
+ void resetChanged() { changed = false; }
+ bool hasChanged() { return changed; }
+
+private:
+ bool changed = false;
+ DenseSet<Value *> liveValues;
+ DenseSet<Operation *> liveOps;
+};
+} // namespace
+
+static bool isUseSpeciallyKnownDead(OpOperand &use, LiveMap &liveMap) {
+ Operation *owner = use.getOwner();
+ unsigned operandIndex = use.getOperandNumber();
+ // This pass generally treats all uses of an op as live if the op itself is
+ // considered live. However, for successor operands to terminators we need a
+ // finer-grained notion where we deduce liveness for operands individually.
+ // The reason for this is easiest to think about in terms of a classical
+ // phi-node-based SSA IR, where each successor operand is really an operand
+ // to a *separate* phi node, rather than an operand of the branch itself, as
+ // in the block-argument representation that MLIR uses.
+ //
+ // And similarly, because each successor operand is really an operand to a phi
+ // node, rather than to the terminator op itself, a terminator op can't e.g.
+ // "print" the value of a successor operand.
+ if (owner->isKnownTerminator()) {
+ if (auto arg = owner->getSuccessorBlockArgument(operandIndex))
+ return !liveMap.wasProvenLive(*arg);
+ return false;
+ }
+ return false;
+}
+
+static void processValue(Value *value, LiveMap &liveMap) {
+ bool provedLive = llvm::any_of(value->getUses(), [&](OpOperand &use) {
+ if (isUseSpeciallyKnownDead(use, liveMap))
+ return false;
+ return liveMap.wasProvenLive(use.getOwner());
+ });
+ if (provedLive)
+ liveMap.setProvedLive(value);
+}
+
+static bool isOpIntrinsicallyLive(Operation *op) {
+ // This pass doesn't modify the CFG, so terminators are never deleted.
+ if (!op->isKnownNonTerminator())
+ return true;
+ // If the op has a side effect, we treat it as live.
+ if (!op->hasNoSideEffect())
+ return true;
+ return false;
+}
+
+static void propagateLiveness(Region ®ion, LiveMap &liveMap);
+static void propagateLiveness(Operation *op, LiveMap &liveMap) {
+ // Every Value is either a block argument or an op result; processValue
+ // handles both cases.
+
+ // Recurse on any regions the op has.
+ for (Region ®ion : op->getRegions())
+ propagateLiveness(region, liveMap);
+
+ // Process the op itself.
+ if (isOpIntrinsicallyLive(op)) {
+ liveMap.setProvedLive(op);
+ return;
+ }
+ for (Value *value : op->getResults())
+ processValue(value, liveMap);
+ bool provedLive = llvm::any_of(op->getResults(), [&](Value *value) {
+ return liveMap.wasProvenLive(value);
+ });
+ if (provedLive)
+ liveMap.setProvedLive(op);
+}
+
+static void propagateLiveness(Region ®ion, LiveMap &liveMap) {
+ if (region.empty())
+ return;
+
+ for (Block *block : llvm::post_order(®ion.front())) {
+ // We process block arguments after the ops in the block, to promote
+ // faster convergence to a fixed point (we try to visit uses before defs).
+ for (Operation &op : llvm::reverse(block->getOperations()))
+ propagateLiveness(&op, liveMap);
+ for (Value *value : block->getArguments())
+ processValue(value, liveMap);
+ }
+}
+
+static void eraseTerminatorSuccessorOperands(Operation *terminator,
+ LiveMap &liveMap) {
+ for (unsigned succI = 0, succE = terminator->getNumSuccessors();
+ succI < succE; succI++) {
+ // Iterating successors in reverse is not strictly needed, since we
+ // aren't erasing any successors. But it is slightly more efficient,
+ // since later operands of the terminator are erased first, reducing
+ // the quadratic behavior of repeated operand erasure.
+ unsigned succ = succE - succI - 1;
+ for (unsigned argI = 0, argE = terminator->getNumSuccessorOperands(succ);
+ argI < argE; argI++) {
+ // Iterating args in reverse is needed for correctness, to avoid
+ // shifting later args when earlier args are erased.
+ unsigned arg = argE - argI - 1;
+ Value *value = terminator->getSuccessor(succ)->getArgument(arg);
+ if (!liveMap.wasProvenLive(value)) {
+ terminator->eraseSuccessorOperand(succ, arg);
+ }
+ }
+ }
+}
+
+static LogicalResult deleteDeadness(MutableArrayRef<Region> regions,
+ LiveMap &liveMap) {
+ bool erasedAnything = false;
+ for (Region ®ion : regions) {
+ if (region.empty())
+ continue;
+
+ // We do the deletion in an order that deletes all uses before deleting
+ // defs.
+ // MLIR's SSA structural invariants guarantee that except for block
+ // arguments, the use-def graph is acyclic, so this is possible with a
+ // single walk of ops and then a final pass to clean up block arguments.
+ //
+ // To do this, we visit ops in an order that visits domtree children
+ // before domtree parents. A CFG post-order (with reverse iteration within
+ // a block) satisfies that without needing an explicit domtree calculation.
+ for (Block *block : llvm::post_order(®ion.front())) {
+ eraseTerminatorSuccessorOperands(block->getTerminator(), liveMap);
+ for (Operation &childOp :
+ llvm::make_early_inc_range(llvm::reverse(block->getOperations()))) {
+ erasedAnything |=
+ succeeded(deleteDeadness(childOp.getRegions(), liveMap));
+ if (!liveMap.wasProvenLive(&childOp)) {
+ erasedAnything = true;
+ childOp.erase();
+ }
+ }
+ }
+ // Delete block arguments.
+ // The entry block has an unknown contract with its enclosing block, so
+ // skip it.
+ for (Block &block : llvm::drop_begin(region.getBlocks(), 1)) {
+ // Iterate in reverse to avoid shifting later arguments when deleting
+ // earlier arguments.
+ for (unsigned i = 0, e = block.getNumArguments(); i < e; i++)
+ if (!liveMap.wasProvenLive(block.getArgument(e - i - 1))) {
+ block.eraseArgument(e - i - 1, /*updatePredTerms=*/false);
+ erasedAnything = true;
+ }
+ }
+ }
+ return success(erasedAnything);
+}
+
+// This function performs a simple dead code elimination algorithm over the
+// given regions.
+//
+// The overall goal is to prove that Values are dead, which allows deleting ops
+// and block arguments.
+//
+// This uses an optimistic algorithm that assumes everything is dead until
+// proved otherwise, allowing it to delete recursively dead cycles.
+//
+// This is a simple fixed-point dataflow analysis algorithm on a lattice
+// {Dead,Alive}. Because liveness flows backward, we generally try to
+// iterate everything backward to speed up convergence to the fixed-point. This
+// makes it possible to delete recursively dead cycles of the use-def graph,
+// including block arguments.
+//
+// This function returns success if any operations or arguments were deleted,
+// failure otherwise.
+static LogicalResult runRegionDCE(MutableArrayRef<Region> regions) {
+ LiveMap liveMap;
+ do {
+ liveMap.resetChanged();
+
+ for (Region ®ion : regions)
+ propagateLiveness(region, liveMap);
+ } while (liveMap.hasChanged());
+
+ return deleteDeadness(regions, liveMap);
+}
+
+//===----------------------------------------------------------------------===//
+// Region Simplification
+//===----------------------------------------------------------------------===//
+
+/// Run a set of structural simplifications over the given regions. This
+/// includes transformations like unreachable block elimination, dead argument
+/// elimination, as well as some other DCE. This function returns success if any
+/// of the regions were simplified, failure otherwise.
+LogicalResult mlir::simplifyRegions(llvm::MutableArrayRef<Region> regions) {
+ LogicalResult eliminatedBlocks = eraseUnreachableBlocks(regions);
+ LogicalResult eliminatedOpsOrArgs = runRegionDCE(regions);
+ return success(succeeded(eliminatedBlocks) || succeeded(eliminatedOpsOrArgs));
+}
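
To see the optimistic fixed-point at work, here is a toy model (plain C++ over a hypothetical op graph, not the MLIR API): ops are assumed dead until proven live, side-effecting ops seed the live set, and liveness propagates from users to the values they consume, so a cycle of pure ops with no live user is never marked live.

#include <cstdio>
#include <set>
#include <vector>

struct Op {
  bool hasSideEffect;
  std::vector<int> operands; // indices of defining ops
};

int main() {
  // op0 and op1 feed each other (a dead cycle); op2 feeds op3, which has a
  // side effect (e.g. a store), so op2 and op3 are proven live.
  std::vector<Op> ops = {{false, {1}}, {false, {0}}, {false, {}}, {true, {2}}};
  std::set<int> live;
  bool changed;
  do {
    changed = false;
    for (int i = ops.size() - 1; i >= 0; --i) { // iterate backward, as above
      if (!ops[i].hasSideEffect && !live.count(i))
        continue;                               // not yet proven live
      if (live.insert(i).second)
        changed = true;
      for (int def : ops[i].operands)           // a live op keeps its defs live
        changed |= live.insert(def).second;
    }
  } while (changed);
  for (size_t i = 0; i < ops.size(); ++i)
    std::printf("op%zu: %s\n", i, live.count(i) ? "live" : "dead (erase)");
  // prints: op0 and op1 dead (the cycle), op2 and op3 live
}
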
diff --git a/third_party/mlir/lib/Transforms/Vectorize.cpp b/third_party/mlir/lib/Transforms/Vectorize.cpp
index b3eea35..2a0ce09 100644
--- a/third_party/mlir/lib/Transforms/Vectorize.cpp
+++ b/third_party/mlir/lib/Transforms/Vectorize.cpp
@@ -833,7 +833,7 @@
return LogicalResult::Failure;
LLVM_DEBUG(dbgs() << "\n[early-vect]+++++ permutationMap: ");
LLVM_DEBUG(permutationMap.print(dbgs()));
- auto transfer = b.create<vector::VectorTransferReadOp>(
+ auto transfer = b.create<vector::TransferReadOp>(
opInst->getLoc(), vectorType, memoryOp.getMemRef(),
map(makePtrDynCaster<Value>(), indices),
AffineMapAttr::get(permutationMap),
@@ -1035,9 +1035,9 @@
// Sanity checks.
assert(!isa<AffineLoadOp>(opInst) &&
"all loads must have already been fully vectorized independently");
- assert(!isa<vector::VectorTransferReadOp>(opInst) &&
+ assert(!isa<vector::TransferReadOp>(opInst) &&
"vector.transfer_read cannot be further vectorized");
- assert(!isa<vector::VectorTransferWriteOp>(opInst) &&
+ assert(!isa<vector::TransferWriteOp>(opInst) &&
"vector.transfer_write cannot be further vectorized");
if (auto store = dyn_cast<AffineStoreOp>(opInst)) {
@@ -1064,7 +1064,7 @@
return nullptr;
LLVM_DEBUG(dbgs() << "\n[early-vect]+++++ permutationMap: ");
LLVM_DEBUG(permutationMap.print(dbgs()));
- auto transfer = b.create<vector::VectorTransferWriteOp>(
+ auto transfer = b.create<vector::TransferWriteOp>(
opInst->getLoc(), vectorValue, memRef, indices,
AffineMapAttr::get(permutationMap));
auto *res = transfer.getOperation();
diff --git a/third_party/mlir/test/BUILD b/third_party/mlir/test/BUILD
index c1e7a44..268e5cf 100644
--- a/third_party/mlir/test/BUILD
+++ b/third_party/mlir/test/BUILD
@@ -18,6 +18,21 @@
)
gentbl(
+ name = "TestTransformPatternsIncGen",
+ tbl_outs = [
+ (
+ "-gen-rewriters",
+ "lib/DeclarativeTransforms/TestLinalgTransformPatterns.h.inc",
+ ),
+ ],
+ tblgen = "@local_config_mlir//:mlir-tblgen",
+ td_file = "lib/DeclarativeTransforms/TestLinalgTransformPatterns.td",
+ td_srcs = [
+ "@local_config_mlir//:LinalgTransformPatternsTdFiles",
+ ],
+)
+
+gentbl(
name = "TestOpsIncGen",
strip_include_prefix = "lib/TestDialect",
tbl_outs = [
@@ -37,7 +52,8 @@
tblgen = "@local_config_mlir//:mlir-tblgen",
td_file = "lib/TestDialect/TestOps.td",
td_srcs = [
- "@local_config_mlir//:include/mlir/IR/OpBase.td",
+ "@local_config_mlir//:OpBaseTdFiles",
+ "@local_config_mlir//:include/mlir/IR/OpAsmInterface.td",
"@local_config_mlir//:include/mlir/Analysis/CallInterfaces.td",
"@local_config_mlir//:include/mlir/Analysis/InferTypeOpInterface.td",
],
@@ -53,7 +69,10 @@
hdrs = [
"lib/TestDialect/TestDialect.h",
],
- includes = ["lib/TestDialect"],
+ includes = [
+ "lib/DeclarativeTransforms",
+ "lib/TestDialect",
+ ],
deps = [
":TestOpsIncGen",
"@local_config_mlir//:Analysis",
@@ -101,22 +120,26 @@
"lib/Transforms/TestCallGraph.cpp",
"lib/Transforms/TestConstantFold.cpp",
"lib/Transforms/TestInlining.cpp",
+ "lib/Transforms/TestLinalgTransforms.cpp",
"lib/Transforms/TestLoopFusion.cpp",
"lib/Transforms/TestLoopMapping.cpp",
"lib/Transforms/TestLoopParametricTiling.cpp",
"lib/Transforms/TestLowerVectorTransfers.cpp",
"lib/Transforms/TestMemRefStrideCalculation.cpp",
"lib/Transforms/TestOpaqueLoc.cpp",
+ "lib/Transforms/TestVectorToVectorConversion.cpp",
"lib/Transforms/TestVectorizationUtils.cpp",
],
includes = ["lib/TestDialect"],
deps = [
":TestDialect",
+ ":TestTransformPatternsIncGen",
"@llvm//:support",
"@local_config_mlir//:AffineOps",
"@local_config_mlir//:Analysis",
"@local_config_mlir//:EDSC",
"@local_config_mlir//:IR",
+ "@local_config_mlir//:Linalg",
"@local_config_mlir//:LoopOps",
"@local_config_mlir//:Pass",
"@local_config_mlir//:StandardOps",
diff --git a/third_party/mlir/test/lib/CMakeLists.txt b/third_party/mlir/test/lib/CMakeLists.txt
index de7d50a..534d0d3 100644
--- a/third_party/mlir/test/lib/CMakeLists.txt
+++ b/third_party/mlir/test/lib/CMakeLists.txt
@@ -1,3 +1,4 @@
+add_subdirectory(DeclarativeTransforms)
add_subdirectory(IR)
add_subdirectory(Pass)
add_subdirectory(TestDialect)
diff --git a/third_party/mlir/test/lib/DeclarativeTransforms/CMakeLists.txt b/third_party/mlir/test/lib/DeclarativeTransforms/CMakeLists.txt
new file mode 100644
index 0000000..06e81a0
--- /dev/null
+++ b/third_party/mlir/test/lib/DeclarativeTransforms/CMakeLists.txt
@@ -0,0 +1,3 @@
+set(LLVM_TARGET_DEFINITIONS TestLinalgTransformPatterns.td)
+mlir_tablegen(TestLinalgTransformPatterns.h.inc -gen-rewriters)
+add_public_tablegen_target(MLIRTestLinalgTransformPatternsIncGen)
diff --git a/third_party/mlir/test/lib/DeclarativeTransforms/TestLinalgTransformPatterns.td b/third_party/mlir/test/lib/DeclarativeTransforms/TestLinalgTransformPatterns.td
new file mode 100644
index 0000000..c052c74
--- /dev/null
+++ b/third_party/mlir/test/lib/DeclarativeTransforms/TestLinalgTransformPatterns.td
@@ -0,0 +1,78 @@
+//===- TestLinalgTransformPatterns.td - Test patterns --*- tablegen ----*-===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+//
+// This is the pattern definition file for declarative Linalg transformations
+// tests.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef TEST_LINALG_TRANSFORMS_PATTERNS
+#define TEST_LINALG_TRANSFORMS_PATTERNS
+
+#ifndef LINALG_TRANSFORMS
+include "mlir/Dialect/Linalg/Transforms/LinalgTransformPatterns.td"
+#endif // LINALG_TRANSFORMS
+
+//===----------------------------------------------------------------------===//
+// Test Linalg fusion patterns.
+//===----------------------------------------------------------------------===//
+def : Pat<(MatmulOp:$consumer $A, $B, $C),
+ (TileAndFuseLinalgOp<[100, 150], [0], "L1"> $consumer),
+ [
+ (Constraint<HasNoLinalgTransformMarker> $consumer),
+ (Constraint<IsProducedByOpOfType<"MatmulOp">> $consumer, $A),
+ ],
+ // In the buffer world there are no use-def chains or DAGs, so benefits
+ // cannot be computed automatically from the length of the matched
+ // pattern. Instead we specify the benefit ourselves for now.
+ // This is not expected to be a big challenge long-term because
+ // pattern benefits are akin to feature engineering: features should
+ // be learned.
+ (addBenefit 1)>;
+
+//===----------------------------------------------------------------------===//
+// Linalg tiling patterns.
+//===----------------------------------------------------------------------===//
+def : Pat<(MatmulOp:$op $A, $B, $C),
+ (TileLinalgOp<[2000, 3000, 4000], "L3"> $op),
+ [(Constraint<Or<[HasNoLinalgTransformMarker,
+ HasLinalgTransformMarker<"MEM">]>> $op)]>;
+def : Pat<(MatmulOp:$op $A, $B, $C),
+ (TileLinalgOp<[200, 300, 400], "L2"> $op),
+ [(Constraint<HasLinalgTransformMarker<"L3">> $op)]>;
+def : Pat<(MatmulOp:$op $A, $B, $C),
+ (TileLinalgOp<[20, 30, 40], "L1"> $op),
+ [(Constraint<HasLinalgTransformMarker<"L2">> $op)]>;
+def : Pat<(MatmulOp:$op $A, $B, $C),
+ (TileLinalgOp<[2, 3, 4], "REG"> $op),
+ [(Constraint<HasLinalgTransformMarker<"L1">> $op)]>;
+
+def : Pattern<(MatvecOp:$op $A, $b, $c),
+ [(TileLinalgOp<[5, 6], "L1"> $op)],
+ [(Constraint<HasNoLinalgTransformMarker> $op)]>;
+
+def : Pattern<(DotOp:$op $a, $b, $c),
+ [(TileLinalgOp<[8000], "L1"> $op)],
+ [(Constraint<Or<[HasNoLinalgTransformMarker,
+ HasLinalgTransformMarker<"MEM">,
+ HasLinalgTransformMarker<"L3">,
+ HasLinalgTransformMarker<"L2">]>> $op)]>;
+def : Pattern<(DotOp:$op $a, $b, $c),
+ [(TileLinalgOp<[8], "REG"> $op)],
+ [(Constraint<HasLinalgTransformMarker<"L1">> $op)]>;
+
+#endif // TEST_LINALG_TRANSFORMS_PATTERNS
diff --git a/third_party/mlir/test/lib/DeclarativeTransforms/lit.local.cfg b/third_party/mlir/test/lib/DeclarativeTransforms/lit.local.cfg
new file mode 100644
index 0000000..edb5b44
--- /dev/null
+++ b/third_party/mlir/test/lib/DeclarativeTransforms/lit.local.cfg
@@ -0,0 +1 @@
+config.suffixes.remove('.td')
\ No newline at end of file
diff --git a/third_party/mlir/test/lib/TestDialect/TestDialect.cpp b/third_party/mlir/test/lib/TestDialect/TestDialect.cpp
index 0178043..3c7fbee 100644
--- a/third_party/mlir/test/lib/TestDialect/TestDialect.cpp
+++ b/third_party/mlir/test/lib/TestDialect/TestDialect.cpp
@@ -30,6 +30,18 @@
//===----------------------------------------------------------------------===//
namespace {
+
+// Test support for interacting with the AsmPrinter.
+struct TestOpAsmInterface : public OpAsmDialectInterface {
+ using OpAsmDialectInterface::OpAsmDialectInterface;
+
+ void getAsmResultNames(Operation *op,
+ OpAsmSetValueNameFn setNameFn) const final {
+ if (auto asmOp = dyn_cast<AsmDialectInterfaceOp>(op))
+ setNameFn(asmOp, "result");
+ }
+};
+
struct TestOpFolderDialectInterface : public OpFolderDialectInterface {
using OpFolderDialectInterface::OpFolderDialectInterface;
@@ -112,7 +124,8 @@
#define GET_OP_LIST
#include "TestOps.cpp.inc"
>();
- addInterfaces<TestOpFolderDialectInterface, TestInlinerInterface>();
+ addInterfaces<TestOpAsmInterface, TestOpFolderDialectInterface,
+ TestInlinerInterface>();
allowUnknownOperations();
}
@@ -227,6 +240,7 @@
//===----------------------------------------------------------------------===//
// Test PolyForOp - parse list of region arguments.
//===----------------------------------------------------------------------===//
+
static ParseResult parsePolyForOp(OpAsmParser &parser, OperationState &result) {
SmallVector<OpAsmParser::OperandType, 4> ivsInfo;
// Parse list of region arguments without a delimiter.
diff --git a/third_party/mlir/test/lib/TestDialect/TestOps.td b/third_party/mlir/test/lib/TestDialect/TestOps.td
index a0e1cd6..d804fdc 100644
--- a/third_party/mlir/test/lib/TestDialect/TestOps.td
+++ b/third_party/mlir/test/lib/TestDialect/TestOps.td
@@ -19,6 +19,7 @@
#define TEST_OPS
include "mlir/IR/OpBase.td"
+include "mlir/IR/OpAsmInterface.td"
include "mlir/Analysis/CallInterfaces.td"
include "mlir/Analysis/InferTypeOpInterface.td"
@@ -408,6 +409,10 @@
(I32ElementsAttrOp ConstantAttr<I32ElementsAttr, "0">),
[(IsNotScalar $attr)]>;
+def TestBranchOp : TEST_Op<"br", [Terminator]> {
+ let arguments = (ins Variadic<AnyType>:$operands);
+}
+
//===----------------------------------------------------------------------===//
// Test Patterns
//===----------------------------------------------------------------------===//
@@ -995,4 +1000,16 @@
let parser = [{ return ::parse$cppClass(parser, result); }];
}
+//===----------------------------------------------------------------------===//
+// Test OpAsmInterface.
+
+def AsmInterfaceOp : TEST_Op<"asm_interface_op"> {
+ let results = (outs AnyType:$first, Variadic<AnyType>:$middle_results,
+ AnyType);
+}
+
+def AsmDialectInterfaceOp : TEST_Op<"asm_dialect_interface_op"> {
+ let results = (outs AnyType);
+}
+
#endif // TEST_OPS
diff --git a/third_party/mlir/test/lib/Transforms/CMakeLists.txt b/third_party/mlir/test/lib/Transforms/CMakeLists.txt
index 675b695..2d482e5 100644
--- a/third_party/mlir/test/lib/Transforms/CMakeLists.txt
+++ b/third_party/mlir/test/lib/Transforms/CMakeLists.txt
@@ -3,11 +3,13 @@
TestConstantFold.cpp
TestLoopFusion.cpp
TestInlining.cpp
+ TestLinalgTransforms.cpp
TestLoopMapping.cpp
TestLoopParametricTiling.cpp
TestLowerVectorTransfers.cpp
TestOpaqueLoc.cpp
TestMemRefStrideCalculation.cpp
+ TestVectorToVectorConversion.cpp
TestVectorizationUtils.cpp
ADDITIONAL_HEADER_DIRS
@@ -15,7 +17,10 @@
)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../TestDialect)
include_directories(${CMAKE_CURRENT_BINARY_DIR}/../TestDialect)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../DeclarativeTransforms)
+include_directories(${CMAKE_CURRENT_BINARY_DIR}/../DeclarativeTransforms)
add_dependencies(MLIRTestTransforms MLIRStandardOpsIncGen)
+add_dependencies(MLIRTestTransforms MLIRTestLinalgTransformPatternsIncGen)
target_link_libraries(MLIRTestTransforms
MLIRAffineOps
MLIRAnalysis
diff --git a/third_party/mlir/test/lib/Transforms/TestLinalgTransforms.cpp b/third_party/mlir/test/lib/Transforms/TestLinalgTransforms.cpp
new file mode 100644
index 0000000..37030ca
--- /dev/null
+++ b/third_party/mlir/test/lib/Transforms/TestLinalgTransforms.cpp
@@ -0,0 +1,62 @@
+//===- TestLinalgTransforms.cpp - Test Linalg transformation patterns -----===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+//
+// This file implements logic for testing Linalg transformations.
+//
+//===----------------------------------------------------------------------===//
+
+#include "mlir/Dialect/Linalg/IR/LinalgOps.h"
+#include "mlir/Dialect/Linalg/Transforms/LinalgTransforms.h"
+#include "mlir/Dialect/Linalg/Utils/Utils.h"
+#include "mlir/IR/PatternMatch.h"
+#include "mlir/Pass/Pass.h"
+
+using namespace mlir;
+using namespace mlir::linalg;
+
+namespace mlir {
+namespace linalg {
+namespace {
+#include "TestLinalgTransformPatterns.h.inc"
+} // end namespace
+} // end namespace linalg
+} // end namespace mlir
+
+namespace {
+struct TestLinalgTransforms : public FunctionPass<TestLinalgTransforms> {
+ void runOnFunction() override;
+};
+} // end anonymous namespace
+
+/// Apply transformations specified as patterns.
+void TestLinalgTransforms::runOnFunction() {
+ OwningRewritePatternList patterns;
+ auto funcOp = getFunction();
+
+ // Add the generated patterns to the list.
+ linalg::populateWithGenerated(&getContext(), &patterns);
+ applyPatternsGreedily(funcOp, patterns);
+
+ // Drop the marker.
+ funcOp.walk([](LinalgOp op) {
+ op.removeAttr(LinalgTransforms::kLinalgTransformMarker);
+ });
+}
+
+static PassRegistration<TestLinalgTransforms>
+ pass("test-linalg-transform-patterns",
+ "Test Linalg transformation patterns by applying them greedily.");
diff --git a/third_party/mlir/test/lib/Transforms/TestVectorToVectorConversion.cpp b/third_party/mlir/test/lib/Transforms/TestVectorToVectorConversion.cpp
new file mode 100644
index 0000000..2550796
--- /dev/null
+++ b/third_party/mlir/test/lib/Transforms/TestVectorToVectorConversion.cpp
@@ -0,0 +1,44 @@
+//===- TestVectorToVectorConversion.cpp - Test VectorTransfers lowering --===//
+//
+// Copyright 2019 The MLIR Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#include <type_traits>
+
+#include "mlir/Conversion/VectorConversions/VectorConversions.h"
+#include "mlir/IR/PatternMatch.h"
+#include "mlir/Pass/Pass.h"
+#include "mlir/Transforms/Passes.h"
+
+using namespace mlir;
+
+namespace {
+
+struct TestVectorToVectorConversion
+ : public FunctionPass<TestVectorToVectorConversion> {
+ void runOnFunction() override {
+ OwningRewritePatternList patterns;
+ auto *context = &getContext();
+ populateVectorToVectorConversionPatterns(context, patterns);
+ applyPatternsGreedily(getFunction(), patterns);
+ }
+};
+
+} // end anonymous namespace
+
+static PassRegistration<TestVectorToVectorConversion>
+ pass("test-vector-to-vector-conversion",
+ "Test conversion patterns between ops in the vector dialect");
diff --git a/third_party/mlir/test/lib/Transforms/TestVectorizationUtils.cpp b/third_party/mlir/test/lib/Transforms/TestVectorizationUtils.cpp
index 4fdb660..f0f1f6b 100644
--- a/third_party/mlir/test/lib/Transforms/TestVectorizationUtils.cpp
+++ b/third_party/mlir/test/lib/Transforms/TestVectorizationUtils.cpp
@@ -131,7 +131,7 @@
opInst->emitRemark("NOT MATCHED");
} else {
outs << "\nmatched: " << *opInst << " with shape ratio: ";
- interleaveComma(MutableArrayRef<unsigned>(*ratio), outs);
+ interleaveComma(MutableArrayRef<int64_t>(*ratio), outs);
}
}
}
diff --git a/third_party/mlir/tools/mlir-cuda-runner/cuda-runtime-wrappers.cpp b/third_party/mlir/tools/mlir-cuda-runner/cuda-runtime-wrappers.cpp
index ac77258..0698095 100644
--- a/third_party/mlir/tools/mlir-cuda-runner/cuda-runtime-wrappers.cpp
+++ b/third_party/mlir/tools/mlir-cuda-runner/cuda-runtime-wrappers.cpp
@@ -113,16 +113,3 @@
mcuMemHostRegisterMemRef3dFloat(const MemRefType<float, 3> *arg) {
mcuMemHostRegisterMemRef(arg, 1.23f);
}
-
-/// Prints the given float array to stderr.
-extern "C" void mcuPrintFloat(const MemRefType<float, 1> *arg) {
- if (arg->sizes[0] == 0) {
- llvm::outs() << "[]\n";
- return;
- }
- llvm::outs() << "[" << arg->data[0];
- for (int pos = 1; pos < arg->sizes[0]; pos++) {
- llvm::outs() << ", " << arg->data[pos];
- }
- llvm::outs() << "]\n";
-}
diff --git a/third_party/mlir/tools/mlir-opt/CMakeLists.txt b/third_party/mlir/tools/mlir-opt/CMakeLists.txt
index 628557d..e38b43d 100644
--- a/third_party/mlir/tools/mlir-opt/CMakeLists.txt
+++ b/third_party/mlir/tools/mlir-opt/CMakeLists.txt
@@ -16,10 +16,12 @@
target_link_libraries(MLIRMlirOptLib ${LIB_LIBS})
set(LIBS
+ MLIRAnalysis
MLIRAffineOps
MLIRAffineToStandard
MLIRLoopsToGPU
- MLIRAnalysis
+ MLIRLinalgToLLVM
+
MLIRLoopToStandard
MLIREDSC
MLIRFxpMathOps
@@ -49,7 +51,7 @@
MLIRTestTransforms
MLIRSupport
MLIRVectorOps
- MLIRVectorToLLVM
+ MLIRVectorConversions
)
if(MLIR_CUDA_CONVERSIONS_ENABLED)
list(APPEND LIBS
diff --git a/third_party/mlir/tools/mlir-tblgen/OpDefinitionsGen.cpp b/third_party/mlir/tools/mlir-tblgen/OpDefinitionsGen.cpp
index 46803b5..538aa6e 100644
--- a/third_party/mlir/tools/mlir-tblgen/OpDefinitionsGen.cpp
+++ b/third_party/mlir/tools/mlir-tblgen/OpDefinitionsGen.cpp
@@ -459,6 +459,9 @@
void emitDecl(raw_ostream &os);
void emitDef(raw_ostream &os);
+ // Generates the OpAsmOpInterface for this operation if possible.
+ void genOpAsmInterface();
+
// Generates the `getOperationName` method for this op.
void genOpNameGetter();
@@ -575,6 +578,7 @@
genTraits();
// Generate C++ code for various op methods. The order here determines the
// methods in the generated file.
+ genOpAsmInterface();
genOpNameGetter();
genNamedOperandGetters();
genNamedResultGetters();
@@ -1393,6 +1397,38 @@
method.body() << " return \"" << op.getOperationName() << "\";\n";
}
+void OpEmitter::genOpAsmInterface() {
+ // If the user only has one results or specifically added the Asm trait,
+ // then don't generate it for them. We specifically only handle multi result
+ // operations, because the name of a single result in the common case is not
+ // interesting(generally 'result'/'output'/etc.).
+ // TODO: We could also add a flag to allow operations to opt in to this
+ // generation, even if they only have a single operation.
+ int numResults = op.getNumResults();
+ if (numResults <= 1 || op.hasTrait("OpAsmOpInterface::Trait"))
+ return;
+
+ SmallVector<StringRef, 4> resultNames(numResults);
+ for (int i = 0; i != numResults; ++i)
+ resultNames[i] = op.getResultName(i);
+
+ // Don't add the trait if none of the results have a valid name.
+ if (llvm::all_of(resultNames, [](StringRef name) { return name.empty(); }))
+ return;
+ opClass.addTrait("OpAsmOpInterface::Trait");
+
+ // Generate the getAsmResultNames method, naming each non-empty result group.
+ auto &method = opClass.newMethod("void", "getAsmResultNames",
+ "OpAsmSetValueNameFn setNameFn");
+ auto &body = method.body();
+ for (int i = 0; i != numResults; ++i) {
+ body << " auto resultGroup" << i << " = getODSResults(" << i << ");\n"
+ << " if (!llvm::empty(resultGroup" << i << "))\n"
+ << " setNameFn(*resultGroup" << i << ".begin(), \""
+ << resultNames[i] << "\");\n";
+ }
+}
+
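
For a hypothetical two-result op with named results (min, max), the emitter above would produce roughly the following method body; this is reconstructed from the format strings in genOpAsmInterface, not copied from real generated output:

void getAsmResultNames(OpAsmSetValueNameFn setNameFn) {
  auto resultGroup0 = getODSResults(0);
  if (!llvm::empty(resultGroup0))
    setNameFn(*resultGroup0.begin(), "min");
  auto resultGroup1 = getODSResults(1);
  if (!llvm::empty(resultGroup1))
    setNameFn(*resultGroup1.begin(), "max");
}
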
//===----------------------------------------------------------------------===//
// OpOperandAdaptor emitter
//===----------------------------------------------------------------------===//